file_name
stringlengths 3
137
| prefix
stringlengths 0
918k
| suffix
stringlengths 0
962k
| middle
stringlengths 0
812k
|
---|---|---|---|
gintmsk.rs | #[doc = "Reader of register GINTMSK"]
pub type R = crate::R<u32, super::GINTMSK>;
#[doc = "Writer for register GINTMSK"]
pub type W = crate::W<u32, super::GINTMSK>;
#[doc = "Register GINTMSK `reset()`'s with value 0"]
impl crate::ResetValue for super::GINTMSK {
    type Type = u32;
    // Reset value is 0, per the register description in the #[doc] above.
    // NOTE(review): svd2rust-generated; regenerate from the SVD rather than
    // hand-editing.
    #[inline(always)]
    fn reset_value() -> Self::Type {
        0
    }
}
#[doc = "Reader of field `MODEMISMSK`"]
pub type MODEMISMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `MODEMISMSK`"]
pub struct MODEMISMSK_W<'a> {
    // Borrowed register writer; each method returns it to allow chaining.
    w: &'a mut W,
}
// NOTE(review): svd2rust-generated single-bit write proxy for bit 1 of
// GINTMSK. The identical pattern repeats for every field below; do not
// hand-edit — regenerate from the SVD.
impl<'a> MODEMISMSK_W<'a> {
    #[doc = r"Sets the field bit"]
    #[inline(always)]
    pub fn set_bit(self) -> &'a mut W {
        self.bit(true)
    }
    #[doc = r"Clears the field bit"]
    #[inline(always)]
    pub fn clear_bit(self) -> &'a mut W {
        self.bit(false)
    }
    #[doc = r"Writes raw bits to the field"]
    #[inline(always)]
    pub fn bit(self, value: bool) -> &'a mut W {
        // Read-modify-write of bit 1 in the in-memory register image.
        self.w.bits = (self.w.bits & !(0x01 << 1)) | (((value as u32) & 0x01) << 1);
        self.w
    }
}
#[doc = "Reader of field `OTGINTMSK`"]
pub type OTGINTMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `OTGINTMSK`"]
pub struct OTGINTMSK_W<'a> {
w: &'a mut W,
}
impl<'a> OTGINTMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 2)) | (((value as u32) & 0x01) << 2);
self.w
}
}
#[doc = "Reader of field `SOFMSK`"]
pub type SOFMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SOFMSK`"]
pub struct SOFMSK_W<'a> {
w: &'a mut W,
}
impl<'a> SOFMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 3)) | (((value as u32) & 0x01) << 3);
self.w
}
}
#[doc = "Reader of field `RXFLVLMSK`"]
pub type RXFLVLMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `RXFLVLMSK`"]
pub struct RXFLVLMSK_W<'a> {
w: &'a mut W,
}
impl<'a> RXFLVLMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 4)) | (((value as u32) & 0x01) << 4);
self.w
}
}
#[doc = "Reader of field `NPTXFEMPMSK`"]
pub type NPTXFEMPMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `NPTXFEMPMSK`"]
pub struct NPTXFEMPMSK_W<'a> {
w: &'a mut W,
}
impl<'a> NPTXFEMPMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 5)) | (((value as u32) & 0x01) << 5);
self.w
}
}
#[doc = "Reader of field `GINNAKEFFMSK`"]
pub type GINNAKEFFMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `GINNAKEFFMSK`"]
pub struct GINNAKEFFMSK_W<'a> {
w: &'a mut W,
}
impl<'a> GINNAKEFFMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 6)) | (((value as u32) & 0x01) << 6);
self.w
}
}
#[doc = "Reader of field `GOUTNAKEFFMSK`"]
pub type GOUTNAKEFFMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `GOUTNAKEFFMSK`"]
pub struct GOUTNAKEFFMSK_W<'a> {
w: &'a mut W,
}
impl<'a> GOUTNAKEFFMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 7)) | (((value as u32) & 0x01) << 7);
self.w
}
}
#[doc = "Reader of field `ERLYSUSPMSK`"]
pub type ERLYSUSPMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ERLYSUSPMSK`"]
pub struct ERLYSUSPMSK_W<'a> {
w: &'a mut W,
}
impl<'a> ERLYSUSPMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 10)) | (((value as u32) & 0x01) << 10);
self.w
}
}
#[doc = "Reader of field `USBSUSPMSK`"]
pub type USBSUSPMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `USBSUSPMSK`"]
pub struct USBSUSPMSK_W<'a> {
w: &'a mut W,
}
impl<'a> USBSUSPMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 11)) | (((value as u32) & 0x01) << 11);
self.w
}
}
#[doc = "Reader of field `USBRSTMSK`"]
pub type USBRSTMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `USBRSTMSK`"]
pub struct USBRSTMSK_W<'a> {
w: &'a mut W,
}
impl<'a> USBRSTMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 12)) | (((value as u32) & 0x01) << 12);
self.w
}
}
#[doc = "Reader of field `ENUMDONEMSK`"]
// Restored: this type alias was split out of the record (it appears in the
// detached "middle" column at the end of the file); without it,
// `R::enumdonemsk()` below references an undefined type.
pub type ENUMDONEMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ENUMDONEMSK`"]
pub struct ENUMDONEMSK_W<'a> {
w: &'a mut W,
}
impl<'a> ENUMDONEMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 13)) | (((value as u32) & 0x01) << 13);
self.w
}
}
#[doc = "Reader of field `ISOOUTDROPMSK`"]
pub type ISOOUTDROPMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `ISOOUTDROPMSK`"]
pub struct ISOOUTDROPMSK_W<'a> {
w: &'a mut W,
}
impl<'a> ISOOUTDROPMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 14)) | (((value as u32) & 0x01) << 14);
self.w
}
}
#[doc = "Reader of field `EOPFMSK`"]
pub type EOPFMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `EOPFMSK`"]
pub struct EOPFMSK_W<'a> {
w: &'a mut W,
}
impl<'a> EOPFMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 15)) | (((value as u32) & 0x01) << 15);
self.w
}
}
#[doc = "Reader of field `EPMISMSK`"]
pub type EPMISMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `EPMISMSK`"]
pub struct EPMISMSK_W<'a> {
w: &'a mut W,
}
impl<'a> EPMISMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 17)) | (((value as u32) & 0x01) << 17);
self.w
}
}
#[doc = "Reader of field `IEPINTMSK`"]
pub type IEPINTMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `IEPINTMSK`"]
pub struct IEPINTMSK_W<'a> {
w: &'a mut W,
}
impl<'a> IEPINTMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 18)) | (((value as u32) & 0x01) << 18);
self.w
}
}
#[doc = "Reader of field `OEPINTMSK`"]
pub type OEPINTMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `OEPINTMSK`"]
pub struct OEPINTMSK_W<'a> {
w: &'a mut W,
}
impl<'a> OEPINTMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 19)) | (((value as u32) & 0x01) << 19);
self.w
}
}
#[doc = "Reader of field `INCOMPISOINMSK`"]
pub type INCOMPISOINMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `INCOMPISOINMSK`"]
pub struct INCOMPISOINMSK_W<'a> {
w: &'a mut W,
}
impl<'a> INCOMPISOINMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 20)) | (((value as u32) & 0x01) << 20);
self.w
}
}
#[doc = "Reader of field `INCOMPLPMSK`"]
pub type INCOMPLPMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `INCOMPLPMSK`"]
pub struct INCOMPLPMSK_W<'a> {
w: &'a mut W,
}
impl<'a> INCOMPLPMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 21)) | (((value as u32) & 0x01) << 21);
self.w
}
}
#[doc = "Reader of field `FETSUSPMSK`"]
pub type FETSUSPMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `FETSUSPMSK`"]
pub struct FETSUSPMSK_W<'a> {
w: &'a mut W,
}
impl<'a> FETSUSPMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 22)) | (((value as u32) & 0x01) << 22);
self.w
}
}
#[doc = "Reader of field `RESETDETMSK`"]
pub type RESETDETMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `RESETDETMSK`"]
pub struct RESETDETMSK_W<'a> {
w: &'a mut W,
}
impl<'a> RESETDETMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 23)) | (((value as u32) & 0x01) << 23);
self.w
}
}
#[doc = "Reader of field `PRTINTMSK`"]
pub type PRTINTMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `PRTINTMSK`"]
pub struct PRTINTMSK_W<'a> {
w: &'a mut W,
}
impl<'a> PRTINTMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 24)) | (((value as u32) & 0x01) << 24);
self.w
}
}
#[doc = "Reader of field `HCHINTMSK`"]
pub type HCHINTMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `HCHINTMSK`"]
pub struct HCHINTMSK_W<'a> {
w: &'a mut W,
}
impl<'a> HCHINTMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 25)) | (((value as u32) & 0x01) << 25);
self.w
}
}
#[doc = "Reader of field `PTXFEMPMSK`"]
pub type PTXFEMPMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `PTXFEMPMSK`"]
pub struct PTXFEMPMSK_W<'a> {
w: &'a mut W,
}
impl<'a> PTXFEMPMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 26)) | (((value as u32) & 0x01) << 26);
self.w
}
}
#[doc = "Reader of field `CONIDSTSCHNGMSK`"]
pub type CONIDSTSCHNGMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `CONIDSTSCHNGMSK`"]
pub struct CONIDSTSCHNGMSK_W<'a> {
w: &'a mut W,
}
impl<'a> CONIDSTSCHNGMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 28)) | (((value as u32) & 0x01) << 28);
self.w
}
}
#[doc = "Reader of field `DISCONNINTMSK`"]
pub type DISCONNINTMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `DISCONNINTMSK`"]
pub struct DISCONNINTMSK_W<'a> {
w: &'a mut W,
}
impl<'a> DISCONNINTMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 29)) | (((value as u32) & 0x01) << 29);
self.w
}
}
#[doc = "Reader of field `SESSREQINTMSK`"]
pub type SESSREQINTMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `SESSREQINTMSK`"]
pub struct SESSREQINTMSK_W<'a> {
w: &'a mut W,
}
impl<'a> SESSREQINTMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 30)) | (((value as u32) & 0x01) << 30);
self.w
}
}
#[doc = "Reader of field `WKUPINTMSK`"]
pub type WKUPINTMSK_R = crate::R<bool, bool>;
#[doc = "Write proxy for field `WKUPINTMSK`"]
pub struct WKUPINTMSK_W<'a> {
w: &'a mut W,
}
impl<'a> WKUPINTMSK_W<'a> {
#[doc = r"Sets the field bit"]
#[inline(always)]
pub fn set_bit(self) -> &'a mut W {
self.bit(true)
}
#[doc = r"Clears the field bit"]
#[inline(always)]
pub fn clear_bit(self) -> &'a mut W {
self.bit(false)
}
#[doc = r"Writes raw bits to the field"]
#[inline(always)]
pub fn bit(self, value: bool) -> &'a mut W {
self.w.bits = (self.w.bits & !(0x01 << 31)) | (((value as u32) & 0x01) << 31);
self.w
}
}
impl R {
#[doc = "Bit 1 - Mode Mismatch Interrupt Mask (host and device)"]
#[inline(always)]
pub fn modemismsk(&self) -> MODEMISMSK_R {
MODEMISMSK_R::new(((self.bits >> 1) & 0x01) != 0)
}
#[doc = "Bit 2 - OTG Interrupt Mask (host and device)"]
#[inline(always)]
pub fn otgintmsk(&self) -> OTGINTMSK_R {
OTGINTMSK_R::new(((self.bits >> 2) & 0x01) != 0)
}
#[doc = "Bit 3 - Start of Frame Mask (host and device)"]
#[inline(always)]
pub fn sofmsk(&self) -> SOFMSK_R {
SOFMSK_R::new(((self.bits >> 3) & 0x01) != 0)
}
#[doc = "Bit 4 - Receive FIFO Non-Empty Mask (host and device)"]
#[inline(always)]
pub fn rxflvlmsk(&self) -> RXFLVLMSK_R {
RXFLVLMSK_R::new(((self.bits >> 4) & 0x01) != 0)
}
#[doc = "Bit 5 - Non-Periodic TxFIFO Empty Mask (host only)"]
#[inline(always)]
pub fn nptxfempmsk(&self) -> NPTXFEMPMSK_R {
NPTXFEMPMSK_R::new(((self.bits >> 5) & 0x01) != 0)
}
#[doc = "Bit 6 - Global Non-periodic IN NAK Effective Mask (device only)"]
#[inline(always)]
pub fn ginnakeffmsk(&self) -> GINNAKEFFMSK_R {
GINNAKEFFMSK_R::new(((self.bits >> 6) & 0x01) != 0)
}
#[doc = "Bit 7 - Global OUT NAK Effective Mask (device only)"]
#[inline(always)]
pub fn goutnakeffmsk(&self) -> GOUTNAKEFFMSK_R {
GOUTNAKEFFMSK_R::new(((self.bits >> 7) & 0x01) != 0)
}
#[doc = "Bit 10 - Early Suspend Mask (device only)"]
#[inline(always)]
pub fn erlysuspmsk(&self) -> ERLYSUSPMSK_R {
ERLYSUSPMSK_R::new(((self.bits >> 10) & 0x01) != 0)
}
#[doc = "Bit 11 - USB Suspend Mask (device only)"]
#[inline(always)]
pub fn usbsuspmsk(&self) -> USBSUSPMSK_R {
USBSUSPMSK_R::new(((self.bits >> 11) & 0x01) != 0)
}
#[doc = "Bit 12 - USB Reset Mask (device only)"]
#[inline(always)]
pub fn usbrstmsk(&self) -> USBRSTMSK_R {
USBRSTMSK_R::new(((self.bits >> 12) & 0x01) != 0)
}
#[doc = "Bit 13 - Enumeration Done Mask (device only)"]
#[inline(always)]
pub fn enumdonemsk(&self) -> ENUMDONEMSK_R {
ENUMDONEMSK_R::new(((self.bits >> 13) & 0x01) != 0)
}
#[doc = "Bit 14 - Isochronous OUT Packet Dropped Interrupt Mask (device only)"]
#[inline(always)]
pub fn isooutdropmsk(&self) -> ISOOUTDROPMSK_R {
ISOOUTDROPMSK_R::new(((self.bits >> 14) & 0x01) != 0)
}
#[doc = "Bit 15 - End of Periodic Frame Interrupt Mask (device only)"]
#[inline(always)]
pub fn eopfmsk(&self) -> EOPFMSK_R {
EOPFMSK_R::new(((self.bits >> 15) & 0x01) != 0)
}
#[doc = "Bit 17 - Endpoint Mismatch Interrupt Mask (device only)"]
#[inline(always)]
pub fn epmismsk(&self) -> EPMISMSK_R {
EPMISMSK_R::new(((self.bits >> 17) & 0x01) != 0)
}
#[doc = "Bit 18 - IN Endpoints Interrupt Mask (device only)"]
#[inline(always)]
pub fn iepintmsk(&self) -> IEPINTMSK_R {
IEPINTMSK_R::new(((self.bits >> 18) & 0x01) != 0)
}
#[doc = "Bit 19 - OUT Endpoints Interrupt Mask (device only)"]
#[inline(always)]
pub fn oepintmsk(&self) -> OEPINTMSK_R {
OEPINTMSK_R::new(((self.bits >> 19) & 0x01) != 0)
}
#[doc = "Bit 20 - Incomplete Isochronous IN Transfer Mask (device only)"]
#[inline(always)]
pub fn incompisoinmsk(&self) -> INCOMPISOINMSK_R {
INCOMPISOINMSK_R::new(((self.bits >> 20) & 0x01) != 0)
}
#[doc = "Bit 21 - Incomplete Periodic Transfer Mask (host only)"]
#[inline(always)]
pub fn incomplpmsk(&self) -> INCOMPLPMSK_R {
INCOMPLPMSK_R::new(((self.bits >> 21) & 0x01) != 0)
}
#[doc = "Bit 22 - Data Fetch Suspended Mask (device only)"]
#[inline(always)]
pub fn fetsuspmsk(&self) -> FETSUSPMSK_R {
FETSUSPMSK_R::new(((self.bits >> 22) & 0x01) != 0)
}
#[doc = "Bit 23 - Reset detected Interrupt Mask (device only)"]
#[inline(always)]
pub fn resetdetmsk(&self) -> RESETDETMSK_R {
RESETDETMSK_R::new(((self.bits >> 23) & 0x01) != 0)
}
#[doc = "Bit 24 - Host Port Interrupt Mask (host only)"]
#[inline(always)]
pub fn prtintmsk(&self) -> PRTINTMSK_R {
PRTINTMSK_R::new(((self.bits >> 24) & 0x01) != 0)
}
#[doc = "Bit 25 - Host Channels Interrupt Mask (host only)"]
#[inline(always)]
pub fn hchintmsk(&self) -> HCHINTMSK_R {
HCHINTMSK_R::new(((self.bits >> 25) & 0x01) != 0)
}
#[doc = "Bit 26 - Periodic TxFIFO Empty Mask (host only)"]
#[inline(always)]
pub fn ptxfempmsk(&self) -> PTXFEMPMSK_R {
PTXFEMPMSK_R::new(((self.bits >> 26) & 0x01) != 0)
}
#[doc = "Bit 28 - Connector ID Status Change Mask (host and device)"]
#[inline(always)]
pub fn conidstschngmsk(&self) -> CONIDSTSCHNGMSK_R {
CONIDSTSCHNGMSK_R::new(((self.bits >> 28) & 0x01) != 0)
}
#[doc = "Bit 29 - Disconnect Detected Interrupt Mask (host and device)"]
#[inline(always)]
pub fn disconnintmsk(&self) -> DISCONNINTMSK_R {
DISCONNINTMSK_R::new(((self.bits >> 29) & 0x01) != 0)
}
#[doc = "Bit 30 - Session Request/New Session Detected Interrupt Mask (host and device)"]
#[inline(always)]
pub fn sessreqintmsk(&self) -> SESSREQINTMSK_R {
SESSREQINTMSK_R::new(((self.bits >> 30) & 0x01) != 0)
}
#[doc = "Bit 31 - Resume/Remote Wakeup Detected Interrupt Mask (host and device)"]
#[inline(always)]
pub fn wkupintmsk(&self) -> WKUPINTMSK_R {
WKUPINTMSK_R::new(((self.bits >> 31) & 0x01) != 0)
}
}
impl W {
#[doc = "Bit 1 - Mode Mismatch Interrupt Mask (host and device)"]
#[inline(always)]
pub fn modemismsk(&mut self) -> MODEMISMSK_W {
MODEMISMSK_W { w: self }
}
#[doc = "Bit 2 - OTG Interrupt Mask (host and device)"]
#[inline(always)]
pub fn otgintmsk(&mut self) -> OTGINTMSK_W {
OTGINTMSK_W { w: self }
}
#[doc = "Bit 3 - Start of Frame Mask (host and device)"]
#[inline(always)]
pub fn sofmsk(&mut self) -> SOFMSK_W {
SOFMSK_W { w: self }
}
#[doc = "Bit 4 - Receive FIFO Non-Empty Mask (host and device)"]
#[inline(always)]
pub fn rxflvlmsk(&mut self) -> RXFLVLMSK_W {
RXFLVLMSK_W { w: self }
}
#[doc = "Bit 5 - Non-Periodic TxFIFO Empty Mask (host only)"]
#[inline(always)]
pub fn nptxfempmsk(&mut self) -> NPTXFEMPMSK_W {
NPTXFEMPMSK_W { w: self }
}
#[doc = "Bit 6 - Global Non-periodic IN NAK Effective Mask (device only)"]
#[inline(always)]
pub fn ginnakeffmsk(&mut self) -> GINNAKEFFMSK_W {
GINNAKEFFMSK_W { w: self }
}
#[doc = "Bit 7 - Global OUT NAK Effective Mask (device only)"]
#[inline(always)]
pub fn goutnakeffmsk(&mut self) -> GOUTNAKEFFMSK_W {
GOUTNAKEFFMSK_W { w: self }
}
#[doc = "Bit 10 - Early Suspend Mask (device only)"]
#[inline(always)]
pub fn erlysuspmsk(&mut self) -> ERLYSUSPMSK_W {
ERLYSUSPMSK_W { w: self }
}
#[doc = "Bit 11 - USB Suspend Mask (device only)"]
#[inline(always)]
pub fn usbsuspmsk(&mut self) -> USBSUSPMSK_W {
USBSUSPMSK_W { w: self }
}
#[doc = "Bit 12 - USB Reset Mask (device only)"]
#[inline(always)]
pub fn usbrstmsk(&mut self) -> USBRSTMSK_W {
USBRSTMSK_W { w: self }
}
#[doc = "Bit 13 - Enumeration Done Mask (device only)"]
#[inline(always)]
pub fn enumdonemsk(&mut self) -> ENUMDONEMSK_W {
ENUMDONEMSK_W { w: self }
}
#[doc = "Bit 14 - Isochronous OUT Packet Dropped Interrupt Mask (device only)"]
#[inline(always)]
pub fn isooutdropmsk(&mut self) -> ISOOUTDROPMSK_W {
ISOOUTDROPMSK_W { w: self }
}
#[doc = "Bit 15 - End of Periodic Frame Interrupt Mask (device only)"]
#[inline(always)]
pub fn eopfmsk(&mut self) -> EOPFMSK_W {
EOPFMSK_W { w: self }
}
#[doc = "Bit 17 - Endpoint Mismatch Interrupt Mask (device only)"]
#[inline(always)]
pub fn epmismsk(&mut self) -> EPMISMSK_W {
EPMISMSK_W { w: self }
}
#[doc = "Bit 18 - IN Endpoints Interrupt Mask (device only)"]
#[inline(always)]
pub fn iepintmsk(&mut self) -> IEPINTMSK_W {
IEPINTMSK_W { w: self }
}
#[doc = "Bit 19 - OUT Endpoints Interrupt Mask (device only)"]
#[inline(always)]
pub fn oepintmsk(&mut self) -> OEPINTMSK_W {
OEPINTMSK_W { w: self }
}
#[doc = "Bit 20 - Incomplete Isochronous IN Transfer Mask (device only)"]
#[inline(always)]
pub fn incompisoinmsk(&mut self) -> INCOMPISOINMSK_W {
INCOMPISOINMSK_W { w: self }
}
#[doc = "Bit 21 - Incomplete Periodic Transfer Mask (host only)"]
#[inline(always)]
pub fn incomplpmsk(&mut self) -> INCOMPLPMSK_W {
INCOMPLPMSK_W { w: self }
}
#[doc = "Bit 22 - Data Fetch Suspended Mask (device only)"]
#[inline(always)]
pub fn fetsuspmsk(&mut self) -> FETSUSPMSK_W {
FETSUSPMSK_W { w: self }
}
#[doc = "Bit 23 - Reset detected Interrupt Mask (device only)"]
#[inline(always)]
pub fn resetdetmsk(&mut self) -> RESETDETMSK_W {
RESETDETMSK_W { w: self }
}
#[doc = "Bit 24 - Host Port Interrupt Mask (host only)"]
#[inline(always)]
pub fn prtintmsk(&mut self) -> PRTINTMSK_W {
PRTINTMSK_W { w: self }
}
#[doc = "Bit 25 - Host Channels Interrupt Mask (host only)"]
#[inline(always)]
pub fn hchintmsk(&mut self) -> HCHINTMSK_W {
HCHINTMSK_W { w: self }
}
#[doc = "Bit 26 - Periodic TxFIFO Empty Mask (host only)"]
#[inline(always)]
pub fn ptxfempmsk(&mut self) -> PTXFEMPMSK_W {
PTXFEMPMSK_W { w: self }
}
#[doc = "Bit 28 - Connector ID Status Change Mask (host and device)"]
#[inline(always)]
pub fn conidstschngmsk(&mut self) -> CONIDSTSCHNGMSK_W {
CONIDSTSCHNGMSK_W { w: self }
}
#[doc = "Bit 29 - Disconnect Detected Interrupt Mask (host and device)"]
#[inline(always)]
pub fn disconnintmsk(&mut self) -> DISCONNINTMSK_W {
DISCONNINTMSK_W { w: self }
}
#[doc = "Bit 30 - Session Request/New Session Detected Interrupt Mask (host and device)"]
#[inline(always)]
pub fn sessreqintmsk(&mut self) -> SESSREQINTMSK_W {
SESSREQINTMSK_W { w: self }
}
#[doc = "Bit 31 - Resume/Remote Wakeup Detected Interrupt Mask (host and device)"]
#[inline(always)]
pub fn wkupintmsk(&mut self) -> WKUPINTMSK_W {
WKUPINTMSK_W { w: self }
}
} | pub type ENUMDONEMSK_R = crate::R<bool, bool>; |
__init__.py | __version__ = '1.0a3' |
||
util.go | package hclutils
import (
"bytes"
"errors"
"fmt"
"github.com/hashicorp/hcl2/hcl"
hjson "github.com/hashicorp/hcl2/hcl/json"
"github.com/hashicorp/hcl2/hcldec"
"github.com/hashicorp/nomad/nomad/structs"
"github.com/ugorji/go/codec"
"github.com/zclconf/go-cty/cty"
"github.com/zclconf/go-cty/cty/function"
"github.com/zclconf/go-cty/cty/function/stdlib"
)
// ParseHclInterface is used to convert an interface value representing a hcl2
// body and return the interpolated value. Vars may be nil if there are no
// variables to interpolate.
func | (val interface{}, spec hcldec.Spec, vars map[string]cty.Value) (cty.Value, hcl.Diagnostics, []error) {
evalCtx := &hcl.EvalContext{
Variables: vars,
Functions: GetStdlibFuncs(),
}
// Encode to json
var buf bytes.Buffer
enc := codec.NewEncoder(&buf, structs.JsonHandle)
err := enc.Encode(val)
if err != nil {
// Convert to a hcl diagnostics message
errorMessage := fmt.Sprintf("Label encoding failed: %v", err)
return cty.NilVal,
hcl.Diagnostics([]*hcl.Diagnostic{{
Severity: hcl.DiagError,
Summary: "Failed to encode label value",
Detail: errorMessage,
}}),
[]error{errors.New(errorMessage)}
}
// Parse the json as hcl2
hclFile, diag := hjson.Parse(buf.Bytes(), "")
if diag.HasErrors() {
return cty.NilVal, diag, formattedDiagnosticErrors(diag)
}
value, decDiag := hcldec.Decode(hclFile.Body, spec, evalCtx)
diag = diag.Extend(decDiag)
if diag.HasErrors() {
return cty.NilVal, diag, formattedDiagnosticErrors(diag)
}
return value, diag, nil
}
// GetStdlibFuncs returns the set of cty stdlib functions made available to
// HCL expressions interpolated by ParseHclInterface. Keys are the names
// usable from HCL; keep the map alphabetically ordered when extending it.
func GetStdlibFuncs() map[string]function.Function {
	return map[string]function.Function{
		"abs":        stdlib.AbsoluteFunc,
		"coalesce":   stdlib.CoalesceFunc,
		"concat":     stdlib.ConcatFunc,
		"hasindex":   stdlib.HasIndexFunc,
		"int":        stdlib.IntFunc,
		"jsondecode": stdlib.JSONDecodeFunc,
		"jsonencode": stdlib.JSONEncodeFunc,
		"length":     stdlib.LengthFunc,
		"lower":      stdlib.LowerFunc,
		"max":        stdlib.MaxFunc,
		"min":        stdlib.MinFunc,
		"reverse":    stdlib.ReverseFunc,
		"strlen":     stdlib.StrlenFunc,
		"substr":     stdlib.SubstrFunc,
		"upper":      stdlib.UpperFunc,
	}
}
// TODO: update hcl2 library with better diagnostics formatting for streamed configs
// - should be arbitrary labels not JSON https://github.com/hashicorp/hcl2/blob/4fba5e1a75e382aed7f7a7993f2c4836a5e1cd52/hcl/json/structure.go#L66
// - should not print diagnostic subject https://github.com/hashicorp/hcl2/blob/4fba5e1a75e382aed7f7a7993f2c4836a5e1cd52/hcl/diagnostic.go#L77
// formattedDiagnosticErrors flattens HCL diagnostics into plain errors of the
// form "Summary: Detail", rewriting the JSON-specific summary
// "Extraneous JSON object property" into the more meaningful "Invalid label"
// (the HCL body arrives here encoded as JSON — see the TODO above).
func formattedDiagnosticErrors(diag hcl.Diagnostics) []error {
	var errs []error
	for _, d := range diag {
		if d.Summary == "Extraneous JSON object property" {
			d.Summary = "Invalid label"
		}
		// fmt.Errorf replaces errors.New(fmt.Sprintf(...)) — staticcheck S1028.
		errs = append(errs, fmt.Errorf("%s: %s", d.Summary, d.Detail))
	}
	return errs
}
| ParseHclInterface |
token.rs | use std::error::Error;
use std::fmt;
use std::str;
use std::iter::Peekable;
/// A lexical token; payload variants borrow slices of the original input.
#[derive(Debug, PartialEq, Eq)]
pub enum Token<'a> {
    LeftParen,
    RightParen,
    /// An identifier, as a zero-copy slice of the input.
    Ident(&'a str),
    Comma,
    Equals,
    /// The contents of a double-quoted string, quotes excluded.
    String(&'a str),
}
/// The kind of a [`Token`] without its payload; useful for type checks and
/// diagnostics (see `Token::is_type`).
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum TokenType {
    LeftParen,
    RightParen,
    Ident,
    Comma,
    Equals,
    String,
}
impl fmt::Display for TokenType {
    /// Writes the human-readable name used in diagnostics, e.g.
    /// "identifier" or "`(`".
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let text = match *self {
            TokenType::Ident => "identifier",
            TokenType::String => "string",
            TokenType::LeftParen => "`(`",
            TokenType::RightParen => "`)`",
            TokenType::Comma => "`,`",
            TokenType::Equals => "`=`",
        };
        f.write_str(text)
    }
}
impl<'a> fmt::Display for Token<'a> {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result |
}
impl<'a> Token<'a> {
    /// Returns `true` when this token is of kind `t`.
    pub fn is_type(&self, t: TokenType) -> bool {
        t == self.get_type()
    }
    /// Maps the token to its payload-free kind.
    fn get_type(&self) -> TokenType {
        match *self {
            Token::LeftParen => TokenType::LeftParen,
            Token::RightParen => TokenType::RightParen,
            Token::Ident(_) => TokenType::Ident,
            Token::Comma => TokenType::Comma,
            Token::Equals => TokenType::Equals,
            Token::String(_) => TokenType::String,
        }
    }
}
/// A streaming tokenizer; yields `Result<Token, TokenError>` via `Iterator`.
pub struct Tokenizer<'a> {
    // Peekable char stream: peeking lets identifier scanning stop without
    // consuming the terminating character.
    s: Peekable<str::CharIndices<'a>>,
    // The full original input, used to slice out token payloads by byte offset.
    orig: &'a str,
}
/// Errors produced while tokenizing. `pos` is a byte offset into the input
/// (from `char_indices`).
#[derive(Debug, PartialEq, Eq)]
pub enum TokenError {
    /// A `"` was seen with no closing `"` before end of input.
    UnterminatedString{
        pos: usize,
    },
    /// A character that cannot start any token.
    UnexpectedCharacter{
        pos: usize,
        ch: char,
    },
}
impl fmt::Display for TokenError {
    /// Human-readable error message including the byte offset.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match *self {
            TokenError::UnterminatedString { pos } => {
                write!(f, "Unterminated string starting at {}", pos)
            }
            TokenError::UnexpectedCharacter { pos, ch } => {
                write!(f, "Unexpected character {} at {}", ch, pos)
            }
        }
    }
}
impl Error for TokenError {
    // NOTE(review): `description` and `cause` are deprecated in modern Rust
    // (superseded by `Display` and `source`); kept as-is since this file is
    // written against the older Error API (`&Error` trait objects) throughout.
    fn description(&self) -> &str {
        match self {
            &TokenError::UnterminatedString{..} => "unterminated string",
            &TokenError::UnexpectedCharacter{..} => "unexpected character",
        }
    }
    // No underlying cause: both variants originate in the tokenizer itself.
    fn cause(&self) -> Option<&Error> {
        None
    }
}
impl<'a> From<&'a str> for Tokenizer<'a> {
    /// Builds a tokenizer over `s`, keeping `s` both as a peekable char
    /// stream and as the backing slice for zero-copy token payloads.
    fn from(s: &'a str) -> Self {
        Tokenizer{
            s: s.char_indices().peekable(),
            orig: s,
        }
    }
}
impl<'a> Iterator for Tokenizer<'a> {
    type Item = Result<Token<'a>, TokenError>;
    /// Produces the next token, or `None` at end of input. Only spaces are
    /// skipped; any other whitespace (e.g. '\t') is an UnexpectedCharacter
    /// (see the `single_bad` test below).
    fn next(&mut self) -> Option<Result<Token<'a>, TokenError>> {
        loop {
            match self.s.next() {
                // Skip spaces and keep looping.
                Some((_, ' ')) => {}
                Some((_, '(')) => return Some(Ok(Token::LeftParen)),
                Some((_, ')')) => return Some(Ok(Token::RightParen)),
                Some((_, ',')) => return Some(Ok(Token::Comma)),
                Some((_, '=')) => return Some(Ok(Token::Equals)),
                // String literal: consume until the closing quote. The
                // payload slice excludes both quotes; there is no escape
                // handling.
                Some((start, '"')) => {
                    while let Some((end, ch)) = self.s.next() {
                        if ch == '"' {
                            return Some(Ok(Token::String(&self.orig[start+1..end])))
                        }
                    }
                    return Some(Err(TokenError::UnterminatedString{pos: start}))
                }
                // Identifier: peek (rather than next) so the terminating
                // character is left in the stream for the following call.
                Some((start, ch)) if is_ident_start(ch) => {
                    while let Some(&(end, ch)) = self.s.peek() {
                        if !is_ident_rest(ch) {
                            return Some(Ok(Token::Ident(&self.orig[start..end])))
                        } else {
                            self.s.next();
                        }
                    }
                    // Input ended inside the identifier: take the rest.
                    return Some(Ok(Token::Ident(&self.orig[start..])))
                }
                Some((pos, ch)) => return Some(Err(TokenError::UnexpectedCharacter{pos, ch})),
                None => return None
            }
        }
    }
}
fn is_ident_start(ch: char) -> bool {
ch == '_' || ('a' <= ch && ch <= 'z') || ('A' <= ch && ch <= 'Z')
}
fn is_ident_rest(ch: char) -> bool {
is_ident_start(ch) || ('0' <= ch && ch <= '9')
}
#[cfg(test)]
mod tests {
use super::{Tokenizer, Token, TokenError};
#[test]
fn single_token() {
let mut t = Tokenizer::from("(");
assert_eq!(t.next(), Some(Ok(Token::LeftParen)));
assert_eq!(t.next(), None);
t = Tokenizer::from(")");
assert_eq!(t.next(), Some(Ok(Token::RightParen)));
assert_eq!(t.next(), None);
t = Tokenizer::from(",");
assert_eq!(t.next(), Some(Ok(Token::Comma)));
assert_eq!(t.next(), None);
t = Tokenizer::from("=");
assert_eq!(t.next(), Some(Ok(Token::Equals)));
assert_eq!(t.next(), None);
t = Tokenizer::from(r#""foo""#);
assert_eq!(t.next(), Some(Ok(Token::String("foo"))));
assert_eq!(t.next(), None);
t = Tokenizer::from("foo");
assert_eq!(t.next(), Some(Ok(Token::Ident("foo"))));
assert_eq!(t.next(), None);
t = Tokenizer::from("_bar");
assert_eq!(t.next(), Some(Ok(Token::Ident("_bar"))));
assert_eq!(t.next(), None);
}
#[test]
fn single_bad() {
let mut t = Tokenizer::from("");
assert_eq!(t.next(), None);
t = Tokenizer::from(" ");
assert_eq!(t.next(), None);
t = Tokenizer::from("\t");
assert_eq!(t.next(), Some(Err(TokenError::UnexpectedCharacter{pos:0, ch: '\t'})));
assert_eq!(t.next(), None);
t = Tokenizer::from(r#"" "#);
assert_eq!(t.next(), Some(Err(TokenError::UnterminatedString{pos:0})));
assert_eq!(t.next(), None);
t = Tokenizer::from("7");
assert_eq!(t.next(), Some(Err(TokenError::UnexpectedCharacter{pos:0, ch: '7'})));
assert_eq!(t.next(), None);
}
} | {
match self {
&Token::Ident(ref s) => write!(f, "`{}`", s),
&Token::String(ref s) => write!(f, r#"`"{}"`"#, s),
t => t.get_type().fmt(f),
}
} |
states.go | package service
const (
// InstanceStateProvisioning represents the state where service instance
// provisioning is in progress
InstanceStateProvisioning = "PROVISIONING"
// InstanceStateProvisioned represents the state where service instance
// provisioning has completed successfully
InstanceStateProvisioned = "PROVISIONED"
// InstanceStateUpdating represents the state where service instance
// updating is in progress | // InstanceStateUpdated represents the state where service instance
// updating has completed successfully
// It redirects to InstanceStateProvisioned because it means the same thing
// to any other operations besides updating
InstanceStateUpdated = InstanceStateProvisioned
// InstanceStateProvisioningFailed represents the state where service instance
// provisioning has failed
InstanceStateProvisioningFailed = "PROVISIONING_FAILED"
// InstanceStateDeprovisioning represents the state where service instance
// deprovisioning is in progress
InstanceStateDeprovisioning = "DEPROVISIONING"
// InstanceStateDeprovisioningFailed represents the state where service
// instance deprovisioning has failed
InstanceStateDeprovisioningFailed = "DEPROVISIONING_FAILED"
// BindingStateBound represents the state where service binding has completed
// successfully
BindingStateBound = "BOUND"
// BindingStateBindingFailed represents the state where service binding has
// failed
BindingStateBindingFailed = "BINDING_FAILED"
// BindingStateUnbindingFailed represents the state where service unbinding
// has failed
BindingStateUnbindingFailed = "UNBINDING_FAILED"
) | InstanceStateUpdating = "UPDATING" |
restful_test.py | # -*- coding: utf-8 -*-
"""
@Time : 2020/12/11 11:57
@Author : Corey
"""
from flask import Flask, request
from flask_restful import Api, Resource, marshal, fields, reqparse
app = Flask(__name__)
# restful接口方法
api = Api(app)
class UserApi(Resource):
def get(self):
return ' | f post(self):
return 'update restful api data'
def delete(self):
return 'delete restful api data '
api.add_resource(UserApi, '/users', endpoint='user')
if __name__ == '__main__':
app.run() | get restful api data'
de |
mocks.ts | import {
IContainer,
IDisposable,
IIndexable,
IServiceLocator,
emptyArray,
} from '@aurelia/kernel';
import {
ExpressionKind,
IBinding,
IConnectableBinding,
IndexMap,
IObserverLocator,
ISignaler,
ISubscribable,
LifecycleFlags,
} from '@aurelia/runtime-html';
import type { Scope } from '@aurelia/runtime-html';
export class MockBinding implements IConnectableBinding {
public interceptor: this = this;
public id!: number;
public observerSlots!: number;
public version!: number;
public observerLocator!: IObserverLocator;
public locator!: IServiceLocator;
public $scope?: Scope | undefined;
public isBound!: boolean;
public calls: [keyof MockBinding, ...any[]][] = [];
public updateTarget(value: unknown, flags: LifecycleFlags): void {
this.trace('updateTarget', value, flags);
}
public updateSource(value: unknown, flags: LifecycleFlags): void {
this.trace('updateSource', value, flags);
}
public handleChange(newValue: unknown, _previousValue: unknown, flags: LifecycleFlags): void {
this.trace('handleChange', newValue, _previousValue, flags);
}
public observeProperty(obj: IIndexable, propertyName: string): void {
this.trace('observeProperty', obj, propertyName);
}
public unobserve(all?: boolean): void {
this.trace('unobserve', all);
}
public addObserver(observer: ISubscribable): void {
this.trace('addObserver', observer);
}
public $bind(flags: LifecycleFlags, scope: Scope): void {
this.trace('$bind', flags, scope);
}
public $unbind(flags: LifecycleFlags): void {
this.trace('$unbind', flags);
}
public trace(fnName: keyof MockBinding, ...args: any[]): void {
this.calls.push([fnName, ...args]);
}
public dispose(): void {
this.trace('dispose');
}
}
export class MockBindingBehavior {
public calls: [keyof MockBindingBehavior, ...any[]][] = [];
public bind(flags: LifecycleFlags, scope: Scope, binding: IBinding, ...rest: any[]): void {
this.trace('bind', flags, scope, binding, ...rest);
}
public unbind(flags: LifecycleFlags, scope: Scope, binding: IBinding, ...rest: any[]): void {
this.trace('unbind', flags, scope, binding, ...rest);
}
public trace(fnName: keyof MockBindingBehavior, ...args: any[]): void {
this.calls.push([fnName, ...args]);
}
}
export interface MockServiceLocator extends IContainer {}
export class MockServiceLocator {
public calls: [keyof MockServiceLocator, ...any[]][] = [];
public constructor(public registrations: Map<any, any>) {}
public get(key: any): any {
this.trace('get', key);
return this.registrations.get(key);
}
public trace(fnName: keyof MockServiceLocator, ...args: any[]): void {
this.calls.push([fnName, ...args]);
}
}
export interface MockSignaler extends ISignaler {}
export class | {
public calls: [keyof MockSignaler, ...any[]][] = [];
public dispatchSignal(...args: any[]): void {
this.trace('dispatchSignal', ...args);
}
public addSignalListener(...args: any[]): void {
this.trace('addSignalListener', ...args);
}
public removeSignalListener(...args: any[]): void {
this.trace('removeSignalListener', ...args);
}
public trace(fnName: keyof MockSignaler, ...args: any[]): void {
this.calls.push([fnName, ...args]);
}
}
export class MockPropertySubscriber {
public calls: [keyof MockPropertySubscriber, ...any[]][] = [];
public handleChange(newValue: any, previousValue: any, flags: LifecycleFlags): void {
this.trace(`handleChange`, newValue, previousValue, flags);
}
public trace(fnName: keyof MockPropertySubscriber, ...args: any[]): void {
this.calls.push([fnName, ...args]);
}
}
export class MockTracingExpression {
public $kind: ExpressionKind = ExpressionKind.HasBind | ExpressionKind.HasUnbind;
public calls: [keyof MockTracingExpression, ...any[]][] = [];
public constructor(public inner: any) {}
public evaluate(...args: any[]): any {
this.trace('evaluate', ...args);
return this.inner.evaluate(...args);
}
public assign(...args: any[]): any {
this.trace('assign', ...args);
return this.inner.assign(...args);
}
public connect(...args: any[]): any {
this.trace('connect', ...args);
this.inner.connect(...args);
}
public bind(...args: any[]): any {
this.trace('bind', ...args);
if (this.inner.bind) {
this.inner.bind(...args);
}
}
public unbind(...args: any[]): any {
this.trace('unbind', ...args);
if (this.inner.unbind) {
this.inner.unbind(...args);
}
}
public accept(...args: any[]): any {
this.trace('accept', ...args);
this.inner.accept(...args);
}
public trace(fnName: keyof MockTracingExpression, ...args: any[]): void {
this.calls.push([fnName, ...args]);
}
}
export class MockValueConverter {
public calls: [keyof MockValueConverter, ...any[]][] = [];
public fromView!: MockValueConverter['$fromView'];
public toView!: MockValueConverter['$toView'];
public constructor(methods: string[]) {
for (const method of methods) {
this[method as 'fromView' | 'toView'] = this[`$${method}` as '$toView' | '$fromView'];
}
}
public $fromView(value: any, ...args: any[]): any {
this.trace('fromView', value, ...args);
return value;
}
public $toView(value: any, ...args: any[]): any {
this.trace('toView', value, ...args);
return value;
}
public trace(fnName: keyof MockValueConverter, ...args: any[]): void {
this.calls.push([fnName, ...args]);
}
}
export class MockContext {
public log: any[] = [];
}
export type ExposedContext = IContainer & IDisposable & IContainer;
export class MockBrowserHistoryLocation {
public changeCallback?: (ev: PopStateEvent) => Promise<void>;
private readonly states: Record<string, unknown>[] = [{}];
private readonly paths: string[] = [''];
private index: number = 0;
public get length(): number {
return this.states.length;
}
public get state(): Record<string, unknown> {
return this.states[this.index];
}
public get path(): string {
return this.paths[this.index];
}
public get pathname(): string {
const parts = this.parts;
// parts.shift();
let path = parts.shift()!;
if (!path.startsWith('/')) {
path = `/${path}`;
}
return path;
}
public get search(): string {
const parts = this.parts;
// if (parts.shift()) {
// parts.shift();
// }
parts.shift();
const part: string = parts.shift()!;
return part !== undefined ? `?${part}` : '';
}
public get hash(): string {
const parts = this.parts;
// if (!parts.shift()) {
// parts.shift();
// }
parts.shift();
parts.shift();
const part: string = parts.shift()!;
return part !== undefined ? `#${part}` : '';
}
public set hash(value: string) {
if (value.startsWith('#')) {
value = value.substring(1);
}
const parts = this.parts;
// const hashFirst = parts.shift();
let path = parts.shift();
// if (hashFirst) {
// parts.shift();
// path += `#${value}`;
// const part = parts.shift();
// if (part !== undefined) {
// path += `?${part}`;
// }
// } else {
const part = parts.shift();
if (part !== undefined) {
path += `?${part}`;
}
parts.shift();
path += `#${value}`;
// }
this.pushState({}, null!, path!);
this.notifyChange();
}
public activate(): void { return; }
public deactivate(): void { return; }
// TODO: Fix a better split
private get parts(): string[] {
const parts = [];
const ph = this.path.split('#');
if (ph.length > 1) {
parts.unshift(ph.pop());
} else {
parts.unshift(undefined);
}
const pq = ph[0].split('?');
if (pq.length > 1) {
parts.unshift(pq.pop());
} else {
parts.unshift(undefined);
}
parts.unshift(pq[0]);
// const parts: (string | boolean)[] = this.path.split(/[#?]/);
// let search = this.path.indexOf('?') >= 0 ? this.path.indexOf('?') : 99999;
// let hash = this.path.indexOf('#') >= 0 ? this.path.indexOf('#') : 99999;
// parts.unshift(hash < search);
return parts as string[];
}
public pushState(data: Record<string, unknown>, title: string, path: string) {
this.states.splice(this.index + 1);
this.paths.splice(this.index + 1);
this.states.push(data);
this.paths.push(path);
this.index++;
}
public replaceState(data: Record<string, unknown>, title: string, path: string) {
this.states[this.index] = data;
this.paths[this.index] = path;
}
public go(movement: number) {
const newIndex = this.index + movement;
if (newIndex >= 0 && newIndex < this.states.length) {
this.index = newIndex;
this.notifyChange();
}
}
private notifyChange() {
if (this.changeCallback) {
this.changeCallback(null as any).catch((error: Error) => { throw error; });
}
}
}
export class ChangeSet implements IDisposable {
public readonly index: number;
public readonly flags: LifecycleFlags;
public get newValue(): any {
return this._newValue;
}
public get oldValue(): any {
return this._oldValue;
}
private _newValue: any;
private _oldValue: any;
public constructor(
index: number,
flags: LifecycleFlags,
newValue: any,
oldValue: any,
) {
this.index = index;
this.flags = flags;
this._newValue = newValue;
this._oldValue = oldValue;
}
public dispose(): void {
this._newValue = (void 0)!;
this._oldValue = (void 0)!;
}
}
export class ProxyChangeSet implements IDisposable {
public readonly index: number;
public readonly flags: LifecycleFlags;
public readonly key: PropertyKey;
public get newValue(): any {
return this._newValue;
}
public get oldValue(): any {
return this._oldValue;
}
private _newValue: any;
private _oldValue: any;
public constructor(
index: number,
flags: LifecycleFlags,
key: PropertyKey,
newValue: any,
oldValue: any,
) {
this.index = index;
this.flags = flags;
this.key = key;
this._newValue = newValue;
this._oldValue = oldValue;
}
public dispose(): void {
this._newValue = (void 0)!;
this._oldValue = (void 0)!;
}
}
export class CollectionChangeSet implements IDisposable {
public readonly index: number;
public readonly flags: LifecycleFlags;
public get indexMap(): IndexMap {
return this._indexMap;
}
private _indexMap: IndexMap;
public constructor(
index: number,
flags: LifecycleFlags,
indexMap: IndexMap,
) {
this.index = index;
this.flags = flags;
this._indexMap = indexMap;
}
public dispose(): void {
this._indexMap = (void 0)!;
}
}
export class SpySubscriber implements IDisposable {
public get changes(): ChangeSet[] {
if (this._changes === void 0) {
return emptyArray;
}
return this._changes;
}
public get proxyChanges(): ProxyChangeSet[] {
if (this._proxyChanges === void 0) {
return emptyArray;
}
return this._proxyChanges;
}
public get collectionChanges(): CollectionChangeSet[] {
if (this._collectionChanges === void 0) {
return emptyArray;
}
return this._collectionChanges;
}
public get hasChanges(): boolean {
return this._changes !== void 0;
}
public get hasProxyChanges(): boolean {
return this._proxyChanges !== void 0;
}
public get hasCollectionChanges(): boolean {
return this._collectionChanges !== void 0;
}
public get callCount(): number {
return this._callCount;
}
private _changes?: ChangeSet[];
private _proxyChanges?: ProxyChangeSet[];
private _collectionChanges?: CollectionChangeSet[];
private _callCount: number;
public constructor() {
this._changes = void 0;
this._proxyChanges = void 0;
this._collectionChanges = void 0;
this._callCount = 0;
}
public handleChange(newValue: any, oldValue: any, flags: LifecycleFlags): void {
if (this._changes === void 0) {
this._changes = [new ChangeSet(this._callCount++, flags, newValue, oldValue)];
} else {
this._changes.push(new ChangeSet(this._callCount++, flags, newValue, oldValue));
}
}
public handleProxyChange(key: PropertyKey, newValue: any, oldValue: any, flags: LifecycleFlags): void {
if (this._proxyChanges === void 0) {
this._proxyChanges = [new ProxyChangeSet(this._callCount++, flags, key, newValue, oldValue)];
} else {
this._proxyChanges.push(new ProxyChangeSet(this._callCount++, flags, key, newValue, oldValue));
}
}
public handleCollectionChange(indexMap: IndexMap, flags: LifecycleFlags): void {
if (this._collectionChanges === void 0) {
this._collectionChanges = [new CollectionChangeSet(this._callCount++, flags, indexMap)];
} else {
this._collectionChanges.push(new CollectionChangeSet(this._callCount++, flags, indexMap));
}
}
public dispose(): void {
if (this._changes !== void 0) {
this._changes.forEach(c => c.dispose());
this._changes = void 0;
}
if (this._proxyChanges !== void 0) {
this._proxyChanges.forEach(c => c.dispose());
this._proxyChanges = void 0;
}
if (this._collectionChanges !== void 0) {
this._collectionChanges.forEach(c => c.dispose());
this._collectionChanges = void 0;
}
this._callCount = 0;
}
}
| MockSignaler |
test_ccm.py | import logging
import time
import numpy as np
import pytest
from ophyd.sim import fake_device_cache, make_fake_device
from .. import ccm
from ..sim import FastMotor
logger = logging.getLogger(__name__)
SAMPLE_ALIO = 4.575 # Current value as of writing this file
SAMPLE_THETA = 1.2 # Modest angle
SAMPLE_WAVELENGTH = 1.5 # hard xray
# Make sure the calcs are properly inverted
def test_theta_alio_inversion():
logger.debug('test_theta_alio_inversion')
theta = ccm.alio_to_theta(SAMPLE_ALIO, ccm.default_theta0, ccm.default_gr,
ccm.default_gd)
alio_calc = ccm.theta_to_alio(theta, ccm.default_theta0, ccm.default_gr,
ccm.default_gd)
# Unlike the other inversions, this is just an approximation
assert np.isclose(alio_calc, SAMPLE_ALIO)
def test_wavelength_theta_inversion():
|
def test_energy_wavelength_inversion():
logger.debug('test_energy_wavelength_inversion')
energy = ccm.wavelength_to_energy(SAMPLE_WAVELENGTH)
wavelength_calc = ccm.energy_to_wavelength(energy)
assert wavelength_calc == SAMPLE_WAVELENGTH
@pytest.fixture(scope='function')
def fake_ccm():
return make_fake_ccm()
class FakeAlio(FastMotor):
kill = None
home = None
def make_fake_ccm():
fake_device_cache[ccm.CCMMotor] = FastMotor
fake_device_cache[ccm.CCMAlio] = FakeAlio
FakeCCM = make_fake_device(ccm.CCM)
fake_ccm = FakeCCM(alio_prefix='ALIO', theta2fine_prefix='THETA',
theta2coarse_prefix='THTA', chi2_prefix='CHI',
x_down_prefix='X:DOWN', x_up_prefix='X:UP',
y_down_prefix='Y:DOWN', y_up_north_prefix='Y:UP:NORTH',
y_up_south_prefix='Y:UP:SOUTH', in_pos=8, out_pos=0,
name='fake_ccm')
def init_pos(mot, pos=0):
mot.user_readback.sim_put(0)
mot.user_setpoint.sim_put(0)
mot.user_setpoint.sim_set_limits((0, 0))
mot.motor_spg.sim_put(2)
mot.part_number.sim_put('tasdf')
init_pos(fake_ccm.x.down)
init_pos(fake_ccm.x.up)
init_pos(fake_ccm.y.down)
init_pos(fake_ccm.y.up_north)
init_pos(fake_ccm.y.up_south)
fake_ccm.alio.set(SAMPLE_ALIO)
fake_ccm.energy.alio.set(SAMPLE_ALIO)
fake_ccm.energy_with_vernier.alio.set(SAMPLE_ALIO)
fake_ccm.energy_with_vernier.vernier.setpoint.sim_put(0)
return fake_ccm
def test_fake_ccm(fake_ccm):
logger.debug('test_fake_ccm')
fake_ccm.get()
# Make sure we set up the forward/inverse to use the right methods
def test_ccm_calc(fake_ccm):
logger.debug('test_ccm_calc')
calc = fake_ccm.energy
logger.debug('physics pos is %s', calc.position)
logger.debug('real pos is %s', calc.real_position)
logger.debug('sample alio is %s', SAMPLE_ALIO)
theta_func = ccm.alio_to_theta(
SAMPLE_ALIO,
calc.theta0_rad_val,
calc.gr_val,
calc.gd_val,
)
wavelength_func = ccm.theta_to_wavelength(theta_func, calc.dspacing_val)
energy_func = ccm.wavelength_to_energy(wavelength_func)
energy = calc.energy.position
assert energy == energy_func
calc.alio.move(0)
calc.move(energy, wait=False)
assert np.isclose(calc.alio.position, SAMPLE_ALIO)
calc.alio.move(calc.alio.position)
calc.move(energy=calc.energy.position, wait=False)
assert np.isclose(calc.alio.position, SAMPLE_ALIO)
# Make sure sync'd axes work and that unk/in/out states work
@pytest.mark.timeout(5)
def test_ccm_main(fake_ccm):
logger.debug('test_ccm_main')
fake_ccm.y.move(5, wait=False)
assert fake_ccm.y.down.user_setpoint.get() == 5
assert fake_ccm.y.up_north.user_setpoint.get() == 5
assert fake_ccm.y.up_south.user_setpoint.get() == 5
assert fake_ccm.removed
assert not fake_ccm.inserted
fake_ccm.x.down.user_readback.sim_put(8)
fake_ccm.x.up.user_readback.sim_put(8)
assert not fake_ccm.removed
assert fake_ccm.inserted
fake_ccm.x.down.user_readback.sim_put(4)
fake_ccm.x.up.user_readback.sim_put(4)
assert not fake_ccm.removed
assert not fake_ccm.inserted
fake_ccm.insert(wait=False)
assert fake_ccm.x.down.user_setpoint.get() == 8
assert fake_ccm.x.up.user_setpoint.get() == 8
fake_ccm.remove(wait=False)
assert fake_ccm.x.down.user_setpoint.get() == 0
assert fake_ccm.x.up.user_setpoint.get() == 0
@pytest.mark.timeout(5)
def test_vernier(fake_ccm):
logger.debug('test_vernier')
pseudopos = fake_ccm.energy_with_vernier
# Moving with vernier should move the energy request motor too
pseudopos.move(7, wait=False)
assert np.isclose(pseudopos.energy.position, 7)
assert pseudopos.vernier.position == 7000
pseudopos.move(8, wait=False)
assert np.isclose(pseudopos.energy.position, 8)
assert pseudopos.vernier.position == 8000
pseudopos.move(9, wait=False)
assert np.isclose(pseudopos.energy.position, 9)
assert pseudopos.vernier.position == 9000
# Small moves (less than 30eV) should be skipped on the energy request
pseudopos.move(9.001, wait=False)
assert np.isclose(pseudopos.energy.position, 9.001)
assert pseudopos.vernier.position == 9000
# Unless we set the option for not skipping them
pseudopos.vernier.skip_small_moves = False
pseudopos.move(9.002, wait=False)
assert np.isclose(pseudopos.energy.position, 9.002)
assert pseudopos.vernier.position == 9002
@pytest.mark.timeout(5)
def test_set_current_position(fake_ccm):
logger.debug('test_set_current_position')
mot = fake_ccm.energy.energy
for energy in range(6, 14):
mot.set_current_position(energy)
assert np.isclose(mot.position, energy)
@pytest.mark.timeout(5)
def test_check_valid_constant(fake_ccm):
logger.debug('test_check_valid_constant')
# First call to make_valid sends the first monitor update
def make_valid(sig, valid):
if valid:
sig.put(1)
else:
sig.put(0)
def make_conn(sig, conn):
sig._metadata['connected'] = conn
def output(sig):
return fake_ccm._check_valid_constant(sig, sig.get())
test_sig = fake_ccm.dspacing
# Can we get to all the enum values?
make_conn(test_sig, False)
assert output(test_sig) == ccm.CCMConstantWarning.ALWAYS_DISCONNECT
make_conn(test_sig, True)
make_valid(test_sig, False)
assert output(test_sig) == ccm.CCMConstantWarning.INVALID_CONNECT
make_conn(test_sig, False)
assert output(test_sig) == ccm.CCMConstantWarning.INVALID_DISCONNECT
make_conn(test_sig, True)
make_valid(test_sig, True)
assert output(test_sig) == ccm.CCMConstantWarning.NO_WARNING
make_conn(test_sig, False)
assert output(test_sig) == ccm.CCMConstantWarning.VALID_DISCONNECT
# theta0_deg is allowed to be zero, unlike the others
test_sig2 = fake_ccm.theta0_deg
make_conn(test_sig2, True)
make_valid(test_sig2, False)
assert output(test_sig2) == ccm.CCMConstantWarning.NO_WARNING
@pytest.mark.timeout(5)
def test_show_constant_warning(fake_ccm, caplog):
logger.debug('test_show_constant_warning')
for warning in (
ccm.CCMConstantWarning.NO_WARNING,
ccm.CCMConstantWarning.ALWAYS_DISCONNECT,
ccm.CCMConstantWarning.VALID_DISCONNECT,
ccm.CCMConstantWarning.INVALID_DISCONNECT,
ccm.CCMConstantWarning.INVALID_CONNECT,
):
caplog.clear()
with caplog.at_level(logging.WARNING):
fake_ccm._show_constant_warning(
warning,
fake_ccm.dspacing,
0.111111,
0.222222,
)
if warning == ccm.CCMConstantWarning.NO_WARNING:
assert len(caplog.records) == 0
else:
assert len(caplog.records) == 1
@pytest.mark.timeout(5)
def test_warn_invalid_constants(fake_ccm, caplog):
logger.debug('test_warn_invalid_constants')
# Trick the warning into thinking we've be initialized for a while
fake_ccm._init_time = time.monotonic() - 1000
fake_ccm.theta0_deg.put(0)
fake_ccm.dspacing.put(0)
fake_ccm.gr.put(0)
fake_ccm.gd.put(0)
# We expect three warnings from the fake PVs that start at 0
caplog.clear()
with caplog.at_level(logging.WARNING):
fake_ccm.warn_invalid_constants(only_new=False)
assert len(caplog.records) == 3
# We expect the warnings to not repeat
caplog.clear()
fake_ccm.warn_invalid_constants(only_new=True)
assert len(caplog.records) == 0
# Unless we ask them to
caplog.clear()
fake_ccm.warn_invalid_constants(only_new=False)
assert len(caplog.records) == 3
# Let's fix the issue and make sure no warnings are shown
fake_ccm.reset_calc_constant_defaults(confirm=False)
caplog.clear()
fake_ccm.warn_invalid_constants(only_new=False)
assert len(caplog.records) == 0
@pytest.mark.timeout(5)
def test_disconnected_ccm():
ccm.CCM(alio_prefix='ALIO', theta2fine_prefix='THETA',
theta2coarse_prefix='THTA', chi2_prefix='CHI',
x_down_prefix='X:DOWN', x_up_prefix='X:UP',
y_down_prefix='Y:DOWN', y_up_north_prefix='Y:UP:NORTH',
y_up_south_prefix='Y:UP:SOUTH', in_pos=8, out_pos=0,
name='ccm')
| logger.debug('test_wavelength_theta_inversion')
wavelength = ccm.theta_to_wavelength(SAMPLE_THETA, ccm.default_dspacing)
theta = ccm.wavelength_to_theta(wavelength, ccm.default_dspacing)
logger.debug('%s, %s', wavelength, theta)
assert np.isclose(theta, SAMPLE_THETA)
theta = ccm.wavelength_to_theta(SAMPLE_WAVELENGTH, ccm.default_dspacing)
wavelength = ccm.theta_to_wavelength(theta, ccm.default_dspacing)
logger.debug('%s, %s', wavelength, theta)
assert np.isclose(wavelength, SAMPLE_WAVELENGTH) |
option.go | package grpc
type interceptorConfig struct {
serviceName string
traceStreamCalls, traceStreamMessages bool
noDebugStack bool
}
func (cfg *interceptorConfig) serverServiceName() string {
if cfg.serviceName == "" {
return "grpc.server"
}
return cfg.serviceName
}
func (cfg *interceptorConfig) clientServiceName() string {
if cfg.serviceName == "" {
return "grpc.client"
}
return cfg.serviceName
}
// InterceptorOption represents an option that can be passed to the grpc unary
// client and server interceptors.
type InterceptorOption func(*interceptorConfig)
func | (cfg *interceptorConfig) {
// cfg.serviceName defaults are set in interceptors
cfg.traceStreamCalls = true
cfg.traceStreamMessages = true
}
// WithServiceName sets the given service name for the intercepted client.
func WithServiceName(name string) InterceptorOption {
return func(cfg *interceptorConfig) {
cfg.serviceName = name
}
}
// WithStreamCalls enables or disables tracing of streaming calls.
func WithStreamCalls(enabled bool) InterceptorOption {
return func(cfg *interceptorConfig) {
cfg.traceStreamCalls = enabled
}
}
// WithStreamMessages enables or disables tracing of streaming messages.
func WithStreamMessages(enabled bool) InterceptorOption {
return func(cfg *interceptorConfig) {
cfg.traceStreamMessages = enabled
}
}
// NoDebugStack disables debug stacks for traces with errors. This is useful in situations
// where errors are frequent and the overhead of calling debug.Stack may affect performance.
func NoDebugStack() InterceptorOption {
return func(cfg *interceptorConfig) {
cfg.noDebugStack = true
}
}
| defaults |
configsstorage_test.go | package conf_test
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/hsqds/conf"
)
// TestConfigsStorage
func | (t *testing.T) {
t.Parallel()
t.Run("should get and set config", func(t *testing.T) {
t.Parallel()
const svc1 = "service1"
cfg := conf.NewMapConfig(map[string]string{})
s := conf.NewSyncedConfigsStorage()
err := s.Set(svc1, cfg)
assert.Nil(t, err)
r, err := s.ByServiceName(svc1)
assert.Nil(t, err)
assert.Equal(t, cfg, r)
})
t.Run("should return ErrServiceConfigNotFound when no config for service", func(t *testing.T) {
t.Parallel()
s := conf.NewSyncedConfigsStorage()
_, err := s.ByServiceName("serviceName")
assert.IsType(t, conf.ErrServiceConfigNotFound{}, err)
})
t.Run("should return true if config exists at the storage and false otherwise", func(t *testing.T) {
t.Parallel()
const (
existingSvc = "service1"
inexistingSvc = "inexisting"
)
cfg := conf.NewMapConfig(map[string]string{})
s := conf.NewSyncedConfigsStorage()
err := s.Set(existingSvc, cfg)
assert.Nil(t, err)
ok := s.Has(existingSvc)
assert.True(t, ok)
ok = s.Has(inexistingSvc)
assert.False(t, ok)
})
}
| TestConfigsStorage |
mod.rs | // Copyright 2015 The Servo Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
mod os {
#[cfg(all(not(feature = "force-inprocess"), target_os = "linux"))]
include!("linux/mod.rs");
#[cfg(all(not(feature = "force-inprocess"), target_os = "macos"))]
include!("macos/mod.rs");
#[cfg(any(feature = "force-inprocess", target_os = "windows", target_os = "android"))] | }
pub use self::os::{OsIpcChannel, OsIpcOneShotServer, OsIpcReceiver, OsIpcReceiverSet};
pub use self::os::{OsIpcSelectionResult, OsIpcSender, OsIpcSharedMemory};
pub use self::os::{OsOpaqueIpcChannel, channel};
#[cfg(test)]
mod test; | include!("inprocess/mod.rs"); |
blankLineBetweenDocstringAndFirstImportPreserved.py | # pylint: disable=missing-docstring, invalid-name |
from mod import solve, in_between, Point
print(solve, in_between, Point, sys) |
"""2016 - Day 1 Puzzle Part 2 tests."""
import sys |
setDefaultAddress.ts | // Copyright 2019-2020 @Premiurly/polkassembly authors & contributors
// This software may be modified and distributed under the terms
// of the Apache-2.0 license. See the LICENSE file for details.
import AuthService from '../../services/auth';
import { AddressArgs, ChangeResponseType, Context } from '../../types';
import getTokenFromReq from '../../utils/getTokenFromReq';
import messages from '../../utils/messages';
export default async (parent: void, { address }: AddressArgs, ctx: Context): Promise<ChangeResponseType> => {
const token = getTokenFromReq(ctx.req);
const authServiceInstance = new AuthService(); | };
}; |
return {
message: messages.ADDRESS_DEFAULT_SUCCESS,
token: await authServiceInstance.SetDefaultAddress(token, address) |
sensor.py | """
This component provides HA sensor for Netgear Arlo IP cameras.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/sensor.arlo/
"""
import logging
import homeassistant.helpers.config_validation as cv
import voluptuous as vol
from homeassistant.const import (
ATTR_ATTRIBUTION,
CONF_MONITORED_CONDITIONS,
DEVICE_CLASS_HUMIDITY,
DEVICE_CLASS_TEMPERATURE,
TEMP_CELSIUS,
)
from homeassistant.core import callback
from homeassistant.helpers.config_validation import PLATFORM_SCHEMA
from homeassistant.helpers.entity import Entity
from homeassistant.helpers.icon import icon_for_battery_level
from . import COMPONENT_ATTRIBUTION, COMPONENT_BRAND, COMPONENT_DATA, COMPONENT_DOMAIN
from .pyaarlo.constant import (
AIR_QUALITY_KEY,
BATTERY_KEY,
CAPTURED_TODAY_KEY,
HUMIDITY_KEY,
LAST_CAPTURE_KEY,
RECENT_ACTIVITY_KEY,
SIGNAL_STR_KEY,
TEMPERATURE_KEY,
TOTAL_CAMERAS_KEY,
)
_LOGGER = logging.getLogger(__name__)
DEPENDENCIES = [COMPONENT_DOMAIN]
# sensor_type [ description, unit, icon, attribute ]
SENSOR_TYPES = {
"last_capture": ["Last", None, "run-fast", LAST_CAPTURE_KEY],
"total_cameras": ["Arlo Cameras", None, "video", TOTAL_CAMERAS_KEY],
"recent_activity": ["Recent Activity", None, "run-fast", RECENT_ACTIVITY_KEY],
"captured_today": ["Captured Today", None, "file-video", CAPTURED_TODAY_KEY],
"battery_level": ["Battery Level", "%", "battery-50", BATTERY_KEY],
"signal_strength": ["Signal Strength", None, "signal", SIGNAL_STR_KEY],
"temperature": ["Temperature", TEMP_CELSIUS, "thermometer", TEMPERATURE_KEY],
"humidity": ["Humidity", "%", "water-percent", HUMIDITY_KEY],
"air_quality": ["Air Quality", "ppm", "biohazard", AIR_QUALITY_KEY],
}
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_MONITORED_CONDITIONS, default=list(SENSOR_TYPES)): vol.All(
cv.ensure_list, [vol.In(SENSOR_TYPES)]
),
}
)
async def async_setup_platform(hass, config, async_add_entities, _discovery_info=None):
"""Set up an Arlo IP sensor."""
arlo = hass.data.get(COMPONENT_DATA)
if not arlo:
return
sensors = []
for sensor_type in config.get(CONF_MONITORED_CONDITIONS):
if sensor_type == "total_cameras":
sensors.append(ArloSensor(arlo, None, sensor_type))
else:
for camera in arlo.cameras:
if camera.has_capability(SENSOR_TYPES[sensor_type][3]):
sensors.append(ArloSensor(arlo, camera, sensor_type))
for doorbell in arlo.doorbells:
if doorbell.has_capability(SENSOR_TYPES[sensor_type][3]):
sensors.append(ArloSensor(arlo, doorbell, sensor_type))
for light in arlo.lights:
if light.has_capability(SENSOR_TYPES[sensor_type][3]):
sensors.append(ArloSensor(arlo, light, sensor_type))
async_add_entities(sensors)
class ArloSensor(Entity):
"""An implementation of a Netgear Arlo IP sensor."""
def __init__(self, arlo, device, sensor_type):
"""Initialize an Arlo sensor."""
sensor_details = SENSOR_TYPES[sensor_type]
if device is None:
self._name = sensor_details[0]
self._unique_id = sensor_type
self._device = arlo
else:
self._name = "{0} {1}".format(sensor_details[0], device.name)
self._unique_id = (
"{0}_{1}".format(sensor_details[0], device.entity_id)
.lower()
.replace(" ", "_")
)
self._device = device
self._sensor_type = sensor_type
self._icon = "mdi:{}".format(sensor_details[2])
self._state = None
self._attr = sensor_details[3]
_LOGGER.info("ArloSensor: %s created", self._name)
async def async_added_to_hass(self):
"""Register callbacks."""
@callback
def update_state(_device, attr, value):
_LOGGER.debug("callback:" + self._name + ":" + attr + ":" + str(value)[:80])
self._state = value
self.async_schedule_update_ha_state()
if self._attr is not None:
self._state = self._device.attribute(self._attr)
self._device.add_attr_callback(self._attr, update_state)
@property
def should_poll(self):
return False
@property
def unique_id(self):
"""Return a unique ID."""
return self._unique_id
@property
def state(self):
|
@property
def icon(self):
"""Icon to use in the frontend, if any."""
if self._sensor_type == "battery_level" and self._state is not None:
return icon_for_battery_level(
battery_level=int(self._state), charging=False
)
return self._icon
@property
def unit_of_measurement(self):
"""Return the units of measurement."""
return SENSOR_TYPES.get(self._sensor_type)[1]
@property
def device_class(self):
"""Return the device class of the sensor."""
if self._sensor_type == "temperature":
return DEVICE_CLASS_TEMPERATURE
if self._sensor_type == "humidity":
return DEVICE_CLASS_HUMIDITY
return None
@property
def device_state_attributes(self):
"""Return the device state attributes."""
attrs = {
ATTR_ATTRIBUTION: COMPONENT_ATTRIBUTION,
"brand": COMPONENT_BRAND,
"friendly_name": self._name,
"camera_name": self._device.name,
"device_id": self._device.device_id,
"model": self._device.model_id,
}
if self._sensor_type == "last_capture":
video = self._device.last_video
if video is not None:
attrs["object_type"] = video.object_type
attrs["object_region"] = video.object_region
attrs["thumbnail_url"] = video.thumbnail_url
attrs["video_url"] = video.video_url
else:
attrs["object_type"] = None
return attrs
| """Return the state of the sensor."""
return self._state |
dist2zip.py |
import base64
import bz2
import time
import os
import shutil
import util
def | ():
dist_dir = util.get_launch_name()
print('zipping %s (%s)' % (dist_dir, os.path.abspath(dist_dir)))
shutil.make_archive(dist_dir, 'zip', dist_dir)
if __name__ == '__main__':
main()
| main |
ComponentDestroyError.js | const ExtendableError = require("./ExtendableError");
class | extends ExtendableError {
constructor(error) {
super("Error destroying component.", error);
}
}
module.exports = exports = ComponentDestroyError; | ComponentDestroyError |
instantiate_inline_workflow_template.go | // Copyright 2020 Google LLC
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package dataproc shows how you can use the Cloud Dataproc Client library to manage
// Cloud Dataproc clusters. In this example, we'll show how to instantiate an inline
// workflow template.
package dataproc
// [START dataproc_instantiate_inline_workflow_template]
import (
"context"
"fmt"
"io"
dataproc "cloud.google.com/go/dataproc/apiv1"
"google.golang.org/api/option"
dataprocpb "google.golang.org/genproto/googleapis/cloud/dataproc/v1"
)
func instantiateInlineWorkflowTemplate(w io.Writer, projectID, region string) error |
// [END dataproc_instantiate_inline_workflow_template]
| {
// projectID := "your-project-id"
// region := "us-central1"
ctx := context.Background()
// Create the cluster client.
endpoint := region + "-dataproc.googleapis.com:443"
workflowTemplateClient, err := dataproc.NewWorkflowTemplateClient(ctx, option.WithEndpoint(endpoint))
if err != nil {
return fmt.Errorf("dataproc.NewWorkflowTemplateClient: %v", err)
}
// Create the Instantiate Inline Workflow Template Request.
req := &dataprocpb.InstantiateInlineWorkflowTemplateRequest{
Parent: fmt.Sprintf("projects/%s/regions/%s", projectID, region),
Template: &dataprocpb.WorkflowTemplate{
Jobs: []*dataprocpb.OrderedJob{
{
JobType: &dataprocpb.OrderedJob_HadoopJob{
HadoopJob: &dataprocpb.HadoopJob{
Driver: &dataprocpb.HadoopJob_MainJarFileUri{
MainJarFileUri: "file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar",
},
Args: []string{
"teragen",
"1000",
"hdfs:///gen/",
},
},
},
StepId: "teragen",
},
{
JobType: &dataprocpb.OrderedJob_HadoopJob{
HadoopJob: &dataprocpb.HadoopJob{
Driver: &dataprocpb.HadoopJob_MainJarFileUri{
MainJarFileUri: "file:///usr/lib/hadoop-mapreduce/hadoop-mapreduce-examples.jar",
},
Args: []string{
"terasort",
"hdfs:///gen/",
"hdfs:///sort/",
},
},
},
StepId: "terasort",
PrerequisiteStepIds: []string{
"teragen",
},
},
},
Placement: &dataprocpb.WorkflowTemplatePlacement{
Placement: &dataprocpb.WorkflowTemplatePlacement_ManagedCluster{
ManagedCluster: &dataprocpb.ManagedCluster{
ClusterName: "my-managed-cluster",
Config: &dataprocpb.ClusterConfig{
GceClusterConfig: &dataprocpb.GceClusterConfig{
// Leave "ZoneUri" empty for "Auto Zone Placement"
// ZoneUri: ""
ZoneUri: "us-central1-a",
},
},
},
},
},
},
}
// Create the cluster.
op, err := workflowTemplateClient.InstantiateInlineWorkflowTemplate(ctx, req)
if err != nil {
return fmt.Errorf("InstantiateInlineWorkflowTemplate: %v", err)
}
if err := op.Wait(ctx); err != nil {
return fmt.Errorf("InstantiateInlineWorkflowTemplate.Wait: %v", err)
}
// Output a success message.
fmt.Fprintf(w, "Workflow created successfully.")
return nil
} |
__init__.py | # ==================================================================================================
# Copyright 2011 Twitter, Inc.
# --------------------------------------------------------------------------------------------------
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this work except in compliance with the License.
# You may obtain a copy of the License in the LICENSE file, or at:
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==================================================================================================
from sys import version_info
from twitter.common.lang import Compatibility
if version_info[0] == 2:
from .ordereddict import OrderedDict
else:
from collections import OrderedDict
from .orderedset import OrderedSet
from .ringbuffer import RingBuffer
def maybe_list(value, expected_type=Compatibility.string, raise_type=ValueError):
"""Given a value that could be a single value or iterable of a particular type, always return a
list of that type.
By default the expected type is a string/unicode, but can be specified with the 'expected_type' kwarg,
which can be a type or tuple of types.
By default raises ValueError on 'expected_type' mismatch, but can be specified with the
'raise_type' kwarg.
Raises ValueError if any type mismatches.
"""
from collections import Iterable
if isinstance(value, expected_type):
return [value]
elif isinstance(value, Iterable):
real_values = list(value)
for v in real_values:
if not isinstance(v, expected_type):
raise raise_type('Expected elements of list to be %r, got value %s of type %r' %
(expected_type, v, type(v))) | else:
raise raise_type('Value must be of type %r or iterable of type %r' %
(expected_type, expected_type))
__all__ = (
maybe_list,
OrderedSet,
OrderedDict,
RingBuffer,
) | return real_values |
make_quiz.py | import json
from app import db
from app.models import *
from utils import utils
# turn annotation labels by hit X into a quiz Job
def annotation_to_quiz(hit_id, alt_hit_id, quiz_label):
'''
hit_id and alt_hit_id should be for the same task. hit_id has the strictly correct answers and alt_hit_id has possibly correct.
'''
anns = utils.get_all_db_res('select value, patch_id, image_id, label_id from annotation where hit_id = %d' % hit_id)
cmd = {}
cmd['label'] = quiz_label
values, patch_ids, image_ids, label_ids = zip(*anns)
attr_ids = sorted(set(label_ids), key=lambda x: label_ids.index(x))
attributes = []
for id in attr_ids:
name = Label.query.get(id).name
attributes.append({'id': id, 'name': name})
cmd['attributes'] = attributes
unique_patch_ids = sorted(set(patch_ids), key=lambda x: patch_ids.index(x))
patches = []
for patch_id in unique_patch_ids:
p = Patch.query.get(patch_id)
seg = p.segmentation
img_id = p.image_id
patches.append({'id': patch_id, 'image_id': img_id, 'segmentation': str(seg)})
cmd['patches'] = patches
answers = {}
for idx, val in enumerate(values):
try:
cur_dict = answers[str(patch_ids[idx])]
except KeyError, e:
answers[str(patch_ids[idx])] = {}
cur_dict = answers[str(patch_ids[idx])]
cur_dict[str(label_ids[idx])] = 1 if val else 0
cmd['answers'] = answers
alt_anns = utils.get_all_db_res('select value, patch_id, image_id, label_id from annotation where hit_id = %d' % alt_hit_id)
values, patch_ids, image_ids, label_ids = zip(*alt_anns)
alt_answers = {}
for idx, val in enumerate(values):
try:
cur_dict = alt_answers[str(patch_ids[idx])]
except KeyError, e:
alt_answers[str(patch_ids[idx])] = {}
cur_dict = alt_answers[str(patch_ids[idx])]
cur_dict[str(label_ids[idx])] = 1 if val else 0
cmd['alt_answers'] = alt_answers
j = Jobs(cmd=json.dumps(cmd), job_type='quiz')
db.session.add(j)
db.session.commit()
return j.id
def | (hit_id, alt_hit_id, quiz_label):
'''
hit_id and alt_hit_id should be for the same task. hit_id has the strictly correct answers and alt_hit_id has possibly correct.
'''
anns = utils.get_all_db_res('select value, patch_id, image_id, label_id from annotation where hit_id = %d' % hit_id)
cmd = {}
cmd['label'] = quiz_label
values, patch_ids, image_ids, label_ids = zip(*anns)
attr_id = label_ids[0]
name = Label.query.get(attr_id).name
attribute = {'id':attr_id, 'name': name}
cmd['attribute'] = attribute
unique_patch_ids = sorted(set(patch_ids), key=lambda x: patch_ids.index(x))
patches = []
# make patches have x, y, w, h
for patch_id in patch_ids:
p = Patch.query.get(patch_id)
seg = [json.loads(p.segmentation)[0]]
segx = [seg[0][ix] for ix in range(0,len(seg[0]),2)]
segy = [seg[0][iy] for iy in range(1,len(seg[0]),2)]
img_id = p.image_id
seg.append(p.x)
seg.append(p.y)
seg.append(p.width)
seg.append(p.height)
img = Image.query.get(img_id)
seg.append(img.width)
seg.append(img.height)
patches.append({'id': patch_id, 'image_id': img_id, 'segmentation': json.dumps(seg)})
cmd['patches'] = patches
answers = {}
for idx, val in enumerate(values):
try:
cur_dict = answers[str(patch_ids[idx])]
except KeyError, e:
answers[str(patch_ids[idx])] = {}
cur_dict = answers[str(patch_ids[idx])]
cur_dict[attr_id] = 1 if val else 0
cmd['answers'] = answers
alt_anns = utils.get_all_db_res('select value, patch_id, image_id, label_id from annotation where hit_id = %d' % alt_hit_id)
values, patch_ids, image_ids, label_ids = zip(*alt_anns)
attr_id = label_ids[0]
alt_answers = {}
for idx, val in enumerate(values):
try:
cur_dict = alt_answers[str(patch_ids[idx])]
except KeyError, e:
alt_answers[str(patch_ids[idx])] = {}
cur_dict = alt_answers[str(patch_ids[idx])]
cur_dict[attr_id] = 1 if val else 0
cmd['alt_answers'] = alt_answers
j = Jobs(cmd=json.dumps(cmd), job_type='quiz')
db.session.add(j)
db.session.commit()
return j.id
| allimgs_annotation_to_quiz |
orm_test.go | package main
import (
"context"
"fmt"
"log"
"testing"
"github.com/tmc/srvgql/ent"
)
func TestORM(t *testing.T) {
ctx := context.Background()
client, err := ent.Open("sqlite3", "file:ent?mode=memory&cache=shared&_fk=1")
if err != nil {
log.Fatalf("failed opening connection to sqlite: %v", err)
}
defer client.Close()
// Run the auto migration tool.
if err := client.Schema.Create(ctx); err != nil {
log.Fatalf("failed creating schema resources: %v", err)
}
// Create account
u, err := CreateAccount(ctx, client)
if err != nil {
log.Println(err)
}
o, err := CreateOrganization(ctx, client, u)
if err != nil {
log.Println(err)
}
fmt.Println(u)
fmt.Println(o)
a, err := QueryAccount(ctx, client)
if err != nil |
log.Println("account", a)
log.Println("orgs", a.Edges.Organization)
}
| {
log.Println("err querying accounts:", err)
} |
funding_project_api.py | from flask import request
from flask_restx import Resource, reqparse
from flask_restx._http import HTTPStatus
from werkzeug.datastructures import FileStorage
from ..service import funding_detail_service as fd_service
from ..service import funding_project_service as fp_service
from .dto import FundingDetailDTO, FundingProjectDTO
api = FundingProjectDTO.funding_project_api
_funding_project = FundingProjectDTO.funding_project
_funding_detail = FundingDetailDTO.funding_detail
_funding_detail_parser = reqparse.RequestParser()
_funding_detail_parser.add_argument(
"title", required=True, type=str, help="Funding detail title", location="form"
)
_funding_detail_parser.add_argument(
"description",
required=True,
type=str,
help="Funding detail description",
location="form",
)
_funding_detail_parser.add_argument(
"document_name", type=str, help="Document name", location="form"
)
_funding_detail_parser.add_argument("file", type=FileStorage, location="files")
@api.route("/")
class FundingProjectList(Resource):
@api.doc("list of funding projects")
@api.marshal_list_with(_funding_project, envelope="data")
def get(self):
"""List all funding projects."""
return fp_service.get_all_funding_projects()
@api.doc("Create a new funding project")
@api.expect(_funding_project, validate=True)
@api.response(HTTPStatus.NOT_FOUND, "Investor not found")
@api.response(HTTPStatus.NOT_FOUND, "Funding application not found")
@api.response(201, "Funding project successfully registered")
def post(self):
"""Create a new funding project."""
data = request.json
return fp_service.create_funding_project(data=data)
@api.route("/<funding_project_number>")
@api.param("funding_project_number", "Funding project number to process")
class FundingProjectByID(Resource):
@api.doc("Get a single funding project")
@api.marshal_with(_funding_project)
def get(self, funding_project_number):
"""Retrieve a funding project by number."""
funding_project = fp_service.get_funding_project_by_number(
funding_project_number
)
if not funding_project:
self.api.abort(
code=HTTPStatus.NOT_FOUND, message="Funding project not found"
)
else:
return funding_project
@api.doc("Delete an funding project")
@api.response(HTTPStatus.BAD_REQUEST, "Can't delete the funding project")
def delete(self, funding_project_number):
"""Delete an funding project."""
funding_project = fp_service.get_funding_project_by_number(
funding_project_number
)
if not funding_project:
self.api.abort(
code=HTTPStatus.NOT_FOUND, message="Funding project not found"
)
else:
return fp_service.delete_funding_project(funding_project)
@api.doc("Update a Funding project")
@api.expect(_funding_project, validate=True)
@api.response(HTTPStatus.CREATED, "Funding project successfully updated")
@api.response(HTTPStatus.BAD_REQUEST, "Can't update the Funding project")
def patch(self, funding_project_number):
"""Update a Funding project."""
funding_project = fp_service.get_funding_project_by_number(
funding_project_number
)
if not funding_project:
self.api.abort(
code=HTTPStatus.NOT_FOUND, message="Funding project not found"
)
else:
data = request.json
return fp_service.update_funding_project(data, funding_project)
@api.route("/<funding_project_number>/funding_details")
@api.param("funding_project_number", "Funding project number to process")
@api.response(HTTPStatus.NOT_FOUND, "Funding detail not found")
class FundingProjectDetail(Resource):
@api.doc("list of funding details of a funding project")
@api.marshal_list_with(_funding_detail, envelope="data")
def | (self, funding_project_number):
"""
List all funding details of a funding project.
"""
return fp_service.get_project_funding_details(funding_project_number)
@api.doc("Add funding detail")
@api.expect(_funding_detail_parser, validate=True)
@api.response(HTTPStatus.NOT_FOUND, "Funding project not found")
@api.response(201, "Funding detail successfully added")
def post(self, funding_project_number):
"""Add a funding detail."""
funding_project = fp_service.get_funding_project_by_number(
funding_project_number
)
if not funding_project:
self.api.abort(
code=HTTPStatus.NOT_FOUND, message="Funding project not found"
)
else:
data = _funding_detail_parser.parse_args()
if not data["title"] or not data["description"]:
self.api.abort(
code=HTTPStatus.NOT_FOUND, message="Empty inputs",
)
else:
return fd_service.create_funding_detail(data, funding_project)
@api.route("/<funding_project_number>/funding_details/<funding_detail_id>")
@api.param("funding_project_number", "Funding project number to process")
@api.param("funding_detail_id", "Funding detail ID to process")
@api.response(HTTPStatus.NOT_FOUND, "Funding detail not found")
class FundingProjectDetailByID(Resource):
@api.doc("Remove funding detail")
@api.response(HTTPStatus.NOT_FOUND, "Funding project not found")
@api.response(201, "Funding detail successfully added")
def delete(self, funding_project_number, funding_detail_id):
"""Remove funding detail."""
funding_project = fp_service.get_funding_project_by_number(
funding_project_number
)
if not funding_project:
self.api.abort(
code=HTTPStatus.NOT_FOUND, message="Funding project not found"
)
funding_detail = fd_service.get_funding_detail_by_id(funding_detail_id)
if not funding_detail:
self.api.abort(
code=HTTPStatus.NOT_FOUND, message="Funding detail not found"
)
return fp_service.remove_funding_detail(funding_project, funding_detail)
# @api.route("/<funding_project_number>/project_milestones")
# @api.param("funding_project_number", "Funding project number to process")
# @api.response(HTTPStatus.NOT_FOUND, "Funding detail not found")
# class FundingProjectMilestones(Resource):
# @api.doc("list of project milestones of a funding project")
# @api.marshal_list_with(_funding_milestone, envelope="data")
# def get(self, funding_project_number):
# """
# List all project milestones of a funding project.
# """
# return fp_service.get_project_project_milestones(funding_project_number)
# @api.route("/<funding_project_number>/fund_disbursements")
# @api.param("funding_project_number", "Funding project number to process")
# @api.response(HTTPStatus.NOT_FOUND, "Fund disbursements not found")
# class FundingProjectFundDisbursement(Resource):
# @api.doc("list of fund disbursements of a funding project")
# @api.marshal_list_with(_fund_disbursement, envelope="data")
# def get(self, funding_project_number):
# """
# List all fund disbursements of a funding project.
# """
# return fp_service.get_project_fund_disbursements(funding_project_number)
| get |
mod.rs | use crate::config::{project::Project, Config};
use crate::errors::AppError;
use ansi_term::Colour;
use rayon::prelude::*;
use std::collections::BTreeSet;
use slog::Logger;
use slog::{error, info};
use std::borrow::ToOwned;
use crate::util::random_colour;
use slog::{debug, o};
use std::io::{BufRead, BufReader};
use std::path::PathBuf;
use std::process::{Child, Command, Stdio};
use std::thread;
fn forward_process_output_to_stdout<T: std::io::Read>(read: T, prefix: &str, colour: Colour, atty: bool, mark_err: bool) -> Result<(), AppError> {
let mut buf = BufReader::new(read);
loop {
let mut line = String::new();
let read: usize = buf.read_line(&mut line)?;
if read == 0 {
break;
}
if mark_err {
let prefix = format!("{:>21.21} |", prefix);
if atty {
print!("{} {} {}", Colour::Red.paint("ERR"), colour.paint(prefix), line);
} else {
print!("ERR {} {}", prefix, line);
};
} else {
let prefix = format!("{:>25.25} |", prefix);
if atty {
print!("{} {}", colour.paint(prefix), line);
} else {
print!("{} {}", prefix, line);
};
}
}
Ok(())
}
fn is_stdout_a_tty() -> bool {
atty::is(atty::Stream::Stdout)
}
fn is_stderr_a_tty() -> bool {
atty::is(atty::Stream::Stderr)
}
pub fn spawn_maybe(shell: &[String], cmd: &str, workdir: &PathBuf, project_name: &str, colour: Colour, logger: &Logger) -> Result<(), AppError> {
let program: &str = shell
.first()
.ok_or_else(|| AppError::UserError("shell entry in project settings must have at least one element".to_owned()))?;
let rest: &[String] = shell.split_at(1).1;
let mut result: Child = Command::new(program)
.args(rest)
.arg(cmd)
.current_dir(&workdir)
.env("FW_PROJECT", project_name)
.stdout(Stdio::piped())
.stderr(Stdio::piped())
.stdin(Stdio::null())
.spawn()?;
let stdout_child = if let Some(stdout) = result.stdout.take() {
let project_name = project_name.to_owned();
Some(thread::spawn(move || {
let atty: bool = is_stdout_a_tty();
forward_process_output_to_stdout(stdout, &project_name, colour, atty, false)
}))
} else {
None
};
// stream stderr in this thread. no need to spawn another one.
if let Some(stderr) = result.stderr.take() {
let atty: bool = is_stderr_a_tty();
forward_process_output_to_stdout(stderr, project_name, colour, atty, true)?
}
if let Some(child) = stdout_child {
child.join().expect("Must be able to join child")?;
}
let status = result.wait()?;
if status.code().unwrap_or(0) > 0 {
error!(logger, "cmd failed");
Err(AppError::UserError("External command failed.".to_owned()))
} else {
info!(logger, "cmd finished");
Ok(())
}
}
pub fn init_threads(parallel_raw: &Option<String>, logger: &Logger) -> Result<(), AppError> |
pub fn foreach(
maybe_config: Result<Config, AppError>,
cmd: &str,
tags: &BTreeSet<String>,
logger: &Logger,
parallel_raw: &Option<String>,
) -> Result<(), AppError> {
let config = maybe_config?;
init_threads(parallel_raw, logger)?;
let projects: Vec<&Project> = config.projects.values().collect();
let script_results = projects
.par_iter()
.filter(|p| tags.is_empty() || p.tags.clone().unwrap_or_default().intersection(tags).count() > 0)
.map(|p| {
let shell = config.settings.get_shell_or_default();
let project_logger = logger.new(o!("project" => p.name.clone()));
let path = config.actual_path_to_project(p, &project_logger);
info!(project_logger, "Entering");
spawn_maybe(&shell, cmd, &path, &p.name, random_colour(), &project_logger)
})
.collect::<Vec<Result<(), AppError>>>();
script_results.into_iter().fold(Ok(()), Result::and)
}
| {
if let Some(ref raw_num) = *parallel_raw {
let num_threads = raw_num.parse::<usize>()?;
rayon::ThreadPoolBuilder::new().num_threads(num_threads).build_global().expect(
"Tried to initialize rayon more than once (this is a software bug on fw side, please file an issue at https://github.com/brocode/fw/issues/new )",
);
debug!(logger, "Rayon rolling with thread pool of size {}", raw_num)
}
Ok(())
} |
data_harvester.rs | //! This is the main file to house data collection functions.
use std::time::Instant;
#[cfg(target_os = "linux")]
use fxhash::FxHashMap;
#[cfg(not(target_os = "linux"))]
use sysinfo::{System, SystemExt};
#[cfg(feature = "battery")]
use battery::{Battery, Manager};
use crate::app::layout_manager::UsedWidgets;
use futures::join;
use super::DataFilters;
#[cfg(feature = "battery")]
pub mod batteries;
pub mod cpu;
pub mod disks;
pub mod memory;
pub mod network;
pub mod processes;
pub mod temperature;
#[derive(Clone, Debug)]
pub struct Data {
pub last_collection_time: Instant,
pub cpu: Option<cpu::CpuHarvest>,
pub load_avg: Option<cpu::LoadAvgHarvest>,
pub memory: Option<memory::MemHarvest>,
pub swap: Option<memory::MemHarvest>,
pub temperature_sensors: Option<Vec<temperature::TempHarvest>>,
pub network: Option<network::NetworkHarvest>,
pub list_of_processes: Option<Vec<processes::ProcessHarvest>>,
pub disks: Option<Vec<disks::DiskHarvest>>,
pub io: Option<disks::IoHarvest>,
#[cfg(feature = "battery")]
pub list_of_batteries: Option<Vec<batteries::BatteryHarvest>>,
}
impl Default for Data {
fn default() -> Self {
Data {
last_collection_time: Instant::now(),
cpu: None,
load_avg: None,
memory: None,
swap: None,
temperature_sensors: None,
list_of_processes: None,
disks: None,
io: None,
network: None,
#[cfg(feature = "battery")]
list_of_batteries: None,
}
}
}
impl Data {
pub fn cleanup(&mut self) {
self.io = None;
self.temperature_sensors = None;
self.list_of_processes = None;
self.disks = None;
self.memory = None;
self.swap = None;
self.cpu = None;
self.load_avg = None;
if let Some(network) = &mut self.network {
network.first_run_cleanup();
}
}
}
#[derive(Debug)]
pub struct DataCollector {
pub data: Data,
#[cfg(not(target_os = "linux"))]
sys: System,
previous_cpu_times: Vec<(cpu::PastCpuWork, cpu::PastCpuTotal)>,
previous_average_cpu_time: Option<(cpu::PastCpuWork, cpu::PastCpuTotal)>,
#[cfg(target_os = "linux")]
pid_mapping: FxHashMap<crate::Pid, processes::PrevProcDetails>,
#[cfg(target_os = "linux")]
prev_idle: f64,
#[cfg(target_os = "linux")]
prev_non_idle: f64,
mem_total_kb: u64,
temperature_type: temperature::TemperatureType,
use_current_cpu_total: bool,
last_collection_time: Instant,
total_rx: u64,
total_tx: u64,
show_average_cpu: bool,
widgets_to_harvest: UsedWidgets,
#[cfg(feature = "battery")]
battery_manager: Option<Manager>,
#[cfg(feature = "battery")]
battery_list: Option<Vec<Battery>>,
filters: DataFilters,
}
impl DataCollector {
pub fn new(filters: DataFilters) -> Self {
DataCollector {
data: Data::default(),
#[cfg(not(target_os = "linux"))]
sys: System::new_with_specifics(sysinfo::RefreshKind::new()),
previous_cpu_times: vec![],
previous_average_cpu_time: None,
#[cfg(target_os = "linux")]
pid_mapping: FxHashMap::default(),
#[cfg(target_os = "linux")]
prev_idle: 0_f64,
#[cfg(target_os = "linux")]
prev_non_idle: 0_f64,
mem_total_kb: 0,
temperature_type: temperature::TemperatureType::Celsius,
use_current_cpu_total: false,
last_collection_time: Instant::now(),
total_rx: 0,
total_tx: 0,
show_average_cpu: false,
widgets_to_harvest: UsedWidgets::default(),
#[cfg(feature = "battery")]
battery_manager: None,
#[cfg(feature = "battery")]
battery_list: None,
filters,
}
}
pub fn init(&mut self) {
#[cfg(target_os = "linux")]
{
futures::executor::block_on(self.initialize_memory_size());
}
#[cfg(not(target_os = "linux"))]
{
self.sys.refresh_memory();
self.mem_total_kb = self.sys.get_total_memory();
// TODO: Would be good to get this and network list running on a timer instead...?
// Refresh components list once...
if self.widgets_to_harvest.use_temp {
self.sys.refresh_components_list();
}
// Refresh network list once...
if cfg!(target_os = "windows") && self.widgets_to_harvest.use_net {
self.sys.refresh_networks_list();
}
}
#[cfg(feature = "battery")]
|
futures::executor::block_on(self.update_data());
std::thread::sleep(std::time::Duration::from_millis(250));
self.data.cleanup();
// trace!("Enabled widgets to harvest: {:#?}", self.widgets_to_harvest);
}
#[cfg(target_os = "linux")]
async fn initialize_memory_size(&mut self) {
self.mem_total_kb = if let Ok(mem) = heim::memory::memory().await {
mem.total().get::<heim::units::information::kilobyte>()
} else {
1
};
}
pub fn set_collected_data(&mut self, used_widgets: UsedWidgets) {
self.widgets_to_harvest = used_widgets;
}
pub fn set_temperature_type(&mut self, temperature_type: temperature::TemperatureType) {
self.temperature_type = temperature_type;
}
pub fn set_use_current_cpu_total(&mut self, use_current_cpu_total: bool) {
self.use_current_cpu_total = use_current_cpu_total;
}
pub fn set_show_average_cpu(&mut self, show_average_cpu: bool) {
self.show_average_cpu = show_average_cpu;
}
pub async fn update_data(&mut self) {
#[cfg(not(target_os = "linux"))]
{
if self.widgets_to_harvest.use_proc {
self.sys.refresh_processes();
}
if self.widgets_to_harvest.use_temp {
self.sys.refresh_components();
}
if cfg!(target_os = "windows") && self.widgets_to_harvest.use_net {
self.sys.refresh_networks();
}
}
let current_instant = std::time::Instant::now();
// CPU
if self.widgets_to_harvest.use_cpu {
if let Ok(cpu_data) = cpu::get_cpu_data_list(
self.show_average_cpu,
&mut self.previous_cpu_times,
&mut self.previous_average_cpu_time,
)
.await
{
self.data.cpu = Some(cpu_data);
}
#[cfg(target_family = "unix")]
{
// Load Average
if let Ok(load_avg_data) = cpu::get_load_avg().await {
self.data.load_avg = Some(load_avg_data);
}
}
}
// Batteries
#[cfg(feature = "battery")]
{
if let Some(battery_manager) = &self.battery_manager {
if let Some(battery_list) = &mut self.battery_list {
self.data.list_of_batteries =
Some(batteries::refresh_batteries(battery_manager, battery_list));
}
}
}
if self.widgets_to_harvest.use_proc {
if let Ok(process_list) = {
#[cfg(target_os = "linux")]
{
processes::get_process_data(
&mut self.prev_idle,
&mut self.prev_non_idle,
&mut self.pid_mapping,
self.use_current_cpu_total,
current_instant
.duration_since(self.last_collection_time)
.as_secs(),
self.mem_total_kb,
)
}
#[cfg(not(target_os = "linux"))]
{
processes::get_process_data(
&self.sys,
self.use_current_cpu_total,
self.mem_total_kb,
)
}
} {
self.data.list_of_processes = Some(process_list);
}
}
let network_data_fut = {
#[cfg(target_os = "windows")]
{
network::get_network_data(
&self.sys,
self.last_collection_time,
&mut self.total_rx,
&mut self.total_tx,
current_instant,
self.widgets_to_harvest.use_net,
&self.filters.net_filter,
)
}
#[cfg(not(target_os = "windows"))]
{
network::get_network_data(
self.last_collection_time,
&mut self.total_rx,
&mut self.total_tx,
current_instant,
self.widgets_to_harvest.use_net,
&self.filters.net_filter,
)
}
};
let mem_data_fut = memory::get_mem_data(self.widgets_to_harvest.use_mem);
let disk_data_fut = disks::get_disk_usage(
self.widgets_to_harvest.use_disk,
&self.filters.disk_filter,
&self.filters.mount_filter,
);
let disk_io_usage_fut = disks::get_io_usage(self.widgets_to_harvest.use_disk);
let temp_data_fut = {
#[cfg(not(target_os = "linux"))]
{
temperature::get_temperature_data(
&self.sys,
&self.temperature_type,
self.widgets_to_harvest.use_temp,
&self.filters.temp_filter,
)
}
#[cfg(target_os = "linux")]
{
temperature::get_temperature_data(
&self.temperature_type,
self.widgets_to_harvest.use_temp,
&self.filters.temp_filter,
)
}
};
let (net_data, mem_res, disk_res, io_res, temp_res) = join!(
network_data_fut,
mem_data_fut,
disk_data_fut,
disk_io_usage_fut,
temp_data_fut
);
if let Ok(net_data) = net_data {
if let Some(net_data) = &net_data {
self.total_rx = net_data.total_rx;
self.total_tx = net_data.total_tx;
}
self.data.network = net_data;
}
if let Ok(memory) = mem_res.0 {
self.data.memory = memory;
}
if let Ok(swap) = mem_res.1 {
self.data.swap = swap;
}
if let Ok(disks) = disk_res {
self.data.disks = disks;
}
if let Ok(io) = io_res {
self.data.io = io;
}
if let Ok(temp) = temp_res {
self.data.temperature_sensors = temp;
}
// Update time
self.data.last_collection_time = current_instant;
self.last_collection_time = current_instant;
}
}
| {
if self.widgets_to_harvest.use_battery {
if let Ok(battery_manager) = Manager::new() {
if let Ok(batteries) = battery_manager.batteries() {
let battery_list: Vec<Battery> = batteries.filter_map(Result::ok).collect();
if !battery_list.is_empty() {
self.battery_list = Some(battery_list);
self.battery_manager = Some(battery_manager);
}
}
}
}
} |
core_plugin_test.py | # Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests the TensorBoard core endpoints."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections.abc
import contextlib
import json
import os
import six
import zipfile
try:
# python version >= 3.3
from unittest import mock
except ImportError:
import mock # pylint: disable=unused-import
import tensorflow as tf
from werkzeug import test as werkzeug_test
from werkzeug import wrappers
from tensorboard.backend import application
from tensorboard.backend.event_processing import (
plugin_event_multiplexer as event_multiplexer,
)
from tensorboard.data import provider
from tensorboard.plugins import base_plugin
from tensorboard.plugins.core import core_plugin
from tensorboard.util import test_util
FAKE_INDEX_HTML = b"<!doctype html><title>fake-index</title>"
class FakeFlags(object):
    """Minimal stand-in for TensorBoard's parsed command-line flags.

    Carries only the attributes the core plugin loader inspects; the
    defaults model "no flags given on the command line".
    """

    def __init__(
        self,
        bind_all=False,
        host=None,
        inspect=False,
        version_tb=False,
        logdir="",
        logdir_spec="",
        event_file="",
        db="",
        path_prefix="",
        generic_data="true",
    ):
        # Copy each keyword straight onto the instance so tests can read
        # the values back exactly as a real flags object would expose them.
        for attr_name, attr_value in (
            ("bind_all", bind_all),
            ("host", host),
            ("inspect", inspect),
            ("version_tb", version_tb),
            ("logdir", logdir),
            ("logdir_spec", logdir_spec),
            ("event_file", event_file),
            ("db", db),
            ("path_prefix", path_prefix),
            ("generic_data", generic_data),
        ):
            setattr(self, attr_name, attr_value)
class CorePluginFlagsTest(tf.test.TestCase):
    """Validates CorePluginLoader.fix_flags() flag combinations.

    NOTE(review): the first test method's name was elided in this copy of
    the file (FIM placeholder '|'); restore it before running.
    """

    def | (self):
        loader = core_plugin.CorePluginLoader()
        # These flag combinations must all be accepted without raising.
        loader.fix_flags(FakeFlags(version_tb=True))
        loader.fix_flags(FakeFlags(inspect=True, logdir="/tmp"))
        loader.fix_flags(FakeFlags(inspect=True, event_file="/tmp/event.out"))
        loader.fix_flags(FakeFlags(inspect=False, logdir="/tmp"))
        loader.fix_flags(FakeFlags(inspect=False, db="sqlite:foo"))
        # User can pass both, although the behavior is not clearly defined.
        loader.fix_flags(
            FakeFlags(inspect=False, logdir="/tmp", db="sqlite:foo")
        )
        # Expected error-message patterns for the rejection cases below.
        logdir_or_db_req = r"A logdir or db must be specified"
        one_of_event_or_logdir_req = (
            r"Must specify either --logdir.*but not both.$"
        )
        event_or_logdir_req = r"Must specify either --logdir or --event_file.$"
        with six.assertRaisesRegex(self, ValueError, event_or_logdir_req):
            loader.fix_flags(FakeFlags(inspect=True))
        with six.assertRaisesRegex(
            self, ValueError, one_of_event_or_logdir_req
        ):
            loader.fix_flags(
                FakeFlags(
                    inspect=True, logdir="/tmp", event_file="/tmp/event.out"
                )
            )
        with six.assertRaisesRegex(self, ValueError, logdir_or_db_req):
            loader.fix_flags(FakeFlags(inspect=False))
        with six.assertRaisesRegex(self, ValueError, logdir_or_db_req):
            loader.fix_flags(
                FakeFlags(inspect=False, event_file="/tmp/event.out")
            )

    def testPathPrefix_stripsTrailingSlashes(self):
        # fix_flags() should normalize any number of trailing slashes.
        loader = core_plugin.CorePluginLoader()
        for path_prefix in ("/hello", "/hello/", "/hello//", "/hello///"):
            flag = FakeFlags(
                inspect=False, logdir="/tmp", path_prefix=path_prefix
            )
            loader.fix_flags(flag)
            self.assertEqual(
                flag.path_prefix,
                "/hello",
                "got %r (input %r)" % (flag.path_prefix, path_prefix),
            )

    def testPathPrefix_mustStartWithSlash(self):
        # A relative (slash-less) path prefix is a configuration error.
        loader = core_plugin.CorePluginLoader()
        flag = FakeFlags(inspect=False, logdir="/tmp", path_prefix="noslash")
        with self.assertRaises(base_plugin.FlagsError) as cm:
            loader.fix_flags(flag)
        msg = str(cm.exception)
        self.assertIn("must start with slash", msg)
        self.assertIn(repr("noslash"), msg)
class CorePluginNoDataTest(tf.test.TestCase):
    """Exercises core-plugin HTTP routes against a logdir with no event data."""

    def setUp(self):
        super(CorePluginNoDataTest, self).setUp()
        # Serve fake zipped assets so "/" has an index.html to return.
        context = base_plugin.TBContext(
            assets_zip_provider=get_test_assets_zip_provider(),
            logdir=self.get_temp_dir(),
            multiplexer=event_multiplexer.EventMultiplexer(),
            window_title="title foo",
        )
        self.plugin = core_plugin.CorePlugin(context)
        app = application.TensorBoardWSGI([self.plugin])
        self.server = werkzeug_test.Client(app, wrappers.BaseResponse)

    def _get_json(self, server, path):
        # GET `path`, assert a JSON 200 response, and decode the body.
        response = server.get(path)
        self.assertEqual(200, response.status_code)
        self.assertEqual(
            "application/json", response.headers.get("Content-Type")
        )
        return json.loads(response.get_data().decode("utf-8"))

    def testRoutesProvided(self):
        """Tests that the plugin offers the correct routes."""
        routes = self.plugin.get_plugin_apps()
        self.assertIsInstance(routes["/data/logdir"], collections.abc.Callable)
        self.assertIsInstance(routes["/data/runs"], collections.abc.Callable)

    def testIndex_returnsActualHtml(self):
        """Test the format of the root / endpoint."""
        response = self.server.get("/")
        self.assertEqual(200, response.status_code)
        self.assertStartsWith(response.headers.get("Content-Type"), "text/html")
        html = response.get_data()
        self.assertEqual(html, FAKE_INDEX_HTML)

    def testDataPaths_disableAllCaching(self):
        """Test the format of the /data/runs endpoint."""
        for path in ("/data/runs", "/data/logdir"):
            response = self.server.get(path)
            self.assertEqual(200, response.status_code, msg=path)
            # "Expires: 0" tells browsers/proxies never to cache these.
            self.assertEqual("0", response.headers.get("Expires"), msg=path)

    def testEnvironmentForWindowTitle(self):
        """Test that the environment route correctly returns the window
        title."""
        parsed_object = self._get_json(self.server, "/data/environment")
        self.assertEqual(parsed_object["window_title"], "title foo")

    def testEnvironmentForLogdir(self):
        """Test that the environment route correctly returns the logdir."""
        parsed_object = self._get_json(self.server, "/data/environment")
        self.assertEqual(parsed_object["data_location"], self.get_temp_dir())

    def testLogdir(self):
        """Test the format of the data/logdir endpoint."""
        parsed_object = self._get_json(self.server, "/data/logdir")
        self.assertEqual(parsed_object, {"logdir": self.get_temp_dir()})
class CorePluginExperimentMetadataTest(tf.test.TestCase):
    """Checks /data/environment when a data provider supplies experiment metadata."""

    def _get_json(self, server, path):
        # GET `path`, assert a JSON 200 response, and decode the body.
        response = server.get(path)
        self.assertEqual(200, response.status_code)
        self.assertEqual(
            "application/json", response.headers.get("Content-Type")
        )
        return json.loads(response.get_data().decode("utf-8"))

    def testGetEnvironmentDataWithExperimentMetadata(self):
        """Test environment route returns correct metadata about experiment."""

        class FakeDataProvider(object):
            def data_location(self, ctx, *, experiment_id):
                del experiment_id  # Unused.
                return ""

            def experiment_metadata(self, ctx, *, experiment_id):
                del experiment_id  # Unused.
                # Non-ASCII name/description double as an encoding check.
                return provider.ExperimentMetadata(
                    experiment_name="Experiment #5 (実験#5)",
                    experiment_description="Take five (😊)",
                    creation_time=1234.5,
                )

        self.context = base_plugin.TBContext(
            flags=FakeFlags(generic_data="true"),
            data_provider=FakeDataProvider(),
        )
        self.plugin = core_plugin.CorePlugin(self.context)
        app = application.TensorBoardWSGI([self.plugin])
        self.server = werkzeug_test.Client(app, wrappers.BaseResponse)
        parsed_object = self._get_json(self.server, "/data/environment")
        self.assertEqual(parsed_object["data_location"], "")
        self.assertEqual(parsed_object["window_title"], None)
        self.assertEqual(
            parsed_object["experiment_name"], "Experiment #5 (実験#5)"
        )
        self.assertEqual(
            parsed_object["experiment_description"], "Take five (😊)"
        )
        self.assertEqual(parsed_object["creation_time"], 1234.5)

    def testGetEnvironmentDataWithNoExperimentMetadata(self):
        """Test environment route works when no experiment metadata exists."""

        class FakeDataProvider(object):
            def data_location(self, ctx, *, experiment_id):
                del experiment_id  # Unused.
                return ""

            def experiment_metadata(self, ctx, *, experiment_id):
                del experiment_id  # Unused.
                return None

        self.context = base_plugin.TBContext(
            flags=FakeFlags(generic_data="true"),
            data_provider=FakeDataProvider(),
        )
        self.plugin = core_plugin.CorePlugin(self.context)
        app = application.TensorBoardWSGI([self.plugin])
        self.server = werkzeug_test.Client(app, wrappers.BaseResponse)
        parsed_object = self._get_json(self.server, "/data/environment")
        self.assertEqual(parsed_object["data_location"], "")
        self.assertEqual(parsed_object["window_title"], None)
        # With no metadata, the experiment keys must be absent (not null).
        self.assertNotIn("experiment_name", parsed_object)
        self.assertNotIn("experiment_description", parsed_object)
        self.assertNotIn("creation_time", parsed_object)
class CorePluginTestBase(object):
    """Shared fixture for core-plugin tests that need real on-disk runs.

    Intended to be mixed into a TestCase subclass; create_multiplexer()
    is a hook for subclasses.
    """

    def setUp(self):
        super(CorePluginTestBase, self).setUp()
        self.logdir = self.get_temp_dir()
        self.multiplexer = event_multiplexer.EventMultiplexer()
        context = base_plugin.TBContext(
            assets_zip_provider=get_test_assets_zip_provider(),
            logdir=self.logdir,
            multiplexer=self.multiplexer,
        )
        self.plugin = core_plugin.CorePlugin(context)
        app = application.TensorBoardWSGI([self.plugin])
        self.server = werkzeug_test.Client(app, wrappers.BaseResponse)

    def create_multiplexer(self):
        # Abstract hook for subclasses; not called by this base directly.
        raise NotImplementedError()

    def _add_run(self, run_name):
        # Write one summary under logdir/<run_name> and reload the
        # multiplexer so the new run becomes visible to the plugin.
        run_path = os.path.join(self.logdir, run_name)
        with test_util.FileWriter(run_path) as writer:
            writer.add_test_summary("foo")
        self.multiplexer.AddRunsFromDirectory(self.logdir)
        self.multiplexer.Reload()

    def _get_json(self, server, path):
        # GET `path`, assert a JSON 200 response, and decode the body.
        response = server.get(path)
        self.assertEqual(200, response.status_code)
        self.assertEqual(
            "application/json", response.headers.get("Content-Type")
        )
        return json.loads(response.get_data().decode("utf-8"))

    def testRuns(self):
        """Test the format of the /data/runs endpoint."""
        self._add_run("run1")
        run_json = self._get_json(self.server, "/data/runs")
        self.assertEqual(run_json, ["run1"])

    def testRunsAppendOnly(self):
        """Test that new runs appear after old ones in /data/runs."""
        # Stubbed first-event wall times; None simulates a run whose
        # timestamp cannot be determined.
        fake_wall_times = {
            "run1": 1234.0,
            "avocado": 2345.0,
            "zebra": 3456.0,
            "ox": 4567.0,
            "mysterious": None,
            "enigmatic": None,
        }

        def FirstEventTimestamp_stub(run_name):
            # Resolve the stubbed wall time by suffix match on the run path.
            matches = [
                candidate_name
                for candidate_name in fake_wall_times
                if run_name.endswith(candidate_name)
            ]
            self.assertEqual(len(matches), 1, "%s (%s)" % (matches, run_name))
            wall_time = fake_wall_times[matches[0]]
            if wall_time is None:
                raise ValueError("No event timestamp could be found")
            else:
                return wall_time

        with mock.patch.object(
            self.multiplexer, "FirstEventTimestamp"
        ) as mock_first_event_timestamp:
            mock_first_event_timestamp.side_effect = FirstEventTimestamp_stub
            # Start with a single run.
            self._add_run("run1")
            # Add one run: it should come last.
            self._add_run("avocado")
            self.assertEqual(
                self._get_json(self.server, "/data/runs"), ["run1", "avocado"],
            )
            # Add another run: it should come last, too.
            self._add_run("zebra")
            self.assertEqual(
                self._get_json(self.server, "/data/runs"),
                ["run1", "avocado", "zebra"],
            )
            # And maybe there's a run for which we somehow have no timestamp.
            self._add_run("mysterious")
            self.assertEqual(
                self._get_json(self.server, "/data/runs"),
                ["run1", "avocado", "zebra", "mysterious"],
            )
            # Add another timestamped run: it should come before the timestamp-less one.
            self._add_run("ox")
            self.assertEqual(
                self._get_json(self.server, "/data/runs"),
                ["run1", "avocado", "zebra", "ox", "mysterious"],
            )
            # Add another timestamp-less run, lexicographically before the other one:
            # it should come after all timestamped runs but first among timestamp-less.
            self._add_run("enigmatic")
            self.assertEqual(
                self._get_json(self.server, "/data/runs"),
                ["run1", "avocado", "zebra", "ox", "enigmatic", "mysterious"],
            )
def get_test_assets_zip_provider():
    """Build a provider of zipped fake frontend assets.

    The zip holds a single ``index.html`` whose contents are
    FAKE_INDEX_HTML; every call of the returned callable yields a fresh,
    self-closing BytesIO over the same archive bytes.
    """
    buffer = six.BytesIO()
    archive = zipfile.ZipFile(buffer, mode="w", compression=zipfile.ZIP_DEFLATED)
    with archive:
        archive.writestr("index.html", FAKE_INDEX_HTML)
    payload = buffer.getvalue()

    def provider():
        return contextlib.closing(six.BytesIO(payload))

    return provider
if __name__ == "__main__":
    # Run this module's test cases via the TensorFlow test runner.
    tf.test.main()
| testFlag |
release.py | import toml
from os.path import join, abspath, dirname
import sys
import re
root = abspath(join(dirname(__file__), ".."))
def update_cargo(ver):
    """Rewrite Cargo.toml so [package].version equals ``ver``."""
    manifest_path = join(root, "Cargo.toml")
    manifest = toml.load(manifest_path)
    manifest['package']['version'] = ver
    with open(manifest_path, "w") as handle:
        toml.dump(manifest, handle)
def update_pyproject(ver):
    """Rewrite pyproject.toml so [tool.poetry].version equals ``ver``.

    Bug fix: the table in pyproject.toml is named ``tool`` (singular) —
    the previous ``raw['tools']`` lookup raised KeyError on every run.
    """
    path = join(root, "pyproject.toml")
    raw = toml.load(path)
    raw['tool']['poetry']['version'] = ver
    with open(path, "w") as f:
        toml.dump(raw, f)
# NOTE(review): the function name was elided in this copy of the file
# (FIM placeholder '|'); the __main__ block below calls it as
# update_setuppy — restore that name before running.
def | (ver):
    """Rewrite setup.py's version='X.Y.Z' argument to ``ver``."""
    path = join(root, "setup.py")
    with open(path) as f:
        raw = f.read()
    # Matches version='<digits>.<digits>.<digits>', with trailing comma.
    pat = r"version\=\'(\d+\.\d+\.\d+)\'\,"
    raw = re.sub(pat, "version=\'" + ver + "\',", raw)
    with open(path, "w") as f:
        f.write(raw)
def update_sphinx(ver):
    """Point the Sphinx ``release`` string in docs/source/conf.py at ``ver``."""
    conf_path = join(root, "docs", "source", "conf.py")
    with open(conf_path) as handle:
        contents = handle.read()
    # Matches release = '<digits>.<digits>.<digits>'.
    version_re = r"release\s\=\s\'(\d+\.\d+\.\d+)\'"
    replacement = "release = \'" + ver + "\'"
    contents = re.sub(version_re, replacement, contents)
    with open(conf_path, "w") as handle:
        handle.write(contents)
if __name__ == "__main__":
    # Usage: python release.py <version>
    # Stamps the same version string into every project manifest.
    ver = sys.argv[1]
    update_cargo(ver)
    update_pyproject(ver)
    update_setuppy(ver)
    update_sphinx(ver)
| update_setuppy |
utils.py | _components = {}
def add_component(path, data):
_components[path] = data
| return _components[path]
except KeyError:
raise NameError('There is no component with path {}'.format(path)) |
def get_component(path):
try: |
local.rs | use crate::format::*;
use nom::{
bytes::streaming::tag,
combinator::opt,
number::streaming::{le_u16, le_u32, le_u64},
sequence::preceded,
};
#[derive(Debug)]
/// 4.3.7 Local file header
// NOTE(review): the struct name was elided in this copy of the file
// (FIM placeholder '|'); the impl below names it LocalFileHeaderRecord.
pub struct | {
    /// version needed to extract
    pub reader_version: Version,
    /// general purpose bit flag
    pub flags: u16,
    /// compression method
    pub method: u16,
    /// last mod file datetime
    pub modified: MsdosTimestamp,
    /// crc-32
    pub crc32: u32,
    /// compressed size
    pub compressed_size: u32,
    /// uncompressed size
    pub uncompressed_size: u32,
    /// file name
    pub name: ZipString,
    /// extra field
    pub extra: ZipBytes,
}
impl LocalFileHeaderRecord {
    /// Magic bytes "PK\x03\x04" introducing every local file header.
    pub const SIGNATURE: &'static str = "PK\x03\x04";

    /// Parse one local file header (signature included) from `i`.
    ///
    /// The trailing variable-length fields (name, extra) are read with
    /// lengths taken from the fixed-size portion — hence the two-stage
    /// `fields! ... chain fields!` combinator.
    pub fn parse<'a>(i: &'a [u8]) -> parse::Result<'a, Self> {
        preceded(
            tag(Self::SIGNATURE),
            fields!({
                reader_version: Version::parse,
                flags: le_u16,
                method: le_u16,
                modified: MsdosTimestamp::parse,
                crc32: le_u32,
                compressed_size: le_u32,
                uncompressed_size: le_u32,
                name_len: le_u16,
                extra_len: le_u16,
            } chain fields!({
                name: ZipString::parser(name_len),
                extra: ZipBytes::parser(extra_len),
            } map Self {
                reader_version,
                flags,
                method,
                modified,
                crc32,
                compressed_size,
                uncompressed_size,
                name,
                extra,
            })),
        )(i)
    }

    /// True when the CRC/sizes live in a trailing data descriptor
    /// rather than in this header.
    pub fn has_data_descriptor(&self) -> bool {
        // 4.3.9.1 This descriptor MUST exist if bit 3 of the general
        // purpose bit flag is set (see below).
        self.flags & 0b1000 != 0
    }
}
/// 4.3.9 Data descriptor:
///
/// Sizes are held as `u64` here; the non-zip64 on-disk form stores them
/// as 32-bit values, which `parse` widens (see the impl below).
#[derive(Debug)]
pub struct DataDescriptorRecord {
    /// CRC32 checksum
    pub crc32: u32,
    /// Compressed size
    pub compressed_size: u64,
    /// Uncompressed size
    pub uncompressed_size: u64,
}
impl DataDescriptorRecord {
    /// Magic bytes "PK\x07\x08"; the parser tolerates their absence
    /// (see the `opt(tag(...))` below).
    const SIGNATURE: &'static str = "PK\x07\x08";

    /// Parse a data descriptor; field widths depend on `is_zip64`.
    pub fn parse<'a>(i: &'a [u8], is_zip64: bool) -> parse::Result<'a, Self> {
        if is_zip64 {
            // zip64: both sizes are 64-bit little-endian on disk.
            preceded(
                opt(tag(Self::SIGNATURE)),
                fields!(Self {
                    crc32: le_u32,
                    compressed_size: le_u64,
                    uncompressed_size: le_u64,
                }),
            )(i)
        } else {
            // classic: 32-bit sizes on disk, widened to u64 in the record.
            preceded(
                opt(tag(Self::SIGNATURE)),
                fields!({
                    crc32: le_u32,
                    compressed_size: le_u32,
                    uncompressed_size: le_u32,
                } map Self {
                    crc32,
                    compressed_size: compressed_size as u64,
                    uncompressed_size: uncompressed_size as u64,
                }),
            )(i)
        }
    }
}
| LocalFileHeaderRecord |
Fetch_Data_Stock_CHN_Daily.py | import sys, os, time, datetime, warnings, configparser
import pandas as pd
import numpy as np
import tushare as ts
import concurrent.futures
from tqdm import tqdm
cur_path = os.path.dirname(os.path.abspath(__file__))
for _ in range(2):
root_path = cur_path[0:cur_path.rfind('/', 0, len(cur_path))]
cur_path = root_path
sys.path.append(root_path + "/" + 'Source/DataBase/')
from Source.DataBase.DB_API import queryStock, storeStock, queryStockList, storeStockList, queryStockPublishDay, storePublishDay
def getStocksList(root_path):
    """Return the CHN daily stock list, fetching and caching it on a miss.

    Tries the local database first; on any DB failure or an empty sheet,
    restarts the local MongoDB service, pulls the listing from tushare,
    stores it, and re-reads it so the caller always sees the
    DB-normalized frame. The index is the 6-digit zero-padded symbol.

    Cleanup: replaced `df.empty == False` with `not df.empty`, dropped
    the unused exception variable and dead commented-out code.
    """
    try:
        df = queryStockList(root_path, "DB_STOCK", "SHEET_CHN_DAILY")
        df.index = df.index.astype(str).str.zfill(6)
    except Exception:
        # Best-effort read: any DB error falls through to a fresh fetch.
        df = pd.DataFrame()

    if not df.empty:
        return df

    # DB empty/unreachable: kick the local MongoDB service, then refetch.
    import subprocess
    subprocess.Popen('brew services restart mongodb'.split())

    listData = pd.DataFrame(ts.get_stock_basics())
    storeStockList(root_path, "DB_STOCK", "SHEET_CHN_DAILY", listData)

    # Re-read through the DB so the returned frame matches stored form.
    df = queryStockList(root_path, "DB_STOCK", "SHEET_CHN_DAILY")
    df.index = df.index.astype(str).str.zfill(6)
    return df
def getSingleStock(symbol):
    """Fetch the full daily history for ``symbol`` via tushare.

    Returns (DataFrame sorted ascending by date, "") on success, or
    (empty DataFrame, error message) when every attempt fails.
    """
    repeat_times = 1
    message = ""
    empty_frame = pd.DataFrame()
    for _ in range(repeat_times):
        try:
            # Sorting stays inside the try: a None/odd return from
            # tushare is caught and reported like a fetch failure.
            frame = ts.get_hist_data(symbol)
            frame.sort_index(ascending=True, inplace=True)
            return frame, ""
        except Exception as e:
            message = symbol + " fetch exception: " + str(e)
    return empty_frame, message
def getSingleStockByTime(symbol, from_date, till_date):
    """Fetch daily history for ``symbol`` within [from_date, till_date].

    Dates are "YYYY-MM-DD" strings. Returns (DataFrame sorted ascending
    by date, "") on success, or (empty DataFrame, error message) when
    every attempt fails.

    Cleanup: removed the year/month/day locals split out of both dates —
    they were computed but never used (dead code).
    """
    repeat_times = 1
    message = ""
    df = pd.DataFrame()
    for _ in range(repeat_times):
        try:
            data = ts.get_hist_data(symbol, from_date, till_date)
            data.sort_index(ascending=True, inplace=True)
            return data, ""
        except Exception as e:
            message = symbol + " fetch exception: " + str(e)
            continue
    return df, message
def judgeOpenDaysInRange(from_date, to_date):
    """List business days in [from_date, to_date] that are not holidays.

    Uses a hard-coded 2017 CHN market holiday calendar. Returns a
    DataFrame with columns 'date' (Timestamp) and 'holiday' (bool,
    always False in the surviving rows).
    """
    holidays = ["2017-01-01", "2017-01-02",
                "2017-01-27", "2017-01-28", "2017-01-29", "2017-01-30", "2017-01-31", "2017-02-01", "2017-02-02",
                "2017-04-02", "2017-04-03", "2017-04-04",
                "2017-05-01",
                "2017-05-28", "2017-05-29", "2017-05-30",
                "2017-10-01", "2017-10-02", "2017-10-03", "2017-10-04", "2017-10-05", "2017-10-06", "2017-10-07", "2017-10-08"]
    business_days = pd.bdate_range(from_date, to_date)
    calendar = pd.DataFrame({'date': business_days,
                             'holiday': business_days.isin(holidays)})
    return calendar[~calendar['holiday']]
def judgeNeedPostDownload(from_date, to_date):
    """Decide whether [from_date, to_date] contains market open days.

    A range starting in the future yields False; an end date in the
    future is clamped to today before checking against the calendar.
    """
    now = datetime.datetime.now()
    if pd.Timestamp(from_date) > now:
        return False
    if pd.Timestamp(to_date) > now:
        to_date = now.strftime("%Y-%m-%d")
    return len(judgeOpenDaysInRange(from_date, to_date)) > 0
# NOTE(review): the function name was elided in this copy of the file
# (FIM placeholder '|'); updateStockData_CHN below calls it as
# updateSingleStockData — restore that name before running.
def | (root_path, symbol, force_check):
    """Bring the cached daily data for one symbol up to date.

    Returns (startTime, message): the wall-clock time the update began
    and a human-readable status string ("" when nothing happened).
    """
    startTime = time.time()
    message = ""
    # Blank symbols are silently skipped.
    if len(symbol) == 0: return startTime, message
    till_date = (datetime.datetime.now()).strftime("%Y-%m-%d")
    end_date = pd.Timestamp(till_date)
    stockData, lastUpdateTime = queryStock(root_path, "DB_STOCK", "SHEET_CHN_DAILY", symbol)
    if stockData.empty:
        # Cache miss: fetch the full history and store it.
        stockData, message = getSingleStock(symbol)
        if stockData.empty == False:
            storeStock(root_path, "DB_STOCK", "SHEET_CHN_DAILY", symbol, stockData)
        return startTime, message
    modified = False
    first_date = pd.Timestamp(stockData.index[0])  # NOTE(review): currently unused
    last_date = pd.Timestamp(stockData.index[-1])
    # Only refetch when the cache has not been refreshed today
    # (or the caller forces a check).
    updateOnce = end_date > lastUpdateTime
    if end_date > last_date and (updateOnce or force_check):
        to_date = (last_date + datetime.timedelta(days=1)).strftime("%Y-%m-%d")
        if judgeNeedPostDownload(to_date, till_date):
            message = message + ", download post data from " + to_date + " to " + till_date
            moreStockData, tempMessage = getSingleStockByTime(symbol, to_date, till_date)
            message = message + tempMessage
            if len(moreStockData) > 0:
                if isinstance(moreStockData.index, pd.DatetimeIndex):
                    # Normalize new rows to string dates so the index
                    # matches the cached frame before concatenation.
                    moreStockData.index = moreStockData.index.strftime("%Y-%m-%d")
                modified = True
                stockData = pd.concat([stockData, moreStockData])
                stockData.index.name = 'date'
    if modified:
        # Drop duplicate dates (keep the cached row) and persist.
        stockData = stockData[~stockData.index.duplicated(keep='first')]
        storeStock(root_path, "DB_STOCK", "SHEET_CHN_DAILY", symbol, stockData)
    elif updateOnce:
        # Nothing new, but re-store to refresh the last-update timestamp.
        stockData = stockData[~stockData.index.duplicated(keep='first')]
        storeStock(root_path, "DB_STOCK", "SHEET_CHN_DAILY", symbol, stockData)
        message = message + ", nothing updated"
    else:
        message = ""
    return startTime, message
def updateStockData_CHN(root_path, storeType, force_check = False):
    """Refresh daily data for every CHN symbol, serially or threaded.

    storeType == 2 updates symbols one at a time; storeType == 1 fans
    the work out to an 8-worker thread pool. Progress is rendered with
    tqdm in both modes. Returns the list of symbols processed.
    """
    symbols = getStocksList(root_path).index.values.tolist()
    pbar = tqdm(total=len(symbols))
    if storeType == 2:
        # Serial path: simplest, one symbol at a time.
        for symbol in symbols:
            startTime, message = updateSingleStockData(root_path, symbol, force_check)
            outMessage = '%-*s fetched in: %.4s seconds' % (6, symbol, (time.time() - startTime))
            pbar.set_description(outMessage)
            pbar.update(1)
    if storeType == 1:
        log_errors = []
        log_update = []
        with concurrent.futures.ThreadPoolExecutor(max_workers=8) as executor:
            # Start the load operations and mark each future with its URL
            future_to_stock = {executor.submit(updateSingleStockData, root_path, symbol, force_check): symbol for symbol in symbols}
            for future in concurrent.futures.as_completed(future_to_stock):
                stock = future_to_stock[future]
                try:
                    startTime, message = future.result()
                except Exception as exc:
                    # A failed symbol is recorded, not fatal to the batch.
                    startTime = time.time()
                    log_errors.append('%r generated an exception: %s' % (stock, exc))
                else:
                    if len(message) > 0: log_update.append(message)
                outMessage = '%-*s fetched in: %.4s seconds' % (6, stock, (time.time() - startTime))
                pbar.set_description(outMessage)
                pbar.update(1)
        if len(log_errors) > 0: print(log_errors)
        # if len(log_update) > 0: print(log_update)
    pbar.close()
    return symbols
if __name__ == "__main__":
    # Script entry point: read StoreType from config.ini, optionally
    # boot the local DB server, then run the full CHN update.
    pd.set_option('precision', 3)
    pd.set_option('display.width',1000)
    warnings.filterwarnings('ignore', category=pd.io.pytables.PerformanceWarning)
    config = configparser.ConfigParser()
    config.read(root_path + "/" + "config.ini")
    storeType = int(config.get('Setting', 'StoreType'))
    if storeType == 1:
        from Start_DB_Server import StartServer, ShutdownServer
        # start database server (async)
        thread = StartServer(root_path)
        # wait for db start, the standard procedure should listen to
        # the completed event of function "StartServer"
        time.sleep(5)
    updateStockData_CHN(root_path, storeType)
    if storeType == 1:
        # stop database server (sync)
        time.sleep(5)
        ShutdownServer()
| updateSingleStockData |
bt_inherited_acl_info.rs | /*
* Onshape REST API
*
* The Onshape REST API consumed by all clients.
*
* The version of the OpenAPI document: 1.104
* Contact: [email protected]
* Generated by: https://openapi-generator.tech
*/
/// Inherited ACL data returned by the Onshape REST API (generated model,
/// see the file header).
///
/// Every field is optional and omitted from serialized JSON when `None`
/// (`skip_serializing_if`); `serde(rename)` maps the Rust snake_case
/// names to the API's camelCase.
#[derive(Clone, Debug, PartialEq, Serialize, Deserialize)]
pub struct BtInheritedAclInfo {
    #[serde(rename = "objectName", skip_serializing_if = "Option::is_none")]
    pub object_name: Option<String>,
    #[serde(rename = "sharedWithSupport", skip_serializing_if = "Option::is_none")]
    pub shared_with_support: Option<bool>,
    /// Individual ACL entries attached to this object.
    #[serde(rename = "entries", skip_serializing_if = "Option::is_none")]
    pub entries: Option<Vec<crate::models::BtAclEntryInfo>>,
    #[serde(rename = "objectId", skip_serializing_if = "Option::is_none")]
    pub object_id: Option<String>,
    #[serde(rename = "objectType", skip_serializing_if = "Option::is_none")]
    pub object_type: Option<i64>,
    #[serde(rename = "visibility", skip_serializing_if = "Option::is_none")]
    pub visibility: Option<String>,
    #[serde(rename = "public", skip_serializing_if = "Option::is_none")]
    pub public: Option<bool>,
    #[serde(rename = "owner", skip_serializing_if = "Option::is_none")]
    pub owner: Option<crate::models::BtOwnerInfo>,
    #[serde(rename = "href", skip_serializing_if = "Option::is_none")]
    pub href: Option<String>,
    #[serde(rename = "viewRef", skip_serializing_if = "Option::is_none")]
    pub view_ref: Option<String>,
    #[serde(rename = "name", skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(rename = "id", skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
}
impl BtInheritedAclInfo {
pub fn new() -> BtInheritedAclInfo |
}
| {
BtInheritedAclInfo {
object_name: None,
shared_with_support: None,
entries: None,
object_id: None,
object_type: None,
visibility: None,
public: None,
owner: None,
href: None,
view_ref: None,
name: None,
id: None,
}
} |
0001_initial.py | # Generated by Django 3.0.2 on 2020-02-03 21:40
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
| initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='License',
fields=[
('license_key', models.CharField(max_length=32, primary_key=True, serialize=False, unique=True)),
('created_at', models.DateTimeField(auto_now_add=True)),
],
),
migrations.CreateModel(
name='Session',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('session_key', models.CharField(max_length=64)),
('created_at', models.DateTimeField(auto_now_add=True)),
],
),
migrations.CreateModel(
name='Subscription',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('created_at', models.DateTimeField(auto_now_add=True)),
('license_key', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='payment.License')),
],
),
] |
|
coding.py | from typing import Union, List, Optional
from pyspark.sql.types import (
StructType,
StructField,
StringType,
ArrayType,
BooleanType,
DataType,
)
# This file is auto-generated by generate_schema so do not edit manually
# noinspection PyPep8Naming
class CodingSchema:
"""
A reference to a code defined by a terminology system.
"""
# noinspection PyDefaultArgument
@staticmethod
def get_schema(
max_nesting_depth: Optional[int] = 6,
nesting_depth: int = 0,
nesting_list: List[str] = [],
max_recursion_limit: Optional[int] = 2,
include_extension: Optional[bool] = False,
extension_fields: Optional[List[str]] = [
"valueBoolean",
"valueCode",
"valueDate",
"valueDateTime",
"valueDecimal",
"valueId",
"valueInteger",
"valuePositiveInt",
"valueString",
"valueTime",
"valueUnsignedInt",
"valueUri",
"valueQuantity",
],
extension_depth: int = 0,
max_extension_depth: Optional[int] = 2,
) -> Union[StructType, DataType]:
"""
A reference to a code defined by a terminology system.
id: unique id for the element within a resource (for internal references). This
may be any string value that does not contain spaces.
extension: May be used to represent additional information that is not part of the basic
definition of the element. In order to make the use of extensions safe and
manageable, there is a strict set of governance applied to the definition and
use of extensions. Though any implementer is allowed to define an extension,
there is a set of requirements that SHALL be met as part of the definition of
the extension.
system: The identification of the code system that defines the meaning of the symbol
in the code.
version: The version of the code system which was used when choosing this code. Note
that a well-maintained code system does not need the version reported, because
the meaning of codes is consistent across versions. However this cannot
consistently be assured. and when the meaning is not guaranteed to be
consistent, the version SHOULD be exchanged.
code: A symbol in syntax defined by the system. The symbol may be a predefined code
or an expression in a syntax defined by the coding system (e.g. post-
coordination).
display: A representation of the meaning of the code in the system, following the rules
of the system.
userSelected: Indicates that this coding was chosen by a user directly - i.e. off a pick
list of available items (codes or displays).
"""
from spark_fhir_schemas.stu3.complex_types.extension import ExtensionSchema
if (
max_recursion_limit and nesting_list.count("Coding") >= max_recursion_limit
) or (max_nesting_depth and nesting_depth >= max_nesting_depth):
return StructType([StructField("id", StringType(), True)])
# add my name to recursion list for later
my_nesting_list: List[str] = nesting_list + ["Coding"]
schema = StructType(
[
# unique id for the element within a resource (for internal references). This
# may be any string value that does not contain spaces.
StructField("id", StringType(), True),
# May be used to represent additional information that is not part of the basic
# definition of the element. In order to make the use of extensions safe and
# manageable, there is a strict set of governance applied to the definition and
# use of extensions. Though any implementer is allowed to define an extension,
# there is a set of requirements that SHALL be met as part of the definition of
# the extension.
StructField(
"extension",
ArrayType(
ExtensionSchema.get_schema(
max_nesting_depth=max_nesting_depth,
nesting_depth=nesting_depth + 1,
nesting_list=my_nesting_list,
max_recursion_limit=max_recursion_limit,
include_extension=include_extension,
extension_fields=extension_fields,
extension_depth=extension_depth,
max_extension_depth=max_extension_depth, | ),
# The identification of the code system that defines the meaning of the symbol
# in the code.
StructField("system", StringType(), True),
# The version of the code system which was used when choosing this code. Note
# that a well-maintained code system does not need the version reported, because
# the meaning of codes is consistent across versions. However this cannot
# consistently be assured. and when the meaning is not guaranteed to be
# consistent, the version SHOULD be exchanged.
StructField("version", StringType(), True),
# A symbol in syntax defined by the system. The symbol may be a predefined code
# or an expression in a syntax defined by the coding system (e.g. post-
# coordination).
StructField("code", StringType(), True),
# A representation of the meaning of the code in the system, following the rules
# of the system.
StructField("display", StringType(), True),
# Indicates that this coding was chosen by a user directly - i.e. off a pick
# list of available items (codes or displays).
StructField("userSelected", BooleanType(), True),
]
)
if not include_extension:
schema.fields = [
c
if c.name != "extension"
else StructField("extension", StringType(), True)
for c in schema.fields
]
return schema | )
),
True, |
test_utils.py | import pytest
from gene_finder.utils import get_neighborhood_ranges
def _build_hit_dictionary(coords):
hits = {}
for coord in coords:
key = "hit_{}_{}".format(coord[0], coord[1])
hits[key] = {}
hits[key]["Query_start-pos"] = coord[0]
hits[key]["Query_end-pos"] = coord[1]
return hits
@pytest.mark.parametrize('hit_coords,expected_num_neighborhoods,expected_ranges', [
([(1,500), (3500, 3600), (6000, 6500)], 3, [(0, 1500), (2500, 4600), (5000, 7500)]),
([(1,500), (600, 11400), (20000, 20250)], 2, [(0, 12400), (19000, 21250)]),
([(1,500), (500, 1000), (1000, 1500)], 1, [(0, 2500)]),
([(500, 1), (600, 11400), (20000, 20250)], 2, [(0, 12400), (19000, 21250)]),
([(1,500), (500,1), (400, 600)], 1, [(0, 1600)]),
([(1, 500), (400, 2)], 1, [(0, 1500)]),
([(1, 500), (400, 1100), (1101, 1200)], 1, [(0, 2200)]),
([(2500, 2000)], 1, [(1000, 3500)])
]) | for nbh, expected_range in zip(neighborhoods, expected_ranges):
assert nbh == expected_range
@pytest.mark.parametrize('bait_ORF_coords,expected_last_range', [
([(6000, 6500)], (5000, 7500)),
([(1,500)], (0, 1500)),
([(98000, 99000)], (97000, 100000)),
([(98000, 99800)], (97000, 100000))
])
def test_get_neighborhood_ranges_bounds(bait_ORF_coords, expected_last_range):
"""
Test that `get_neighborhood_ranges` always reports candidate region coordinates
that are actually within the bounds of the parent contig.
"""
hits = _build_hit_dictionary(bait_ORF_coords)
neighborhoods = get_neighborhood_ranges(hits, contig_len=100000, span=1000)
assert neighborhoods[0] == expected_last_range | def test_get_neighborhood_ranges(hit_coords, expected_num_neighborhoods, expected_ranges):
hits = _build_hit_dictionary(hit_coords)
neighborhoods = get_neighborhood_ranges(hits, 100000, span=1000)
assert len(neighborhoods) == expected_num_neighborhoods |
deleteUser.go | // Copyright © 2018 NAME HERE <EMAIL ADDRESS>
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package cmd
import (
"log"
"github.com/Agenda-Go/entity"
"github.com/spf13/cobra"
)
// deleteUserCmd implements "agenda deleteUser": it deletes the currently
// logged-in user together with every meeting that user sponsors, then
// clears the login state.
var deleteUserCmd = &cobra.Command{
	Use:   "deleteUser",
	Short: "Delete your account",
	Long:  "Usage:agenda deleteUser",
	Run: func(cmd *cobra.Command, args []string) {
		log.Println("deleteUser called")
		entity.Init()
		// Capture the name up front: SetCurrentUser("") below makes
		// GetCurrentUser() return "", so logging it afterwards (as the
		// original code did) always printed an empty username.
		username := entity.GetCurrentUser()
		if username == "" {
			log.Println("Please log in first")
		} else {
			// Remove the user's meetings first, then the account itself.
			flag1 := entity.DeleteAllMeeting(username)
			flag2 := entity.DeleteUser(username)
			if flag1 == 0 && flag2 {
				entity.SetCurrentUser("")
				entity.UpdateLib()
				log.Println("Delete user " + username + " successfully")
			} else {
				log.Println("Delete user " + username + " failed")
			}
		}
	},
}
func init() {
| rootCmd.AddCommand(deleteUserCmd)
}
|
|
config.go | // Copyright 2016 The Go Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package ld
import (
"cmd/internal/obj"
"cmd/internal/sys"
"fmt"
"log"
)
var (
Linkmode LinkMode
Buildmode BuildMode
)
// A BuildMode indicates the sort of object we are building.
//
// Possible build modes are the same as those for the -buildmode flag
// in cmd/go, and are documented in 'go help buildmode'.
type BuildMode uint8
const (
BuildmodeUnset BuildMode = iota
BuildmodeExe
BuildmodePIE
BuildmodeCArchive
BuildmodeCShared
BuildmodeShared
BuildmodePlugin
)
// Set implements flag.Value for the -buildmode flag. It parses s into
// *mode, rejecting both unknown mode names and modes that are not
// supported on the target GOOS/GOARCH.
func (mode *BuildMode) Set(s string) error {
	badmode := func() error {
		return fmt.Errorf("buildmode %s not supported on %s/%s", s, obj.GOOS, obj.GOARCH)
	}
	// oneOf reports whether v equals any member of list.
	oneOf := func(v string, list ...string) bool {
		for _, item := range list {
			if v == item {
				return true
			}
		}
		return false
	}
	switch s {
	case "exe":
		*mode = BuildmodeExe
	case "pie":
		// PIE binaries are only produced for android and linux.
		if !oneOf(obj.GOOS, "android", "linux") {
			return badmode()
		}
		*mode = BuildmodePIE
	case "c-archive":
		// darwin/linux: any arch; windows: amd64 and 386 only.
		windowsOK := obj.GOOS == "windows" && oneOf(obj.GOARCH, "amd64", "386")
		if !oneOf(obj.GOOS, "darwin", "linux") && !windowsOK {
			return badmode()
		}
		*mode = BuildmodeCArchive
	case "c-shared":
		if !oneOf(obj.GOARCH, "386", "amd64", "arm", "arm64") {
			return badmode()
		}
		*mode = BuildmodeCShared
	case "shared":
		// Go shared libraries are linux-only, on a fixed arch set.
		if obj.GOOS != "linux" || !oneOf(obj.GOARCH, "386", "amd64", "arm", "arm64", "ppc64le", "s390x") {
			return badmode()
		}
		*mode = BuildmodeShared
	case "plugin":
		linuxOK := obj.GOOS == "linux" && oneOf(obj.GOARCH, "386", "amd64", "arm", "arm64")
		darwinOK := obj.GOOS == "darwin" && obj.GOARCH == "amd64"
		if !linuxOK && !darwinOK {
			return badmode()
		}
		*mode = BuildmodePlugin
	default:
		return fmt.Errorf("invalid buildmode: %q", s)
	}
	return nil
}
// String implements flag.Value, returning the textual name of the mode.
// BuildmodeUnset maps to "" so that no default is shown in the usage
// message; unknown values fall through to a diagnostic form.
func (mode *BuildMode) String() string {
	names := map[BuildMode]string{
		BuildmodeUnset:    "",
		BuildmodeExe:      "exe",
		BuildmodePIE:      "pie",
		BuildmodeCArchive: "c-archive",
		BuildmodeCShared:  "c-shared",
		BuildmodeShared:   "shared",
		BuildmodePlugin:   "plugin",
	}
	if name, ok := names[*mode]; ok {
		return name
	}
	return fmt.Sprintf("BuildMode(%d)", uint8(*mode))
}
// LinkMode indicates whether an external linker is used for the final link.
type LinkMode uint8
const (
LinkAuto LinkMode = iota
LinkInternal
LinkExternal
)
// Set implements flag.Value for the -linkmode flag, accepting exactly
// "auto", "internal", or "external".
func (mode *LinkMode) Set(s string) error {
	modes := map[string]LinkMode{
		"auto":     LinkAuto,
		"internal": LinkInternal,
		"external": LinkExternal,
	}
	m, ok := modes[s]
	if !ok {
		return fmt.Errorf("invalid linkmode: %q", s)
	}
	*mode = m
	return nil
}
// String implements flag.Value, returning the flag spelling of the mode;
// unknown values fall through to a diagnostic form.
func (mode *LinkMode) String() string {
	names := map[LinkMode]string{
		LinkAuto:     "auto",
		LinkInternal: "internal",
		LinkExternal: "external",
	}
	if name, ok := names[*mode]; ok {
		return name
	}
	return fmt.Sprintf("LinkMode(%d)", uint8(*mode))
}
// mustLinkExternal reports whether the program being linked requires
// the external linker be used to complete the link.
// When it does, reason names the constraint (OS, sanitizer, cgo arch,
// or build mode) that forces external linking.
func mustLinkExternal(ctxt *Link) (res bool, reason string) {
	if ctxt.Debugvlog > 1 {
		// NOTE(review): this defer is truncated in the source view; it
		// presumably logs reason when res is true — confirm against the
		// full file before relying on this description.
		defer func() {
			if res
		}()
	}
	// Some target operating systems can never be linked internally.
	switch obj.GOOS {
	case "android":
		return true, "android"
	case "darwin":
		// ARM-family darwin means iOS, which requires the external linker.
		if SysArch.InFamily(sys.ARM, sys.ARM64) {
			return true, "iOS"
		}
	}
	// The memory sanitizer runtime must be linked in by the system linker.
	if *flagMsan {
		return true, "msan"
	}
	// Internally linking cgo is incomplete on some architectures.
	// https://golang.org/issue/10373
	// https://golang.org/issue/14449
	if iscgo && SysArch.InFamily(sys.ARM64, sys.MIPS64) {
		return true, obj.GOARCH + " does not support internal cgo"
	}
	// Some build modes require work the internal linker cannot do (yet).
	switch Buildmode {
	case BuildmodeCArchive:
		return true, "buildmode=c-archive"
	case BuildmodeCShared:
		return true, "buildmode=c-shared"
	case BuildmodePIE:
		switch obj.GOOS + "/" + obj.GOARCH {
		case "linux/amd64":
		default:
			// Internal linking does not support TLS_IE.
			return true, "buildmode=pie"
		}
	case BuildmodeShared:
		return true, "buildmode=shared"
	}
	if *FlagLinkshared {
		return true, "dynamically linking with a shared library"
	}
	return false, ""
}
// determineLinkMode sets Linkmode, resolving LinkAuto to an explicit
// internal/external choice and verifying that an explicitly requested
// internal link is actually possible.
//
// It is called after flags are processed and inputs are processed,
// so the Linkmode variable has an initial value from the -linkmode
// flag and the iscgo and externalobj variables are set.
func determineLinkMode(ctxt *Link) {
	switch Linkmode {
	case LinkInternal:
		// Internal linking was requested explicitly; die if impossible.
		if needed, reason := mustLinkExternal(ctxt); needed {
			Exitf("internal linking requested but external linking required: %s", reason)
		}
	case LinkAuto:
		// The environment variable GO_EXTLINK_ENABLED controls the
		// default value of -linkmode. If it is not set when the
		// linker is called we take the value it was set to when
		// cmd/link was compiled. (See make.bash.)
		switch obj.Getgoextlinkenabled() {
		case "0":
			if needed, reason := mustLinkExternal(ctxt); needed {
				Exitf("internal linking requested via GO_EXTLINK_ENABLED, but external linking required: %s", reason)
			}
			Linkmode = LinkInternal
		case "1":
			Linkmode = LinkExternal
		default:
			needed, _ := mustLinkExternal(ctxt)
			if needed || (iscgo && externalobj) {
				Linkmode = LinkExternal
			} else {
				Linkmode = LinkInternal
			}
		}
	}
}
| {
log.Printf("external linking is forced by: %s\n", reason)
} |
walletsvrcmds_test.go | // Copyright (c) 2014-2020 The btcsuite developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.
package btcjson_test
import (
"bytes"
"encoding/json"
"fmt"
"reflect"
"testing"
"github.com/John-Tonny/vclsuite_vcld/btcjson"
"github.com/btcsuite/btcutil"
)
// TestWalletSvrCmds tests all of the wallet server commands marshal and
// unmarshal into valid results include handling of optional fields being
// omitted in the marshalled command, while optional fields with defaults have
// the default assigned on unmarshalled commands.
func TestWalletSvrCmds(t *testing.T) {
t.Parallel()
testID := int(1)
tests := []struct {
name string
newCmd func() (interface{}, error)
staticCmd func() interface{}
marshalled string
unmarshalled interface{}
}{
{
name: "addmultisigaddress",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("addmultisigaddress", 2, []string{"031234", "035678"})
},
staticCmd: func() interface{} {
keys := []string{"031234", "035678"}
return btcjson.NewAddMultisigAddressCmd(2, keys, nil)
},
marshalled: `{"jsonrpc":"1.0","method":"addmultisigaddress","params":[2,["031234","035678"]],"id":1}`,
unmarshalled: &btcjson.AddMultisigAddressCmd{
NRequired: 2,
Keys: []string{"031234", "035678"},
Account: nil,
},
},
{
name: "addmultisigaddress optional",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("addmultisigaddress", 2, []string{"031234", "035678"}, "test")
},
staticCmd: func() interface{} {
keys := []string{"031234", "035678"}
return btcjson.NewAddMultisigAddressCmd(2, keys, btcjson.String("test"))
},
marshalled: `{"jsonrpc":"1.0","method":"addmultisigaddress","params":[2,["031234","035678"],"test"],"id":1}`,
unmarshalled: &btcjson.AddMultisigAddressCmd{
NRequired: 2,
Keys: []string{"031234", "035678"},
Account: btcjson.String("test"),
},
},
{
name: "createwallet",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("createwallet", "mywallet", true, true, "secret", true)
},
staticCmd: func() interface{} {
return btcjson.NewCreateWalletCmd("mywallet",
btcjson.Bool(true), btcjson.Bool(true),
btcjson.String("secret"), btcjson.Bool(true))
},
marshalled: `{"jsonrpc":"1.0","method":"createwallet","params":["mywallet",true,true,"secret",true],"id":1}`,
unmarshalled: &btcjson.CreateWalletCmd{
WalletName: "mywallet",
DisablePrivateKeys: btcjson.Bool(true),
Blank: btcjson.Bool(true),
Passphrase: btcjson.String("secret"),
AvoidReuse: btcjson.Bool(true),
},
},
{
name: "createwallet - optional1",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("createwallet", "mywallet")
},
staticCmd: func() interface{} {
return btcjson.NewCreateWalletCmd("mywallet",
nil, nil, nil, nil)
},
marshalled: `{"jsonrpc":"1.0","method":"createwallet","params":["mywallet"],"id":1}`,
unmarshalled: &btcjson.CreateWalletCmd{
WalletName: "mywallet",
DisablePrivateKeys: btcjson.Bool(false),
Blank: btcjson.Bool(false),
Passphrase: btcjson.String(""),
AvoidReuse: btcjson.Bool(false),
},
},
{
name: "createwallet - optional2",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("createwallet", "mywallet", "null", "null", "secret")
},
staticCmd: func() interface{} {
return btcjson.NewCreateWalletCmd("mywallet",
nil, nil, btcjson.String("secret"), nil)
},
marshalled: `{"jsonrpc":"1.0","method":"createwallet","params":["mywallet",null,null,"secret"],"id":1}`,
unmarshalled: &btcjson.CreateWalletCmd{
WalletName: "mywallet",
DisablePrivateKeys: nil,
Blank: nil,
Passphrase: btcjson.String("secret"),
AvoidReuse: btcjson.Bool(false),
},
},
{
name: "addwitnessaddress",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("addwitnessaddress", "1address")
},
staticCmd: func() interface{} {
return btcjson.NewAddWitnessAddressCmd("1address")
},
marshalled: `{"jsonrpc":"1.0","method":"addwitnessaddress","params":["1address"],"id":1}`,
unmarshalled: &btcjson.AddWitnessAddressCmd{
Address: "1address",
},
},
{
name: "backupwallet",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("backupwallet", "backup.dat")
},
staticCmd: func() interface{} {
return btcjson.NewBackupWalletCmd("backup.dat")
},
marshalled: `{"jsonrpc":"1.0","method":"backupwallet","params":["backup.dat"],"id":1}`,
unmarshalled: &btcjson.BackupWalletCmd{Destination: "backup.dat"},
},
{
name: "loadwallet",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("loadwallet", "wallet.dat")
},
staticCmd: func() interface{} {
return btcjson.NewLoadWalletCmd("wallet.dat")
},
marshalled: `{"jsonrpc":"1.0","method":"loadwallet","params":["wallet.dat"],"id":1}`,
unmarshalled: &btcjson.LoadWalletCmd{WalletName: "wallet.dat"},
},
{
name: "unloadwallet",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("unloadwallet", "default")
},
staticCmd: func() interface{} {
return btcjson.NewUnloadWalletCmd(btcjson.String("default"))
},
marshalled: `{"jsonrpc":"1.0","method":"unloadwallet","params":["default"],"id":1}`,
unmarshalled: &btcjson.UnloadWalletCmd{WalletName: btcjson.String("default")},
},
{name: "unloadwallet - nil arg",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("unloadwallet")
},
staticCmd: func() interface{} {
return btcjson.NewUnloadWalletCmd(nil)
},
marshalled: `{"jsonrpc":"1.0","method":"unloadwallet","params":[],"id":1}`,
unmarshalled: &btcjson.UnloadWalletCmd{WalletName: nil},
},
{
name: "createmultisig",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("createmultisig", 2, []string{"031234", "035678"})
},
staticCmd: func() interface{} {
keys := []string{"031234", "035678"}
return btcjson.NewCreateMultisigCmd(2, keys)
},
marshalled: `{"jsonrpc":"1.0","method":"createmultisig","params":[2,["031234","035678"]],"id":1}`,
unmarshalled: &btcjson.CreateMultisigCmd{
NRequired: 2,
Keys: []string{"031234", "035678"},
},
},
{
name: "dumpprivkey",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("dumpprivkey", "1Address")
},
staticCmd: func() interface{} {
return btcjson.NewDumpPrivKeyCmd("1Address")
},
marshalled: `{"jsonrpc":"1.0","method":"dumpprivkey","params":["1Address"],"id":1}`,
unmarshalled: &btcjson.DumpPrivKeyCmd{
Address: "1Address",
},
},
{
name: "encryptwallet",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("encryptwallet", "pass")
},
staticCmd: func() interface{} {
return btcjson.NewEncryptWalletCmd("pass")
},
marshalled: `{"jsonrpc":"1.0","method":"encryptwallet","params":["pass"],"id":1}`,
unmarshalled: &btcjson.EncryptWalletCmd{
Passphrase: "pass",
},
},
{
name: "estimatefee",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("estimatefee", 6)
},
staticCmd: func() interface{} {
return btcjson.NewEstimateFeeCmd(6)
},
marshalled: `{"jsonrpc":"1.0","method":"estimatefee","params":[6],"id":1}`,
unmarshalled: &btcjson.EstimateFeeCmd{
NumBlocks: 6,
},
},
{
name: "estimatesmartfee - no mode",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("estimatesmartfee", 6)
},
staticCmd: func() interface{} {
return btcjson.NewEstimateSmartFeeCmd(6, nil)
},
marshalled: `{"jsonrpc":"1.0","method":"estimatesmartfee","params":[6],"id":1}`,
unmarshalled: &btcjson.EstimateSmartFeeCmd{
ConfTarget: 6,
EstimateMode: &btcjson.EstimateModeConservative,
},
},
{
name: "estimatesmartfee - economical mode",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("estimatesmartfee", 6, btcjson.EstimateModeEconomical)
},
staticCmd: func() interface{} {
return btcjson.NewEstimateSmartFeeCmd(6, &btcjson.EstimateModeEconomical)
},
marshalled: `{"jsonrpc":"1.0","method":"estimatesmartfee","params":[6,"ECONOMICAL"],"id":1}`,
unmarshalled: &btcjson.EstimateSmartFeeCmd{
ConfTarget: 6,
EstimateMode: &btcjson.EstimateModeEconomical,
},
},
{
name: "estimatepriority",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("estimatepriority", 6)
},
staticCmd: func() interface{} {
return btcjson.NewEstimatePriorityCmd(6)
},
marshalled: `{"jsonrpc":"1.0","method":"estimatepriority","params":[6],"id":1}`,
unmarshalled: &btcjson.EstimatePriorityCmd{
NumBlocks: 6,
},
},
{
name: "getaccount",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("getaccount", "1Address")
},
staticCmd: func() interface{} {
return btcjson.NewGetAccountCmd("1Address")
},
marshalled: `{"jsonrpc":"1.0","method":"getaccount","params":["1Address"],"id":1}`,
unmarshalled: &btcjson.GetAccountCmd{
Address: "1Address",
},
},
{
name: "getaccountaddress",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("getaccountaddress", "acct")
},
staticCmd: func() interface{} {
return btcjson.NewGetAccountAddressCmd("acct")
},
marshalled: `{"jsonrpc":"1.0","method":"getaccountaddress","params":["acct"],"id":1}`,
unmarshalled: &btcjson.GetAccountAddressCmd{
Account: "acct",
},
},
{
name: "getaddressesbyaccount",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("getaddressesbyaccount", "acct")
},
staticCmd: func() interface{} {
return btcjson.NewGetAddressesByAccountCmd("acct")
},
marshalled: `{"jsonrpc":"1.0","method":"getaddressesbyaccount","params":["acct"],"id":1}`,
unmarshalled: &btcjson.GetAddressesByAccountCmd{
Account: "acct",
},
},
{
name: "getaddressinfo",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("getaddressinfo", "1234")
},
staticCmd: func() interface{} {
return btcjson.NewGetAddressInfoCmd("1234")
},
marshalled: `{"jsonrpc":"1.0","method":"getaddressinfo","params":["1234"],"id":1}`,
unmarshalled: &btcjson.GetAddressInfoCmd{
Address: "1234",
},
},
{
name: "getbalance",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("getbalance")
},
staticCmd: func() interface{} {
return btcjson.NewGetBalanceCmd(nil, nil)
},
marshalled: `{"jsonrpc":"1.0","method":"getbalance","params":[],"id":1}`,
unmarshalled: &btcjson.GetBalanceCmd{
Account: nil,
MinConf: btcjson.Int(1),
},
},
{
name: "getbalance optional1",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("getbalance", "acct")
},
staticCmd: func() interface{} {
return btcjson.NewGetBalanceCmd(btcjson.String("acct"), nil)
},
marshalled: `{"jsonrpc":"1.0","method":"getbalance","params":["acct"],"id":1}`,
unmarshalled: &btcjson.GetBalanceCmd{
Account: btcjson.String("acct"),
MinConf: btcjson.Int(1),
},
},
{
name: "getbalance optional2",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("getbalance", "acct", 6)
},
staticCmd: func() interface{} {
return btcjson.NewGetBalanceCmd(btcjson.String("acct"), btcjson.Int(6))
},
marshalled: `{"jsonrpc":"1.0","method":"getbalance","params":["acct",6],"id":1}`,
unmarshalled: &btcjson.GetBalanceCmd{
Account: btcjson.String("acct"),
MinConf: btcjson.Int(6),
},
},
{
name: "getbalances",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("getbalances")
},
staticCmd: func() interface{} {
return btcjson.NewGetBalancesCmd()
},
marshalled: `{"jsonrpc":"1.0","method":"getbalances","params":[],"id":1}`,
unmarshalled: &btcjson.GetBalancesCmd{},
},
{
name: "getnewaddress",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("getnewaddress")
},
staticCmd: func() interface{} {
return btcjson.NewGetNewAddressCmd(nil)
},
marshalled: `{"jsonrpc":"1.0","method":"getnewaddress","params":[],"id":1}`,
unmarshalled: &btcjson.GetNewAddressCmd{
Account: nil,
},
},
{
name: "getnewaddress optional",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("getnewaddress", "acct")
},
staticCmd: func() interface{} {
return btcjson.NewGetNewAddressCmd(btcjson.String("acct"))
},
marshalled: `{"jsonrpc":"1.0","method":"getnewaddress","params":["acct"],"id":1}`,
unmarshalled: &btcjson.GetNewAddressCmd{
Account: btcjson.String("acct"),
},
},
{
name: "getrawchangeaddress",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("getrawchangeaddress")
},
staticCmd: func() interface{} {
return btcjson.NewGetRawChangeAddressCmd(nil)
},
marshalled: `{"jsonrpc":"1.0","method":"getrawchangeaddress","params":[],"id":1}`,
unmarshalled: &btcjson.GetRawChangeAddressCmd{
Account: nil,
},
},
{
name: "getrawchangeaddress optional",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("getrawchangeaddress", "acct")
},
staticCmd: func() interface{} {
return btcjson.NewGetRawChangeAddressCmd(btcjson.String("acct"))
},
marshalled: `{"jsonrpc":"1.0","method":"getrawchangeaddress","params":["acct"],"id":1}`,
unmarshalled: &btcjson.GetRawChangeAddressCmd{
Account: btcjson.String("acct"),
},
},
{
name: "getreceivedbyaccount",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("getreceivedbyaccount", "acct")
},
staticCmd: func() interface{} {
return btcjson.NewGetReceivedByAccountCmd("acct", nil)
},
marshalled: `{"jsonrpc":"1.0","method":"getreceivedbyaccount","params":["acct"],"id":1}`,
unmarshalled: &btcjson.GetReceivedByAccountCmd{
Account: "acct",
MinConf: btcjson.Int(1),
},
},
{
name: "getreceivedbyaccount optional",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("getreceivedbyaccount", "acct", 6)
},
staticCmd: func() interface{} {
return btcjson.NewGetReceivedByAccountCmd("acct", btcjson.Int(6))
},
marshalled: `{"jsonrpc":"1.0","method":"getreceivedbyaccount","params":["acct",6],"id":1}`,
unmarshalled: &btcjson.GetReceivedByAccountCmd{
Account: "acct",
MinConf: btcjson.Int(6),
},
},
{
name: "getreceivedbyaddress",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("getreceivedbyaddress", "1Address")
},
staticCmd: func() interface{} {
return btcjson.NewGetReceivedByAddressCmd("1Address", nil)
},
marshalled: `{"jsonrpc":"1.0","method":"getreceivedbyaddress","params":["1Address"],"id":1}`,
unmarshalled: &btcjson.GetReceivedByAddressCmd{
Address: "1Address",
MinConf: btcjson.Int(1),
},
},
{
name: "getreceivedbyaddress optional",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("getreceivedbyaddress", "1Address", 6)
},
staticCmd: func() interface{} {
return btcjson.NewGetReceivedByAddressCmd("1Address", btcjson.Int(6))
},
marshalled: `{"jsonrpc":"1.0","method":"getreceivedbyaddress","params":["1Address",6],"id":1}`,
unmarshalled: &btcjson.GetReceivedByAddressCmd{
Address: "1Address",
MinConf: btcjson.Int(6),
},
},
{
name: "gettransaction",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("gettransaction", "123")
},
staticCmd: func() interface{} {
return btcjson.NewGetTransactionCmd("123", nil)
},
marshalled: `{"jsonrpc":"1.0","method":"gettransaction","params":["123"],"id":1}`,
unmarshalled: &btcjson.GetTransactionCmd{
Txid: "123",
IncludeWatchOnly: btcjson.Bool(false),
},
},
{
name: "gettransaction optional",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("gettransaction", "123", true)
},
staticCmd: func() interface{} {
return btcjson.NewGetTransactionCmd("123", btcjson.Bool(true))
},
marshalled: `{"jsonrpc":"1.0","method":"gettransaction","params":["123",true],"id":1}`,
unmarshalled: &btcjson.GetTransactionCmd{
Txid: "123",
IncludeWatchOnly: btcjson.Bool(true),
},
},
{
name: "getwalletinfo",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("getwalletinfo")
},
staticCmd: func() interface{} {
return btcjson.NewGetWalletInfoCmd()
},
marshalled: `{"jsonrpc":"1.0","method":"getwalletinfo","params":[],"id":1}`,
unmarshalled: &btcjson.GetWalletInfoCmd{},
},
{
name: "importprivkey",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("importprivkey", "abc")
},
staticCmd: func() interface{} {
return btcjson.NewImportPrivKeyCmd("abc", nil, nil)
},
marshalled: `{"jsonrpc":"1.0","method":"importprivkey","params":["abc"],"id":1}`,
unmarshalled: &btcjson.ImportPrivKeyCmd{
PrivKey: "abc",
Label: nil,
Rescan: btcjson.Bool(true),
},
},
{
name: "importprivkey optional1",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("importprivkey", "abc", "label")
},
staticCmd: func() interface{} {
return btcjson.NewImportPrivKeyCmd("abc", btcjson.String("label"), nil)
},
marshalled: `{"jsonrpc":"1.0","method":"importprivkey","params":["abc","label"],"id":1}`,
unmarshalled: &btcjson.ImportPrivKeyCmd{
PrivKey: "abc",
Label: btcjson.String("label"),
Rescan: btcjson.Bool(true),
},
},
{
name: "importprivkey optional2",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("importprivkey", "abc", "label", false)
},
staticCmd: func() interface{} {
return btcjson.NewImportPrivKeyCmd("abc", btcjson.String("label"), btcjson.Bool(false))
},
marshalled: `{"jsonrpc":"1.0","method":"importprivkey","params":["abc","label",false],"id":1}`,
unmarshalled: &btcjson.ImportPrivKeyCmd{
PrivKey: "abc",
Label: btcjson.String("label"),
Rescan: btcjson.Bool(false),
},
},
{
name: "keypoolrefill",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("keypoolrefill")
},
staticCmd: func() interface{} {
return btcjson.NewKeyPoolRefillCmd(nil)
},
marshalled: `{"jsonrpc":"1.0","method":"keypoolrefill","params":[],"id":1}`,
unmarshalled: &btcjson.KeyPoolRefillCmd{
NewSize: btcjson.Uint(100),
},
},
{
name: "keypoolrefill optional",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("keypoolrefill", 200)
},
staticCmd: func() interface{} {
return btcjson.NewKeyPoolRefillCmd(btcjson.Uint(200))
},
marshalled: `{"jsonrpc":"1.0","method":"keypoolrefill","params":[200],"id":1}`,
unmarshalled: &btcjson.KeyPoolRefillCmd{
NewSize: btcjson.Uint(200),
},
},
{
name: "listaccounts",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("listaccounts")
},
staticCmd: func() interface{} {
return btcjson.NewListAccountsCmd(nil)
},
marshalled: `{"jsonrpc":"1.0","method":"listaccounts","params":[],"id":1}`,
unmarshalled: &btcjson.ListAccountsCmd{
MinConf: btcjson.Int(1),
},
},
{
name: "listaccounts optional",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("listaccounts", 6)
},
staticCmd: func() interface{} {
return btcjson.NewListAccountsCmd(btcjson.Int(6))
},
marshalled: `{"jsonrpc":"1.0","method":"listaccounts","params":[6],"id":1}`,
unmarshalled: &btcjson.ListAccountsCmd{
MinConf: btcjson.Int(6),
},
},
{
name: "listaddressgroupings",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("listaddressgroupings")
},
staticCmd: func() interface{} {
return btcjson.NewListAddressGroupingsCmd()
},
marshalled: `{"jsonrpc":"1.0","method":"listaddressgroupings","params":[],"id":1}`,
unmarshalled: &btcjson.ListAddressGroupingsCmd{},
},
{
name: "listlockunspent",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("listlockunspent")
},
staticCmd: func() interface{} {
return btcjson.NewListLockUnspentCmd()
},
marshalled: `{"jsonrpc":"1.0","method":"listlockunspent","params":[],"id":1}`,
unmarshalled: &btcjson.ListLockUnspentCmd{},
},
{
name: "listreceivedbyaccount",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("listreceivedbyaccount")
},
staticCmd: func() interface{} {
return btcjson.NewListReceivedByAccountCmd(nil, nil, nil)
},
marshalled: `{"jsonrpc":"1.0","method":"listreceivedbyaccount","params":[],"id":1}`,
unmarshalled: &btcjson.ListReceivedByAccountCmd{
MinConf: btcjson.Int(1),
IncludeEmpty: btcjson.Bool(false),
IncludeWatchOnly: btcjson.Bool(false),
},
},
{
name: "listreceivedbyaccount optional1",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("listreceivedbyaccount", 6)
},
staticCmd: func() interface{} {
return btcjson.NewListReceivedByAccountCmd(btcjson.Int(6), nil, nil)
},
marshalled: `{"jsonrpc":"1.0","method":"listreceivedbyaccount","params":[6],"id":1}`,
unmarshalled: &btcjson.ListReceivedByAccountCmd{
MinConf: btcjson.Int(6),
IncludeEmpty: btcjson.Bool(false),
IncludeWatchOnly: btcjson.Bool(false),
},
},
{
name: "listreceivedbyaccount optional2",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("listreceivedbyaccount", 6, true)
},
staticCmd: func() interface{} {
return btcjson.NewListReceivedByAccountCmd(btcjson.Int(6), btcjson.Bool(true), nil)
},
marshalled: `{"jsonrpc":"1.0","method":"listreceivedbyaccount","params":[6,true],"id":1}`,
unmarshalled: &btcjson.ListReceivedByAccountCmd{
MinConf: btcjson.Int(6),
IncludeEmpty: btcjson.Bool(true),
IncludeWatchOnly: btcjson.Bool(false),
},
},
{
name: "listreceivedbyaccount optional3",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("listreceivedbyaccount", 6, true, false)
},
staticCmd: func() interface{} {
return btcjson.NewListReceivedByAccountCmd(btcjson.Int(6), btcjson.Bool(true), btcjson.Bool(false))
},
marshalled: `{"jsonrpc":"1.0","method":"listreceivedbyaccount","params":[6,true,false],"id":1}`,
unmarshalled: &btcjson.ListReceivedByAccountCmd{
MinConf: btcjson.Int(6),
IncludeEmpty: btcjson.Bool(true),
IncludeWatchOnly: btcjson.Bool(false),
},
},
{
name: "listreceivedbyaddress",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("listreceivedbyaddress")
},
staticCmd: func() interface{} {
return btcjson.NewListReceivedByAddressCmd(nil, nil, nil)
},
marshalled: `{"jsonrpc":"1.0","method":"listreceivedbyaddress","params":[],"id":1}`,
unmarshalled: &btcjson.ListReceivedByAddressCmd{
MinConf: btcjson.Int(1),
IncludeEmpty: btcjson.Bool(false),
IncludeWatchOnly: btcjson.Bool(false),
},
},
{
name: "listreceivedbyaddress optional1",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("listreceivedbyaddress", 6)
},
staticCmd: func() interface{} {
return btcjson.NewListReceivedByAddressCmd(btcjson.Int(6), nil, nil)
},
marshalled: `{"jsonrpc":"1.0","method":"listreceivedbyaddress","params":[6],"id":1}`,
unmarshalled: &btcjson.ListReceivedByAddressCmd{
MinConf: btcjson.Int(6),
IncludeEmpty: btcjson.Bool(false),
IncludeWatchOnly: btcjson.Bool(false),
},
},
{
name: "listreceivedbyaddress optional2",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("listreceivedbyaddress", 6, true)
},
staticCmd: func() interface{} {
return btcjson.NewListReceivedByAddressCmd(btcjson.Int(6), btcjson.Bool(true), nil)
},
marshalled: `{"jsonrpc":"1.0","method":"listreceivedbyaddress","params":[6,true],"id":1}`,
unmarshalled: &btcjson.ListReceivedByAddressCmd{
MinConf: btcjson.Int(6),
IncludeEmpty: btcjson.Bool(true),
IncludeWatchOnly: btcjson.Bool(false),
},
},
{
name: "listreceivedbyaddress optional3",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("listreceivedbyaddress", 6, true, false)
},
staticCmd: func() interface{} {
return btcjson.NewListReceivedByAddressCmd(btcjson.Int(6), btcjson.Bool(true), btcjson.Bool(false))
},
marshalled: `{"jsonrpc":"1.0","method":"listreceivedbyaddress","params":[6,true,false],"id":1}`,
unmarshalled: &btcjson.ListReceivedByAddressCmd{
MinConf: btcjson.Int(6),
IncludeEmpty: btcjson.Bool(true),
IncludeWatchOnly: btcjson.Bool(false),
},
},
{
name: "listsinceblock",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("listsinceblock")
},
staticCmd: func() interface{} {
return btcjson.NewListSinceBlockCmd(nil, nil, nil)
},
marshalled: `{"jsonrpc":"1.0","method":"listsinceblock","params":[],"id":1}`,
unmarshalled: &btcjson.ListSinceBlockCmd{
BlockHash: nil,
TargetConfirmations: btcjson.Int(1),
IncludeWatchOnly: btcjson.Bool(false),
},
},
{
name: "listsinceblock optional1",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("listsinceblock", "123")
},
staticCmd: func() interface{} {
return btcjson.NewListSinceBlockCmd(btcjson.String("123"), nil, nil)
},
marshalled: `{"jsonrpc":"1.0","method":"listsinceblock","params":["123"],"id":1}`,
unmarshalled: &btcjson.ListSinceBlockCmd{
BlockHash: btcjson.String("123"),
TargetConfirmations: btcjson.Int(1),
IncludeWatchOnly: btcjson.Bool(false),
},
},
{
name: "listsinceblock optional2",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("listsinceblock", "123", 6)
},
staticCmd: func() interface{} {
return btcjson.NewListSinceBlockCmd(btcjson.String("123"), btcjson.Int(6), nil)
},
marshalled: `{"jsonrpc":"1.0","method":"listsinceblock","params":["123",6],"id":1}`,
unmarshalled: &btcjson.ListSinceBlockCmd{
BlockHash: btcjson.String("123"),
TargetConfirmations: btcjson.Int(6),
IncludeWatchOnly: btcjson.Bool(false),
},
},
{
name: "listsinceblock optional3",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("listsinceblock", "123", 6, true)
},
staticCmd: func() interface{} {
return btcjson.NewListSinceBlockCmd(btcjson.String("123"), btcjson.Int(6), btcjson.Bool(true))
},
marshalled: `{"jsonrpc":"1.0","method":"listsinceblock","params":["123",6,true],"id":1}`,
unmarshalled: &btcjson.ListSinceBlockCmd{
BlockHash: btcjson.String("123"),
TargetConfirmations: btcjson.Int(6),
IncludeWatchOnly: btcjson.Bool(true),
},
},
{
name: "listsinceblock pad null",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("listsinceblock", "null", 1, false)
},
staticCmd: func() interface{} {
return btcjson.NewListSinceBlockCmd(nil, btcjson.Int(1), btcjson.Bool(false))
},
marshalled: `{"jsonrpc":"1.0","method":"listsinceblock","params":[null,1,false],"id":1}`,
unmarshalled: &btcjson.ListSinceBlockCmd{
BlockHash: nil,
TargetConfirmations: btcjson.Int(1),
IncludeWatchOnly: btcjson.Bool(false),
},
},
{
name: "listtransactions",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("listtransactions")
},
staticCmd: func() interface{} {
return btcjson.NewListTransactionsCmd(nil, nil, nil, nil)
},
marshalled: `{"jsonrpc":"1.0","method":"listtransactions","params":[],"id":1}`,
unmarshalled: &btcjson.ListTransactionsCmd{
Account: nil,
Count: btcjson.Int(10),
From: btcjson.Int(0),
IncludeWatchOnly: btcjson.Bool(false),
},
},
{
name: "listtransactions optional1",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("listtransactions", "acct")
},
staticCmd: func() interface{} {
return btcjson.NewListTransactionsCmd(btcjson.String("acct"), nil, nil, nil)
},
marshalled: `{"jsonrpc":"1.0","method":"listtransactions","params":["acct"],"id":1}`,
unmarshalled: &btcjson.ListTransactionsCmd{
Account: btcjson.String("acct"),
Count: btcjson.Int(10),
From: btcjson.Int(0),
IncludeWatchOnly: btcjson.Bool(false),
},
},
{
name: "listtransactions optional2",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("listtransactions", "acct", 20)
},
staticCmd: func() interface{} {
return btcjson.NewListTransactionsCmd(btcjson.String("acct"), btcjson.Int(20), nil, nil)
},
marshalled: `{"jsonrpc":"1.0","method":"listtransactions","params":["acct",20],"id":1}`,
unmarshalled: &btcjson.ListTransactionsCmd{
Account: btcjson.String("acct"),
Count: btcjson.Int(20),
From: btcjson.Int(0),
IncludeWatchOnly: btcjson.Bool(false),
},
},
{
name: "listtransactions optional3",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("listtransactions", "acct", 20, 1)
},
staticCmd: func() interface{} {
return btcjson.NewListTransactionsCmd(btcjson.String("acct"), btcjson.Int(20),
btcjson.Int(1), nil)
},
marshalled: `{"jsonrpc":"1.0","method":"listtransactions","params":["acct",20,1],"id":1}`,
unmarshalled: &btcjson.ListTransactionsCmd{
Account: btcjson.String("acct"),
Count: btcjson.Int(20),
From: btcjson.Int(1),
IncludeWatchOnly: btcjson.Bool(false),
},
},
{
name: "listtransactions optional4",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("listtransactions", "acct", 20, 1, true)
},
staticCmd: func() interface{} {
return btcjson.NewListTransactionsCmd(btcjson.String("acct"), btcjson.Int(20),
btcjson.Int(1), btcjson.Bool(true))
},
marshalled: `{"jsonrpc":"1.0","method":"listtransactions","params":["acct",20,1,true],"id":1}`,
unmarshalled: &btcjson.ListTransactionsCmd{
Account: btcjson.String("acct"),
Count: btcjson.Int(20),
From: btcjson.Int(1),
IncludeWatchOnly: btcjson.Bool(true),
},
},
{
name: "listunspent",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("listunspent")
},
staticCmd: func() interface{} {
return btcjson.NewListUnspentCmd(nil, nil, nil)
},
marshalled: `{"jsonrpc":"1.0","method":"listunspent","params":[],"id":1}`,
unmarshalled: &btcjson.ListUnspentCmd{
MinConf: btcjson.Int(1),
MaxConf: btcjson.Int(9999999),
Addresses: nil,
},
},
{
name: "listunspent optional1",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("listunspent", 6)
},
staticCmd: func() interface{} {
return btcjson.NewListUnspentCmd(btcjson.Int(6), nil, nil)
},
marshalled: `{"jsonrpc":"1.0","method":"listunspent","params":[6],"id":1}`,
unmarshalled: &btcjson.ListUnspentCmd{
MinConf: btcjson.Int(6),
MaxConf: btcjson.Int(9999999),
Addresses: nil,
},
},
{
name: "listunspent optional2",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("listunspent", 6, 100)
},
staticCmd: func() interface{} {
return btcjson.NewListUnspentCmd(btcjson.Int(6), btcjson.Int(100), nil)
},
marshalled: `{"jsonrpc":"1.0","method":"listunspent","params":[6,100],"id":1}`,
unmarshalled: &btcjson.ListUnspentCmd{
MinConf: btcjson.Int(6),
MaxConf: btcjson.Int(100),
Addresses: nil,
},
},
{
name: "listunspent optional3",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("listunspent", 6, 100, []string{"1Address", "1Address2"})
},
staticCmd: func() interface{} {
return btcjson.NewListUnspentCmd(btcjson.Int(6), btcjson.Int(100),
&[]string{"1Address", "1Address2"})
},
marshalled: `{"jsonrpc":"1.0","method":"listunspent","params":[6,100,["1Address","1Address2"]],"id":1}`,
unmarshalled: &btcjson.ListUnspentCmd{
MinConf: btcjson.Int(6),
MaxConf: btcjson.Int(100),
Addresses: &[]string{"1Address", "1Address2"},
},
},
{
name: "lockunspent",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("lockunspent", true, `[{"txid":"123","vout":1}]`)
},
staticCmd: func() interface{} {
txInputs := []btcjson.TransactionInput{
{Txid: "123", Vout: 1},
}
return btcjson.NewLockUnspentCmd(true, txInputs)
},
marshalled: `{"jsonrpc":"1.0","method":"lockunspent","params":[true,[{"txid":"123","vout":1}]],"id":1}`,
unmarshalled: &btcjson.LockUnspentCmd{
Unlock: true,
Transactions: []btcjson.TransactionInput{
{Txid: "123", Vout: 1},
},
},
},
{
name: "move",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("move", "from", "to", 0.5)
},
staticCmd: func() interface{} {
return btcjson.NewMoveCmd("from", "to", 0.5, nil, nil)
},
marshalled: `{"jsonrpc":"1.0","method":"move","params":["from","to",0.5],"id":1}`,
unmarshalled: &btcjson.MoveCmd{
FromAccount: "from",
ToAccount: "to",
Amount: 0.5,
MinConf: btcjson.Int(1),
Comment: nil,
},
},
{
name: "move optional1",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("move", "from", "to", 0.5, 6)
},
staticCmd: func() interface{} {
return btcjson.NewMoveCmd("from", "to", 0.5, btcjson.Int(6), nil)
},
marshalled: `{"jsonrpc":"1.0","method":"move","params":["from","to",0.5,6],"id":1}`,
unmarshalled: &btcjson.MoveCmd{
FromAccount: "from",
ToAccount: "to",
Amount: 0.5,
MinConf: btcjson.Int(6),
Comment: nil,
},
},
{
name: "move optional2",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("move", "from", "to", 0.5, 6, "comment")
},
staticCmd: func() interface{} {
return btcjson.NewMoveCmd("from", "to", 0.5, btcjson.Int(6), btcjson.String("comment"))
},
marshalled: `{"jsonrpc":"1.0","method":"move","params":["from","to",0.5,6,"comment"],"id":1}`,
unmarshalled: &btcjson.MoveCmd{
FromAccount: "from",
ToAccount: "to",
Amount: 0.5,
MinConf: btcjson.Int(6),
Comment: btcjson.String("comment"),
},
},
{
name: "sendfrom",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("sendfrom", "from", "1Address", 0.5)
},
staticCmd: func() interface{} {
return btcjson.NewSendFromCmd("from", "1Address", 0.5, nil, nil, nil)
},
marshalled: `{"jsonrpc":"1.0","method":"sendfrom","params":["from","1Address",0.5],"id":1}`,
unmarshalled: &btcjson.SendFromCmd{
FromAccount: "from",
ToAddress: "1Address",
Amount: 0.5,
MinConf: btcjson.Int(1),
Comment: nil,
CommentTo: nil,
},
},
{
name: "sendfrom optional1",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("sendfrom", "from", "1Address", 0.5, 6)
},
staticCmd: func() interface{} {
return btcjson.NewSendFromCmd("from", "1Address", 0.5, btcjson.Int(6), nil, nil)
},
marshalled: `{"jsonrpc":"1.0","method":"sendfrom","params":["from","1Address",0.5,6],"id":1}`,
unmarshalled: &btcjson.SendFromCmd{
FromAccount: "from",
ToAddress: "1Address",
Amount: 0.5,
MinConf: btcjson.Int(6),
Comment: nil,
CommentTo: nil,
},
},
{
name: "sendfrom optional2",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("sendfrom", "from", "1Address", 0.5, 6, "comment")
},
staticCmd: func() interface{} {
return btcjson.NewSendFromCmd("from", "1Address", 0.5, btcjson.Int(6),
btcjson.String("comment"), nil)
},
marshalled: `{"jsonrpc":"1.0","method":"sendfrom","params":["from","1Address",0.5,6,"comment"],"id":1}`,
unmarshalled: &btcjson.SendFromCmd{
FromAccount: "from",
ToAddress: "1Address",
Amount: 0.5,
MinConf: btcjson.Int(6),
Comment: btcjson.String("comment"),
CommentTo: nil,
},
},
{
name: "sendfrom optional3",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("sendfrom", "from", "1Address", 0.5, 6, "comment", "commentto")
},
staticCmd: func() interface{} {
return btcjson.NewSendFromCmd("from", "1Address", 0.5, btcjson.Int(6),
btcjson.String("comment"), btcjson.String("commentto"))
},
marshalled: `{"jsonrpc":"1.0","method":"sendfrom","params":["from","1Address",0.5,6,"comment","commentto"],"id":1}`,
unmarshalled: &btcjson.SendFromCmd{
FromAccount: "from",
ToAddress: "1Address",
Amount: 0.5,
MinConf: btcjson.Int(6),
Comment: btcjson.String("comment"),
CommentTo: btcjson.String("commentto"),
},
},
{
name: "sendmany",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("sendmany", "from", `{"1Address":0.5}`)
},
staticCmd: func() interface{} {
amounts := map[string]float64{"1Address": 0.5}
return btcjson.NewSendManyCmd("from", amounts, nil, nil)
},
marshalled: `{"jsonrpc":"1.0","method":"sendmany","params":["from",{"1Address":0.5}],"id":1}`,
unmarshalled: &btcjson.SendManyCmd{
FromAccount: "from",
Amounts: map[string]float64{"1Address": 0.5},
MinConf: btcjson.Int(1),
Comment: nil,
},
},
{
name: "sendmany optional1",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("sendmany", "from", `{"1Address":0.5}`, 6)
},
staticCmd: func() interface{} {
amounts := map[string]float64{"1Address": 0.5}
return btcjson.NewSendManyCmd("from", amounts, btcjson.Int(6), nil)
},
marshalled: `{"jsonrpc":"1.0","method":"sendmany","params":["from",{"1Address":0.5},6],"id":1}`,
unmarshalled: &btcjson.SendManyCmd{
FromAccount: "from",
Amounts: map[string]float64{"1Address": 0.5},
MinConf: btcjson.Int(6),
Comment: nil,
},
},
{
name: "sendmany optional2",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("sendmany", "from", `{"1Address":0.5}`, 6, "comment")
},
staticCmd: func() interface{} {
amounts := map[string]float64{"1Address": 0.5}
return btcjson.NewSendManyCmd("from", amounts, btcjson.Int(6), btcjson.String("comment"))
},
marshalled: `{"jsonrpc":"1.0","method":"sendmany","params":["from",{"1Address":0.5},6,"comment"],"id":1}`,
unmarshalled: &btcjson.SendManyCmd{
FromAccount: "from",
Amounts: map[string]float64{"1Address": 0.5},
MinConf: btcjson.Int(6),
Comment: btcjson.String("comment"),
},
},
{
name: "sendtoaddress",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("sendtoaddress", "1Address", 0.5)
},
staticCmd: func() interface{} {
return btcjson.NewSendToAddressCmd("1Address", 0.5, nil, nil)
},
marshalled: `{"jsonrpc":"1.0","method":"sendtoaddress","params":["1Address",0.5],"id":1}`,
unmarshalled: &btcjson.SendToAddressCmd{
Address: "1Address",
Amount: 0.5,
Comment: nil,
CommentTo: nil,
},
},
{
name: "sendtoaddress optional1",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("sendtoaddress", "1Address", 0.5, "comment", "commentto")
},
staticCmd: func() interface{} {
return btcjson.NewSendToAddressCmd("1Address", 0.5, btcjson.String("comment"),
btcjson.String("commentto"))
},
marshalled: `{"jsonrpc":"1.0","method":"sendtoaddress","params":["1Address",0.5,"comment","commentto"],"id":1}`,
unmarshalled: &btcjson.SendToAddressCmd{
Address: "1Address",
Amount: 0.5,
Comment: btcjson.String("comment"),
CommentTo: btcjson.String("commentto"),
},
},
{
name: "setaccount",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("setaccount", "1Address", "acct")
},
staticCmd: func() interface{} {
return btcjson.NewSetAccountCmd("1Address", "acct")
},
marshalled: `{"jsonrpc":"1.0","method":"setaccount","params":["1Address","acct"],"id":1}`,
unmarshalled: &btcjson.SetAccountCmd{
Address: "1Address",
Account: "acct",
},
},
{
name: "settxfee",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("settxfee", 0.0001)
},
staticCmd: func() interface{} {
return btcjson.NewSetTxFeeCmd(0.0001)
},
marshalled: `{"jsonrpc":"1.0","method":"settxfee","params":[0.0001],"id":1}`,
unmarshalled: &btcjson.SetTxFeeCmd{
Amount: 0.0001,
},
},
{
name: "signmessage",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("signmessage", "1Address", "message")
},
staticCmd: func() interface{} {
return btcjson.NewSignMessageCmd("1Address", "message")
},
marshalled: `{"jsonrpc":"1.0","method":"signmessage","params":["1Address","message"],"id":1}`,
unmarshalled: &btcjson.SignMessageCmd{
Address: "1Address",
Message: "message",
},
},
{
name: "signrawtransaction",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("signrawtransaction", "001122")
},
staticCmd: func() interface{} {
return btcjson.NewSignRawTransactionCmd("001122", nil, nil, nil)
},
marshalled: `{"jsonrpc":"1.0","method":"signrawtransaction","params":["001122"],"id":1}`,
unmarshalled: &btcjson.SignRawTransactionCmd{
RawTx: "001122",
Inputs: nil,
PrivKeys: nil,
Flags: btcjson.String("ALL"),
},
},
{
name: "signrawtransaction optional1",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("signrawtransaction", "001122", `[{"txid":"123","vout":1,"scriptPubKey":"00","redeemScript":"01"}]`)
},
staticCmd: func() interface{} {
txInputs := []btcjson.RawTxInput{
{
Txid: "123",
Vout: 1,
ScriptPubKey: "00",
RedeemScript: "01",
},
}
return btcjson.NewSignRawTransactionCmd("001122", &txInputs, nil, nil)
},
marshalled: `{"jsonrpc":"1.0","method":"signrawtransaction","params":["001122",[{"txid":"123","vout":1,"scriptPubKey":"00","redeemScript":"01"}]],"id":1}`,
unmarshalled: &btcjson.SignRawTransactionCmd{
RawTx: "001122",
Inputs: &[]btcjson.RawTxInput{
{
Txid: "123",
Vout: 1,
ScriptPubKey: "00",
RedeemScript: "01",
},
},
PrivKeys: nil,
Flags: btcjson.String("ALL"),
},
},
{
name: "signrawtransaction optional2",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("signrawtransaction", "001122", `[]`, `["abc"]`)
},
staticCmd: func() interface{} {
txInputs := []btcjson.RawTxInput{}
privKeys := []string{"abc"}
return btcjson.NewSignRawTransactionCmd("001122", &txInputs, &privKeys, nil)
},
marshalled: `{"jsonrpc":"1.0","method":"signrawtransaction","params":["001122",[],["abc"]],"id":1}`,
unmarshalled: &btcjson.SignRawTransactionCmd{
RawTx: "001122",
Inputs: &[]btcjson.RawTxInput{},
PrivKeys: &[]string{"abc"},
Flags: btcjson.String("ALL"),
},
},
{
name: "signrawtransaction optional3",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("signrawtransaction", "001122", `[]`, `[]`, "ALL")
},
staticCmd: func() interface{} {
txInputs := []btcjson.RawTxInput{}
privKeys := []string{}
return btcjson.NewSignRawTransactionCmd("001122", &txInputs, &privKeys,
btcjson.String("ALL"))
},
marshalled: `{"jsonrpc":"1.0","method":"signrawtransaction","params":["001122",[],[],"ALL"],"id":1}`,
unmarshalled: &btcjson.SignRawTransactionCmd{
RawTx: "001122",
Inputs: &[]btcjson.RawTxInput{},
PrivKeys: &[]string{},
Flags: btcjson.String("ALL"),
},
},
{
name: "signrawtransactionwithwallet",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("signrawtransactionwithwallet", "001122")
},
staticCmd: func() interface{} {
return btcjson.NewSignRawTransactionWithWalletCmd("001122", nil, nil)
},
marshalled: `{"jsonrpc":"1.0","method":"signrawtransactionwithwallet","params":["001122"],"id":1}`,
unmarshalled: &btcjson.SignRawTransactionWithWalletCmd{
RawTx: "001122",
Inputs: nil,
SigHashType: btcjson.String("ALL"),
},
},
{
name: "signrawtransactionwithwallet optional1",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("signrawtransactionwithwallet", "001122", `[{"txid":"123","vout":1,"scriptPubKey":"00","redeemScript":"01","witnessScript":"02","amount":1.5}]`)
},
staticCmd: func() interface{} {
txInputs := []btcjson.RawTxWitnessInput{
{
Txid: "123",
Vout: 1,
ScriptPubKey: "00",
RedeemScript: btcjson.String("01"),
WitnessScript: btcjson.String("02"),
Amount: btcjson.Float64(1.5),
},
}
return btcjson.NewSignRawTransactionWithWalletCmd("001122", &txInputs, nil)
},
marshalled: `{"jsonrpc":"1.0","method":"signrawtransactionwithwallet","params":["001122",[{"txid":"123","vout":1,"scriptPubKey":"00","redeemScript":"01","witnessScript":"02","amount":1.5}]],"id":1}`,
unmarshalled: &btcjson.SignRawTransactionWithWalletCmd{
RawTx: "001122",
Inputs: &[]btcjson.RawTxWitnessInput{
{
Txid: "123",
Vout: 1,
ScriptPubKey: "00",
RedeemScript: btcjson.String("01"),
WitnessScript: btcjson.String("02"),
Amount: btcjson.Float64(1.5),
},
},
SigHashType: btcjson.String("ALL"),
},
},
{
name: "signrawtransactionwithwallet optional1 with blank fields in input",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("signrawtransactionwithwallet", "001122", `[{"txid":"123","vout":1,"scriptPubKey":"00","redeemScript":"01"}]`)
},
staticCmd: func() interface{} {
txInputs := []btcjson.RawTxWitnessInput{
{
Txid: "123",
Vout: 1,
ScriptPubKey: "00",
RedeemScript: btcjson.String("01"),
},
}
return btcjson.NewSignRawTransactionWithWalletCmd("001122", &txInputs, nil)
},
marshalled: `{"jsonrpc":"1.0","method":"signrawtransactionwithwallet","params":["001122",[{"txid":"123","vout":1,"scriptPubKey":"00","redeemScript":"01"}]],"id":1}`,
unmarshalled: &btcjson.SignRawTransactionWithWalletCmd{
RawTx: "001122",
Inputs: &[]btcjson.RawTxWitnessInput{
{
Txid: "123",
Vout: 1,
ScriptPubKey: "00",
RedeemScript: btcjson.String("01"),
},
},
SigHashType: btcjson.String("ALL"),
},
},
{
name: "signrawtransactionwithwallet optional2",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("signrawtransactionwithwallet", "001122", `[]`, "ALL")
},
staticCmd: func() interface{} {
txInputs := []btcjson.RawTxWitnessInput{}
return btcjson.NewSignRawTransactionWithWalletCmd("001122", &txInputs, btcjson.String("ALL"))
},
marshalled: `{"jsonrpc":"1.0","method":"signrawtransactionwithwallet","params":["001122",[],"ALL"],"id":1}`,
unmarshalled: &btcjson.SignRawTransactionWithWalletCmd{
RawTx: "001122",
Inputs: &[]btcjson.RawTxWitnessInput{},
SigHashType: btcjson.String("ALL"),
},
},
{
name: "walletlock",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("walletlock")
},
staticCmd: func() interface{} {
return btcjson.NewWalletLockCmd()
},
marshalled: `{"jsonrpc":"1.0","method":"walletlock","params":[],"id":1}`,
unmarshalled: &btcjson.WalletLockCmd{},
},
{
name: "walletpassphrase",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("walletpassphrase", "pass", 60)
},
staticCmd: func() interface{} {
return btcjson.NewWalletPassphraseCmd("pass", 60)
},
marshalled: `{"jsonrpc":"1.0","method":"walletpassphrase","params":["pass",60],"id":1}`,
unmarshalled: &btcjson.WalletPassphraseCmd{
Passphrase: "pass",
Timeout: 60,
},
},
{
name: "walletpassphrasechange",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd("walletpassphrasechange", "old", "new")
},
staticCmd: func() interface{} {
return btcjson.NewWalletPassphraseChangeCmd("old", "new")
},
marshalled: `{"jsonrpc":"1.0","method":"walletpassphrasechange","params":["old","new"],"id":1}`,
unmarshalled: &btcjson.WalletPassphraseChangeCmd{
OldPassphrase: "old",
NewPassphrase: "new",
},
},
{
name: "importmulti with descriptor + options",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd(
"importmulti",
// Cannot use a native string, due to special types like timestamp.
[]btcjson.ImportMultiRequest{
{Descriptor: btcjson.String("123"), Timestamp: btcjson.TimestampOrNow{Value: 0}},
},
`{"rescan": true}`,
)
},
staticCmd: func() interface{} {
requests := []btcjson.ImportMultiRequest{
{Descriptor: btcjson.String("123"), Timestamp: btcjson.TimestampOrNow{Value: 0}},
}
options := btcjson.ImportMultiOptions{Rescan: true}
return btcjson.NewImportMultiCmd(requests, &options)
},
marshalled: `{"jsonrpc":"1.0","method":"importmulti","params":[[{"desc":"123","timestamp":0}],{"rescan":true}],"id":1}`,
unmarshalled: &btcjson.ImportMultiCmd{
Requests: []btcjson.ImportMultiRequest{
{
Descriptor: btcjson.String("123"),
Timestamp: btcjson.TimestampOrNow{Value: 0},
},
},
Options: &btcjson.ImportMultiOptions{Rescan: true},
},
},
{
name: "importmulti with descriptor + no options",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd(
"importmulti",
// Cannot use a native string, due to special types like timestamp.
[]btcjson.ImportMultiRequest{
{
Descriptor: btcjson.String("123"),
Timestamp: btcjson.TimestampOrNow{Value: 0},
WatchOnly: btcjson.Bool(false),
Internal: btcjson.Bool(true),
Label: btcjson.String("aaa"),
KeyPool: btcjson.Bool(false),
},
},
)
},
staticCmd: func() interface{} {
requests := []btcjson.ImportMultiRequest{
{
Descriptor: btcjson.String("123"),
Timestamp: btcjson.TimestampOrNow{Value: 0},
WatchOnly: btcjson.Bool(false),
Internal: btcjson.Bool(true),
Label: btcjson.String("aaa"),
KeyPool: btcjson.Bool(false),
},
}
return btcjson.NewImportMultiCmd(requests, nil)
},
marshalled: `{"jsonrpc":"1.0","method":"importmulti","params":[[{"desc":"123","timestamp":0,"internal":true,"watchonly":false,"label":"aaa","keypool":false}]],"id":1}`,
unmarshalled: &btcjson.ImportMultiCmd{
Requests: []btcjson.ImportMultiRequest{
{
Descriptor: btcjson.String("123"),
Timestamp: btcjson.TimestampOrNow{Value: 0},
WatchOnly: btcjson.Bool(false),
Internal: btcjson.Bool(true),
Label: btcjson.String("aaa"),
KeyPool: btcjson.Bool(false),
},
},
},
},
{
name: "importmulti with descriptor + string timestamp",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd(
"importmulti",
// Cannot use a native string, due to special types like timestamp.
[]btcjson.ImportMultiRequest{
{
Descriptor: btcjson.String("123"),
Timestamp: btcjson.TimestampOrNow{Value: "now"},
},
},
)
},
staticCmd: func() interface{} {
requests := []btcjson.ImportMultiRequest{
{Descriptor: btcjson.String("123"), Timestamp: btcjson.TimestampOrNow{Value: "now"}},
}
return btcjson.NewImportMultiCmd(requests, nil)
},
marshalled: `{"jsonrpc":"1.0","method":"importmulti","params":[[{"desc":"123","timestamp":"now"}]],"id":1}`,
unmarshalled: &btcjson.ImportMultiCmd{
Requests: []btcjson.ImportMultiRequest{
{Descriptor: btcjson.String("123"), Timestamp: btcjson.TimestampOrNow{Value: "now"}},
},
},
},
{
name: "importmulti with scriptPubKey script",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd(
"importmulti",
// Cannot use a native string, due to special types like timestamp and scriptPubKey
[]btcjson.ImportMultiRequest{
{
ScriptPubKey: &btcjson.ScriptPubKey{Value: "script"},
RedeemScript: btcjson.String("123"),
Timestamp: btcjson.TimestampOrNow{Value: 0},
PubKeys: &[]string{"aaa"},
},
},
)
},
staticCmd: func() interface{} {
requests := []btcjson.ImportMultiRequest{
{
ScriptPubKey: &btcjson.ScriptPubKey{Value: "script"},
RedeemScript: btcjson.String("123"),
Timestamp: btcjson.TimestampOrNow{Value: 0},
PubKeys: &[]string{"aaa"},
},
}
return btcjson.NewImportMultiCmd(requests, nil)
},
marshalled: `{"jsonrpc":"1.0","method":"importmulti","params":[[{"scriptPubKey":"script","timestamp":0,"redeemscript":"123","pubkeys":["aaa"]}]],"id":1}`,
unmarshalled: &btcjson.ImportMultiCmd{
Requests: []btcjson.ImportMultiRequest{
{
ScriptPubKey: &btcjson.ScriptPubKey{Value: "script"},
RedeemScript: btcjson.String("123"),
Timestamp: btcjson.TimestampOrNow{Value: 0},
PubKeys: &[]string{"aaa"},
},
},
},
},
{
name: "importmulti with scriptPubKey address",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd(
"importmulti",
// Cannot use a native string, due to special types like timestamp and scriptPubKey
[]btcjson.ImportMultiRequest{
{
ScriptPubKey: &btcjson.ScriptPubKey{Value: btcjson.ScriptPubKeyAddress{Address: "addr"}},
WitnessScript: btcjson.String("123"),
Timestamp: btcjson.TimestampOrNow{Value: 0},
Keys: &[]string{"aaa"},
},
},
)
},
staticCmd: func() interface{} {
requests := []btcjson.ImportMultiRequest{
{
ScriptPubKey: &btcjson.ScriptPubKey{Value: btcjson.ScriptPubKeyAddress{Address: "addr"}},
WitnessScript: btcjson.String("123"),
Timestamp: btcjson.TimestampOrNow{Value: 0},
Keys: &[]string{"aaa"},
},
}
return btcjson.NewImportMultiCmd(requests, nil)
},
marshalled: `{"jsonrpc":"1.0","method":"importmulti","params":[[{"scriptPubKey":{"address":"addr"},"timestamp":0,"witnessscript":"123","keys":["aaa"]}]],"id":1}`,
unmarshalled: &btcjson.ImportMultiCmd{
Requests: []btcjson.ImportMultiRequest{
{
ScriptPubKey: &btcjson.ScriptPubKey{Value: btcjson.ScriptPubKeyAddress{Address: "addr"}},
WitnessScript: btcjson.String("123"),
Timestamp: btcjson.TimestampOrNow{Value: 0},
Keys: &[]string{"aaa"},
},
},
},
},
{
name: "importmulti with ranged (int) descriptor",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd(
"importmulti",
// Cannot use a native string, due to special types like timestamp.
[]btcjson.ImportMultiRequest{
{
Descriptor: btcjson.String("123"),
Timestamp: btcjson.TimestampOrNow{Value: 0},
Range: &btcjson.DescriptorRange{Value: 7},
},
},
)
},
staticCmd: func() interface{} {
requests := []btcjson.ImportMultiRequest{
{
Descriptor: btcjson.String("123"),
Timestamp: btcjson.TimestampOrNow{Value: 0},
Range: &btcjson.DescriptorRange{Value: 7},
},
}
return btcjson.NewImportMultiCmd(requests, nil)
},
marshalled: `{"jsonrpc":"1.0","method":"importmulti","params":[[{"desc":"123","timestamp":0,"range":7}]],"id":1}`,
unmarshalled: &btcjson.ImportMultiCmd{
Requests: []btcjson.ImportMultiRequest{
{
Descriptor: btcjson.String("123"),
Timestamp: btcjson.TimestampOrNow{Value: 0},
Range: &btcjson.DescriptorRange{Value: 7},
},
},
},
},
{
name: "importmulti with ranged (slice) descriptor",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd(
"importmulti",
// Cannot use a native string, due to special types like timestamp.
[]btcjson.ImportMultiRequest{
{
Descriptor: btcjson.String("123"),
Timestamp: btcjson.TimestampOrNow{Value: 0},
Range: &btcjson.DescriptorRange{Value: []int{1, 7}},
},
},
)
},
staticCmd: func() interface{} {
requests := []btcjson.ImportMultiRequest{
{
Descriptor: btcjson.String("123"),
Timestamp: btcjson.TimestampOrNow{Value: 0},
Range: &btcjson.DescriptorRange{Value: []int{1, 7}},
},
}
return btcjson.NewImportMultiCmd(requests, nil)
},
marshalled: `{"jsonrpc":"1.0","method":"importmulti","params":[[{"desc":"123","timestamp":0,"range":[1,7]}]],"id":1}`,
unmarshalled: &btcjson.ImportMultiCmd{
Requests: []btcjson.ImportMultiRequest{
{
Descriptor: btcjson.String("123"),
Timestamp: btcjson.TimestampOrNow{Value: 0},
Range: &btcjson.DescriptorRange{Value: []int{1, 7}},
},
},
},
},
{
name: "walletcreatefundedpsbt",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd(
"walletcreatefundedpsbt",
[]btcjson.PsbtInput{
{
Txid: "1234",
Vout: 0, | Sequence: 0,
},
},
[]btcjson.PsbtOutput{
btcjson.NewPsbtOutput("1234", btcutil.Amount(1234)),
btcjson.NewPsbtDataOutput([]byte{1, 2, 3, 4}),
},
btcjson.Uint32(1),
btcjson.WalletCreateFundedPsbtOpts{},
btcjson.Bool(true),
)
},
staticCmd: func() interface{} {
return btcjson.NewWalletCreateFundedPsbtCmd(
[]btcjson.PsbtInput{
{
Txid: "1234",
Vout: 0,
Sequence: 0,
},
},
[]btcjson.PsbtOutput{
btcjson.NewPsbtOutput("1234", btcutil.Amount(1234)),
btcjson.NewPsbtDataOutput([]byte{1, 2, 3, 4}),
},
btcjson.Uint32(1),
&btcjson.WalletCreateFundedPsbtOpts{},
btcjson.Bool(true),
)
},
marshalled: `{"jsonrpc":"1.0","method":"walletcreatefundedpsbt","params":[[{"txid":"1234","vout":0,"sequence":0}],[{"1234":0.00001234},{"data":"01020304"}],1,{},true],"id":1}`,
unmarshalled: &btcjson.WalletCreateFundedPsbtCmd{
Inputs: []btcjson.PsbtInput{
{
Txid: "1234",
Vout: 0,
Sequence: 0,
},
},
Outputs: []btcjson.PsbtOutput{
btcjson.NewPsbtOutput("1234", btcutil.Amount(1234)),
btcjson.NewPsbtDataOutput([]byte{1, 2, 3, 4}),
},
Locktime: btcjson.Uint32(1),
Options: &btcjson.WalletCreateFundedPsbtOpts{},
Bip32Derivs: btcjson.Bool(true),
},
},
{
name: "walletprocesspsbt",
newCmd: func() (interface{}, error) {
return btcjson.NewCmd(
"walletprocesspsbt", "1234", btcjson.Bool(true), btcjson.String("ALL"), btcjson.Bool(true))
},
staticCmd: func() interface{} {
return btcjson.NewWalletProcessPsbtCmd(
"1234", btcjson.Bool(true), btcjson.String("ALL"), btcjson.Bool(true))
},
marshalled: `{"jsonrpc":"1.0","method":"walletprocesspsbt","params":["1234",true,"ALL",true],"id":1}`,
unmarshalled: &btcjson.WalletProcessPsbtCmd{
Psbt: "1234",
Sign: btcjson.Bool(true),
SighashType: btcjson.String("ALL"),
Bip32Derivs: btcjson.Bool(true),
},
},
}
t.Logf("Running %d tests", len(tests))
for i, test := range tests {
// Marshal the command as created by the new static command
// creation function.
marshalled, err := btcjson.MarshalCmd(btcjson.RpcVersion1, testID, test.staticCmd())
if err != nil {
t.Errorf("MarshalCmd #%d (%s) unexpected error: %v", i,
test.name, err)
continue
}
if !bytes.Equal(marshalled, []byte(test.marshalled)) {
t.Errorf("Test #%d (%s) unexpected marshalled data - "+
"got %s, want %s", i, test.name, marshalled,
test.marshalled)
continue
}
// Ensure the command is created without error via the generic
// new command creation function.
cmd, err := test.newCmd()
if err != nil {
t.Errorf("Test #%d (%s) unexpected NewCmd error: %v ",
i, test.name, err)
}
// Marshal the command as created by the generic new command
// creation function.
marshalled, err = btcjson.MarshalCmd(btcjson.RpcVersion1, testID, cmd)
if err != nil {
t.Errorf("MarshalCmd #%d (%s) unexpected error: %v", i,
test.name, err)
continue
}
if !bytes.Equal(marshalled, []byte(test.marshalled)) {
t.Errorf("Test #%d (%s) unexpected marshalled data - "+
"got %s, want %s", i, test.name, marshalled,
test.marshalled)
continue
}
var request btcjson.Request
if err := json.Unmarshal(marshalled, &request); err != nil {
t.Errorf("Test #%d (%s) unexpected error while "+
"unmarshalling JSON-RPC request: %v", i,
test.name, err)
continue
}
cmd, err = btcjson.UnmarshalCmd(&request)
if err != nil {
t.Errorf("UnmarshalCmd #%d (%s) unexpected error: %v", i,
test.name, err)
continue
}
if !reflect.DeepEqual(cmd, test.unmarshalled) {
t.Errorf("Test #%d (%s) unexpected unmarshalled command "+
"- got %s, want %s", i, test.name,
fmt.Sprintf("(%T) %+[1]v", cmd),
fmt.Sprintf("(%T) %+[1]v\n", test.unmarshalled))
continue
}
}
} | |
actions.service.ts | import { Injectable } from '@angular/core';
import { PPerspective } from './models/perspective.model';
import { PDimension } from './models/dimension.model';
import { PValue } from './models/value.model';
import { AngularFirestore, AngularFirestoreCollection, AngularFirestoreDocument } from 'angularfire2/firestore';
import { Observable } from 'rxjs';
import { map } from 'rxjs/operators';
@Injectable({
providedIn: 'root'
})
export class | {
ppersCollection: AngularFirestoreCollection<any>;
ppersDocument: AngularFirestoreDocument<any>;
pdimsCollection: AngularFirestoreCollection<any>;
pdimsDocument: AngularFirestoreDocument<any>;
pvalsCollection: AngularFirestoreCollection<any>;
pvalsDocument: AngularFirestoreDocument<any>;
constructor(private afs: AngularFirestore) {
this.ppersCollection = this.afs.collection('perspectives', (ref) => ref.orderBy('time', 'desc').limit(5));
this.pdimsCollection = this.afs.collection('dimensions', (ref) => ref.orderBy('time', 'desc').limit(5));
this.pvalsCollection = this.afs.collection('values', (ref) => ref.orderBy('time', 'desc').limit(5));
}
getAllPPers(): Observable<any[]> {
// ['added', 'modified', 'removed']
return this.ppersCollection.snapshotChanges().pipe(
map((actions) => {
return actions.map((a) => {
const data = a.payload.doc.data();
return { id: a.payload.doc.id, ...data };
});
})
);
}
getAllPDimss(): Observable<any[]> {
// ['added', 'modified', 'removed']
return this.pdimsCollection.snapshotChanges().pipe(
map((actions) => {
return actions.map((a) => {
const data = a.payload.doc.data();
return { id: a.payload.doc.id, ...data };
});
})
);
}
getAllPVals(): Observable<any[]> {
// ['added', 'modified', 'removed']
return this.pvalsCollection.snapshotChanges().pipe(
map((actions) => {
return actions.map((a) => {
const data = a.payload.doc.data();
return { id: a.payload.doc.id, ...data };
});
})
);
}
getPPers(id: string) {
return this.afs.doc<any>(`perspectives/${id}`);
}
getPDims(id: string) {
return this.afs.doc<any>(`dimensions/${id}`);
}
getPVals(id: string) {
return this.afs.doc<any>(`values/${id}`);
}
updatePPers(id: string, data: any) {
return this.getPPers(id).update(data);
}
updatePDims(id: string, data: any) {
return this.getPDims(id).update(data);
}
updatePVals(id: string, data: any) {
return this.getPVals(id).update(data);
}
deletePPers(id: string) {
return this.getPPers(id).delete();
}
deletePDims(id: string) {
return this.getPDims(id).delete();
}
deletePVals(id: string) {
return this.getPVals(id).delete();
}
//Save NEW PERSPECTIVE
// Check if values/dimensions are new
newPPerspective(perspective: PPerspective) {
return this.ppersCollection.add(perspective);
}
updatePPerspective(pperspective: PPerspective) {
}
deletePPerspective(pperspective: PPerspective) {
}
newPDimension(pdimension: PDimension) {
}
updatePDimension(pdimension: PDimension) {
}
deletePDimension(pdimension: PDimension) {
}
newPValue(pvalue: PValue) {
}
updatePValue(pvalue: PValue) {
}
deletePValue(pvalue: PValue) {
}
}
| ActionsService |
tennisball-outline.js | import * as React from "react";
import Svg, { Circle, Path } from "react-native-svg";
function SvgTennisballOutline(props) {
return <Svg xmlns="http://www.w3.org/2000/svg" width="1em" height="1em" viewBox="0 0 512 512" {...props}><Circle cx={256} cy={256} r={208} fill="none" stroke={props.fill} strokeLinecap="round" strokeLinejoin="round" strokeWidth={32} /><Path d="M461.43 271.44c-5.09.37-8.24.56-13.43.56-114.88 0-208-93.12-208-208 0-5.37.2-8.69.6-14M49.65 240.56S58.84 240 64 240c114.88 0 208 93.12 208 208 0 5.38-.61 14-.61 14" fill="none" stroke={props.fill} strokeLinecap="round" strokeLinejoin="round" strokeWidth={32} /></Svg>; | }
export default SvgTennisballOutline; |
|
timer.go | /*
Copyright 2019 The HAProxy Ingress Controller Authors.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package utils
import (
"fmt"
"strings"
"time"
)
// Timer ...
type Timer struct {
Start time.Time
Ticks []*Tick
observer func(task string, duration time.Duration)
}
// Tick ...
type Tick struct {
Event string
When time.Time
}
// NewTimer ...
func | (observer func(task string, duration time.Duration)) *Timer {
return &Timer{
Start: time.Now(),
observer: observer,
}
}
// Tick ...
func (t *Timer) Tick(eventLabel string) {
now := time.Now()
if t.observer != nil {
var last time.Time
if len(t.Ticks) > 0 {
last = t.Ticks[len(t.Ticks)-1].When
} else {
last = t.Start
}
t.observer(eventLabel, now.Sub(last))
}
t.Ticks = append(t.Ticks, &Tick{
Event: eventLabel,
When: now,
})
}
// AsString ...
func (t *Timer) AsString(totalLabel string) string {
out := make([]string, 0, len(t.Ticks)+1)
last := t.Start
for _, tick := range t.Ticks {
out = append(out, fmt.Sprintf("%s=%fms", tick.Event, tick.When.Sub(last).Seconds()*1000))
last = tick.When
}
if totalLabel != "" {
out = append(out, fmt.Sprintf("%s=%fms", totalLabel, last.Sub(t.Start).Seconds()*1000))
}
return strings.Join(out, " ")
}
| NewTimer |
_site.js | (function($) {
'use strict';
var $body = $('body'),
$doc = $(document);
| $body.toggleClass('nav-opened');
});
$doc.on('click', '[data-toggle="search"]', function(e) {
e.preventDefault();
$body.toggleClass('search-opened');
});
var titleParallax = {
textParallax : document.querySelector('.js-text-parallax'),
init: function() {
window.addEventListener("scroll", function(event) {
titleParallax.update(this.scrollY);
}, false);
},
update: function(scroll) {
this.textParallax.style['-webkit-transform'] = 'translateY('+ (scroll * .5) +'px)';
this.textParallax.style['-moz-transform'] = 'translateY('+ (scroll * .5) +'px)';
this.textParallax.style['transform'] = 'translateY('+ (scroll * .5) +'px)';
this.textParallax.style['opacity'] = (1 - ( scroll * .005 ));
}
};
titleParallax.init();
})(jQuery);
$(function() {
var $results = $('.search__results'),
$input = $('.search__input'),
search_term;
$input.on('keypress', function() {
search_term = $(this).val().toLowerCase();
return $.getJSON('/search.json', function(data) {
var i, j, k, len, len1, result, results, results1, value;
results = [];
$results.html('');
for (j = 0, len = data.length; j < len; j++) {
i = data[j];
value = 0;
if (i.title.toLowerCase().split(search_term).length - 1 !== 0) {
value = 10;
}
if (value !== 0) {
i.value = value;
results.push(i);
}
}
if (results.length > 0) {
results1 = [];
for (k = 0, len1 = results.length; k < len1; k++) {
result = results[k];
results1.push($results.append('<li><a href="' + result.url + '" title="' + result.title + '">' + result.title + '</a></li>'));
}
return results1;
} else {
$results.append('<li>No results found. Sorry.</li>');
}
});
});
}); | $doc.on('click', '[data-toggle="nav"], .nav-opened .site-main', function(e) {
e.preventDefault(); |
cs.js | /*
Copyright (c) 2003-2015, CKSource - Frederico Knabben. All rights reserved.
For licensing, see LICENSE.md or http://ckeditor.com/license | */
CKEDITOR.plugins.setLang("language","cs",{button:"Nastavit jazyk",remove:"Odstranit jazyk"}); |
|
model_niatelemetry_syslog_sys_msg_fac_filter_response.go | /*
Cisco Intersight
Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. The Intersight OpenAPI document defines the complete set of properties that are returned in the HTTP response. From that perspective, a client can expect that no additional properties are returned, unless these properties are explicitly defined in the OpenAPI document. However, when a client uses an older version of the Intersight OpenAPI document, the server may send additional properties because the software is more recent than the client. In that case, the client may receive properties that it does not know about. Some generated SDKs perform a strict validation of the HTTP response body against the OpenAPI document.
API version: 1.0.9-6484
Contact: [email protected]
*/
// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT.
package intersight
import (
"encoding/json"
"fmt"
)
// NiatelemetrySyslogSysMsgFacFilterResponse - The response body of a HTTP GET request for the 'niatelemetry.SyslogSysMsgFacFilter' resource. The value may be one of the following types. 1. When 'tag' is specified in the URL query, the response schema is a summary of the tag usage. 1. When '$apply' is specified in the URL query, the response schema is dynamically-generated schema based on the $apply value. 1. When '$count' is specified in the URL query, the response is a simple object providing the count of the resources. 1. In all other cases, the response is a list of 'niatelemetry.SyslogSysMsgFacFilter' resources.
type NiatelemetrySyslogSysMsgFacFilterResponse struct {
MoAggregateTransform *MoAggregateTransform
MoDocumentCount *MoDocumentCount
MoTagSummary *MoTagSummary
NiatelemetrySyslogSysMsgFacFilterList *NiatelemetrySyslogSysMsgFacFilterList
}
// MoAggregateTransformAsNiatelemetrySyslogSysMsgFacFilterResponse is a convenience function that returns MoAggregateTransform wrapped in NiatelemetrySyslogSysMsgFacFilterResponse
func MoAggregateTransformAsNiatelemetrySyslogSysMsgFacFilterResponse(v *MoAggregateTransform) NiatelemetrySyslogSysMsgFacFilterResponse {
return NiatelemetrySyslogSysMsgFacFilterResponse{MoAggregateTransform: v}
}
// MoDocumentCountAsNiatelemetrySyslogSysMsgFacFilterResponse is a convenience function that returns MoDocumentCount wrapped in NiatelemetrySyslogSysMsgFacFilterResponse
func MoDocumentCountAsNiatelemetrySyslogSysMsgFacFilterResponse(v *MoDocumentCount) NiatelemetrySyslogSysMsgFacFilterResponse {
return NiatelemetrySyslogSysMsgFacFilterResponse{MoDocumentCount: v}
}
// MoTagSummaryAsNiatelemetrySyslogSysMsgFacFilterResponse is a convenience function that returns MoTagSummary wrapped in NiatelemetrySyslogSysMsgFacFilterResponse
func MoTagSummaryAsNiatelemetrySyslogSysMsgFacFilterResponse(v *MoTagSummary) NiatelemetrySyslogSysMsgFacFilterResponse {
return NiatelemetrySyslogSysMsgFacFilterResponse{MoTagSummary: v}
}
// NiatelemetrySyslogSysMsgFacFilterListAsNiatelemetrySyslogSysMsgFacFilterResponse is a convenience function that returns NiatelemetrySyslogSysMsgFacFilterList wrapped in NiatelemetrySyslogSysMsgFacFilterResponse
func NiatelemetrySyslogSysMsgFacFilterListAsNiatelemetrySyslogSysMsgFacFilterResponse(v *NiatelemetrySyslogSysMsgFacFilterList) NiatelemetrySyslogSysMsgFacFilterResponse {
return NiatelemetrySyslogSysMsgFacFilterResponse{NiatelemetrySyslogSysMsgFacFilterList: v}
}
// Unmarshal JSON data into one of the pointers in the struct
func (dst *NiatelemetrySyslogSysMsgFacFilterResponse) UnmarshalJSON(data []byte) error {
var err error
// use discriminator value to speed up the lookup
var jsonDict map[string]interface{}
err = json.Unmarshal(data, &jsonDict)
if err != nil {
return fmt.Errorf("Failed to unmarshal JSON into map for the discriminator lookup.")
}
// check if the discriminator value is 'mo.AggregateTransform'
if jsonDict["ObjectType"] == "mo.AggregateTransform" {
// try to unmarshal JSON data into MoAggregateTransform
err = json.Unmarshal(data, &dst.MoAggregateTransform)
if err == nil {
return nil // data stored in dst.MoAggregateTransform, return on the first match
} else {
dst.MoAggregateTransform = nil
return fmt.Errorf("Failed to unmarshal NiatelemetrySyslogSysMsgFacFilterResponse as MoAggregateTransform: %s", err.Error())
}
}
// check if the discriminator value is 'mo.DocumentCount'
if jsonDict["ObjectType"] == "mo.DocumentCount" {
// try to unmarshal JSON data into MoDocumentCount
err = json.Unmarshal(data, &dst.MoDocumentCount)
if err == nil {
return nil // data stored in dst.MoDocumentCount, return on the first match
} else {
dst.MoDocumentCount = nil
return fmt.Errorf("Failed to unmarshal NiatelemetrySyslogSysMsgFacFilterResponse as MoDocumentCount: %s", err.Error())
}
}
// check if the discriminator value is 'mo.TagSummary'
if jsonDict["ObjectType"] == "mo.TagSummary" {
// try to unmarshal JSON data into MoTagSummary
err = json.Unmarshal(data, &dst.MoTagSummary)
if err == nil {
return nil // data stored in dst.MoTagSummary, return on the first match
} else {
dst.MoTagSummary = nil
return fmt.Errorf("Failed to unmarshal NiatelemetrySyslogSysMsgFacFilterResponse as MoTagSummary: %s", err.Error())
}
}
// check if the discriminator value is 'niatelemetry.SyslogSysMsgFacFilter.List'
if jsonDict["ObjectType"] == "niatelemetry.SyslogSysMsgFacFilter.List" {
// try to unmarshal JSON data into NiatelemetrySyslogSysMsgFacFilterList
err = json.Unmarshal(data, &dst.NiatelemetrySyslogSysMsgFacFilterList)
if err == nil {
return nil // data stored in dst.NiatelemetrySyslogSysMsgFacFilterList, return on the first match
} else {
dst.NiatelemetrySyslogSysMsgFacFilterList = nil
return fmt.Errorf("Failed to unmarshal NiatelemetrySyslogSysMsgFacFilterResponse as NiatelemetrySyslogSysMsgFacFilterList: %s", err.Error())
}
}
return nil
}
// Marshal data from the first non-nil pointers in the struct to JSON
func (src NiatelemetrySyslogSysMsgFacFilterResponse) MarshalJSON() ([]byte, error) {
if src.MoAggregateTransform != nil {
return json.Marshal(&src.MoAggregateTransform)
}
if src.MoDocumentCount != nil {
return json.Marshal(&src.MoDocumentCount)
}
if src.MoTagSummary != nil {
return json.Marshal(&src.MoTagSummary)
}
if src.NiatelemetrySyslogSysMsgFacFilterList != nil {
return json.Marshal(&src.NiatelemetrySyslogSysMsgFacFilterList)
}
return nil, nil // no data in oneOf schemas
}
// Get the actual instance
func (obj *NiatelemetrySyslogSysMsgFacFilterResponse) GetActualInstance() interface{} {
if obj.MoAggregateTransform != nil {
return obj.MoAggregateTransform
}
if obj.MoDocumentCount != nil {
return obj.MoDocumentCount
}
if obj.MoTagSummary != nil {
return obj.MoTagSummary
}
if obj.NiatelemetrySyslogSysMsgFacFilterList != nil {
return obj.NiatelemetrySyslogSysMsgFacFilterList
}
// all schemas are nil
return nil
}
type NullableNiatelemetrySyslogSysMsgFacFilterResponse struct {
value *NiatelemetrySyslogSysMsgFacFilterResponse
isSet bool
}
func (v NullableNiatelemetrySyslogSysMsgFacFilterResponse) Get() *NiatelemetrySyslogSysMsgFacFilterResponse {
return v.value
}
func (v *NullableNiatelemetrySyslogSysMsgFacFilterResponse) Set(val *NiatelemetrySyslogSysMsgFacFilterResponse) {
v.value = val
v.isSet = true
}
func (v NullableNiatelemetrySyslogSysMsgFacFilterResponse) IsSet() bool {
return v.isSet
}
func (v *NullableNiatelemetrySyslogSysMsgFacFilterResponse) Unset() {
v.value = nil
v.isSet = false
}
func NewNullableNiatelemetrySyslogSysMsgFacFilterResponse(val *NiatelemetrySyslogSysMsgFacFilterResponse) *NullableNiatelemetrySyslogSysMsgFacFilterResponse |
func (v NullableNiatelemetrySyslogSysMsgFacFilterResponse) MarshalJSON() ([]byte, error) {
return json.Marshal(v.value)
}
func (v *NullableNiatelemetrySyslogSysMsgFacFilterResponse) UnmarshalJSON(src []byte) error {
v.isSet = true
return json.Unmarshal(src, &v.value)
}
| {
return &NullableNiatelemetrySyslogSysMsgFacFilterResponse{value: val, isSet: true}
} |
common.go | package reflection
import (
"reflect"
"runtime"
)
// See: https://stackoverflow.com/a/7053871/849021
func GetFunctionName(fn interface{}) string {
runtimeFn := runtime.FuncForPC(reflect.ValueOf(fn).Pointer())
if runtimeFn == nil {
return "<unknown function>"
}
return runtimeFn.Name()
}
func IsNil(value reflect.Value) bool {
// https://golang.org/pkg/reflect/#Value.IsNil
switch value.Kind() {
case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Slice, reflect.Ptr:
return value.IsNil()
default:
return false
}
}
// See: https://stackoverflow.com/questions/23555241/golang-reflection-how-to-get-zero-value-of-a-field-type
func IsZero(value reflect.Value) bool {
switch value.Kind() {
case reflect.Chan, reflect.Func, reflect.Interface, reflect.Map, reflect.Slice:
return value.IsNil()
case reflect.Ptr:
return value.IsNil() || IsZero(value.Elem())
case reflect.Array:
length := value.Len()
for i := 0; i < length; i++ {
if !IsZero(value.Index(i)) {
return false
}
}
return true
case reflect.Struct:
numField := value.NumField()
for i := 0; i < numField; i++ {
if !IsZero(value.Field(i)) {
return false
}
}
return true
default:
zero := reflect.Zero(value.Type()).Interface()
return value.Interface() == zero
}
}
func GetEntityName(entityPtr interface{}) string {
var name string
entity := reflect.ValueOf(entityPtr).Elem()
field := entity.FieldByName("Name")
if field.IsValid() |
return name
}
func GetEntityParent(entityPtr interface{}) interface{} {
var parent interface{}
entity := reflect.ValueOf(entityPtr).Elem()
field := entity.FieldByName("Parent")
if field.IsNil() {
return nil
} else if field.IsValid() {
//if field.Kind() == reflect.Ptr {
// field = field.Elem()
//}
if field.CanInterface() {
parent = field.Interface()
}
} else {
return nil
}
return parent
}
| {
if field.Kind() == reflect.Ptr {
field = field.Elem()
}
if field.CanInterface() {
ifld := field.Interface()
name = ifld.(string)
}
} |
gen.py | #!/usr/bin/env pypy
from sys import argv
from random import *
n = int(argv[1])
print 1
print n, n**2
for i in xrange(n):
P = range(1, n + 1) | for x in P:
line += "0 " * i + str(x) + " " + "0 " * (n - i - 1)
print line | shuffle(P)
line = "" |
saml_sp.go | // Copyright 2021 The Casdoor Authors. All Rights Reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package object
import (
"crypto/tls"
"crypto/x509"
"encoding/base64"
"fmt"
"net/url"
"regexp"
"strings"
"github.com/casdoor/casdoor/conf"
saml2 "github.com/russellhaering/gosaml2"
dsig "github.com/russellhaering/goxmldsig"
)
func ParseSamlResponse(samlResponse string, providerType string) (string, error) {
samlResponse, _ = url.QueryUnescape(samlResponse)
sp, err := buildSp(&Provider{Type: providerType}, samlResponse)
if err != nil {
return "", err
}
assertionInfo, err := sp.RetrieveAssertionInfo(samlResponse)
if err != nil {
panic(err)
}
return assertionInfo.NameID, nil
}
func GenerateSamlLoginUrl(id, relayState string) (string, string, error) {
provider := GetProvider(id)
if provider.Category != "SAML" {
return "", "", fmt.Errorf("Provider %s's category is not SAML", provider.Name)
}
sp, err := buildSp(provider, "")
if err != nil {
return "", "", err
}
auth := ""
method := ""
if provider.EnableSignAuthnRequest {
post, err := sp.BuildAuthBodyPost(relayState)
if err != nil {
return "", "", err
}
auth = string(post[:])
method = "POST"
} else {
auth, err = sp.BuildAuthURL(relayState)
if err != nil {
return "", "", err
}
method = "GET"
}
return auth, method, nil
}
func buildSp(provider *Provider, samlResponse string) (*saml2.SAMLServiceProvider, error) {
certStore := dsig.MemoryX509CertificateStore{
Roots: []*x509.Certificate{},
}
origin := conf.GetConfigString("origin")
certEncodedData := ""
if samlResponse != "" {
certEncodedData = parseSamlResponse(samlResponse, provider.Type)
} else if provider.IdP != "" {
certEncodedData = provider.IdP
}
certData, err := base64.StdEncoding.DecodeString(certEncodedData)
if err != nil |
idpCert, err := x509.ParseCertificate(certData)
if err != nil {
return nil, err
}
certStore.Roots = append(certStore.Roots, idpCert)
sp := &saml2.SAMLServiceProvider{
ServiceProviderIssuer: fmt.Sprintf("%s/api/acs", origin),
AssertionConsumerServiceURL: fmt.Sprintf("%s/api/acs", origin),
IDPCertificateStore: &certStore,
SignAuthnRequests: false,
SPKeyStore: dsig.RandomKeyStoreForTest(),
}
if provider.Endpoint != "" {
sp.IdentityProviderSSOURL = provider.Endpoint
sp.IdentityProviderIssuer = provider.IssuerUrl
}
if provider.EnableSignAuthnRequest {
sp.SignAuthnRequests = true
sp.SPKeyStore = buildSpKeyStore()
}
return sp, nil
}
func parseSamlResponse(samlResponse string, providerType string) string {
de, err := base64.StdEncoding.DecodeString(samlResponse)
if err != nil {
panic(err)
}
deStr := strings.Replace(string(de), "\n", "", -1)
tagMap := map[string]string{
"Aliyun IDaaS": "ds",
"Keycloak": "dsig",
}
tag := tagMap[providerType]
expression := fmt.Sprintf("<%s:X509Certificate>([\\s\\S]*?)</%s:X509Certificate>", tag, tag)
res := regexp.MustCompile(expression).FindStringSubmatch(deStr)
return res[1]
}
func buildSpKeyStore() dsig.X509KeyStore {
keyPair, err := tls.LoadX509KeyPair("object/token_jwt_key.pem", "object/token_jwt_key.key")
if err != nil {
panic(err)
}
return &dsig.TLSCertKeyStore{
PrivateKey: keyPair.PrivateKey,
Certificate: keyPair.Certificate,
}
}
| {
return nil, err
} |
scheduledTasks.py | from apscheduler.schedulers.background import BackgroundScheduler
from flask import Flask, request
import firebase_admin
from firebase_admin import credentials
from firebase_admin import firestore
import os
import os.path as path
import json
import csv
from scraper import scrape
from geocoder import get_lat_long
from optimalCenter import optimal_center_formula
#county.csv
#county_coords.csv
firebase_credentials_path = path.abspath(path.join(__file__ ,"../../.."))
firebase_credentials_path += "/mental-health-redistribution-firebase-adminsdk-j3xlw-a8e9757a35.json"
cred = credentials.Certificate(firebase_credentials_path)
firebase_admin.initialize_app(cred)
cred = credentials.ApplicationDefault()
db = firestore.client()
county_dictionary = {}
with open('county.csv', mode='r') as csv_file:
csv_reader = csv.DictReader(csv_file)
for row in csv_reader:
county_dictionary[row["Location"]] = row
county_coords = {}
with open('county_coords.csv', mode='r') as csv_file:
csv_reader = csv.DictReader(csv_file)
for row in csv_reader:
identifier = row["COUNAME"] + " County, " + row["STNAME"]
county_coords[identifier] = {}
county_coords[identifier]["lat"] = row["LATITUDE"]
county_coords[identifier]["lon"] = row["LONGITUDE"]
#county_dictionary[row[""]] = row
def store_scraped_in_google(address, name, lat, lon):
doc_ref = db.collection(u'potentialLocations').document(name)
doc_ref.set({
u'Address': address,
u'lat': lat,
u'lon': lon
})
def scraper():
#delete_all_potential_locations()
loopnetListings = scrape()
j = 0
name_set = {}
for index, row in loopnetListings.iterrows():
address_map = {}
address = row[0]
address_map['address'] = address
components = address.split(",")
try:
address_map['street'] = components[0]
address_map['city'] = components[1]
address_map['state'] = components[2]
except:
print("Exception: invalid format of address")
continue
name = row[1]
if name_set.get(name) == None:
name_set[name] = 1
else:
name = name + " " + str(name_set.get(name))
lat, lon = get_lat_long(address)
try:
store_scraped_in_google(address_map, name, lat, lon)
except:
print("Exception: Could not store in Google")
scheduler = BackgroundScheduler(daemon=True)
scheduler.add_job(scraper,'interval',minutes=1440)
#scheduler.add_job(scraper,'interval',minutes=5)
scheduler.start()
#http://127.0.0.1:5000/get_optimal_centers_list
app = Flask(__name__)
@app.route("/potential_mental_health_centers")
def potential_mental_health_centers():
collection = db.collection(u'potentialLocations').where(u'lat', u'!=', 0).stream()
response = []
for doc in collection:
response.append(doc.to_dict())
return json.dumps(response)
@app.route("/current_mental_health_centers")
def current_mental_health_centers():
collection = db.collection(u'currentLocations').where(u'lat', u'!=', 0).stream()
response = []
for doc in collection:
response.append(doc.to_dict())
return json.dumps(response)
@app.route("/county_info")
def | ():
return json.dumps(county_dictionary[request.args.get('county')])
@app.route("/optimal_centers")
def optimal_centers():
county_list = request.args.getlist('counties')
response = {}
potential_locations = db.collection(u'potentialLocations').where(u'lat', u'!=', 0).stream()
for doc in potential_locations:
potential_lat = float(doc.to_dict()["lat"])
potential_lon = float(doc.to_dict()["lon"])
score = 0
for county in county_list:
county_lat = float(county_coords[county]["lat"])
county_lon = float(county_coords[county]["lon"])
county_classification = float(county_dictionary[county]["Mental Health Need Classification"])
score += optimal_center_formula(county_lat, county_lon, potential_lat, potential_lon, county_classification)
score = score/len(county_list)
response[str(doc.id)] = {}
response[str(doc.id)]["details"] = doc.to_dict()
response[str(doc.id)]["score"] = score
return json.dumps(response)
if __name__ == "__main__":
app.run()
| county_info |
user.py | """
Define the User model
"""
from . import db
from .abc import BaseModel, MetaBaseModel
| __tablename__ = "user"
key = db.Column(db.Integer, primary_key=True)
first_name = db.Column(db.String(300), primary_key=False)
last_name = db.Column(db.String(300), primary_key=False)
age = db.Column(db.Integer, nullable=True)
def __init__(self, key, first_name, last_name, age=None):
""" Create a new User """
self.key = key
self.first_name = first_name
self.last_name = last_name
self.age = age | class User(db.Model, BaseModel, metaclass=MetaBaseModel):
""" The User model """
|
link.rs | use std::path::Path;
pub fn | (target_os: &str, target_triple: &str, out_dir: &Path) {
println!(
"cargo:rustc-link-search=native={}",
out_dir.join("build").display()
);
println!(
"cargo:rustc-link-search=native={}",
out_dir.join("build").join("Release").display()
);
println!(
"cargo:rustc-link-search=native={}",
out_dir.join("lib").display()
);
println!(
"cargo:rustc-link-search=native={}",
out_dir.join("lib64").display()
);
println!(
"cargo:rustc-link-search=native={}",
out_dir.join("lib").join("Release").display()
);
println!(
"cargo:rustc-link-search=native={}",
out_dir.join("lib64").join("Release").display()
);
if !cfg!(feature = "fltk-shared") {
println!("cargo:rustc-link-lib=static=cfltk");
} else {
println!("cargo:rustc-link-lib=dylib=cfltk");
}
if !cfg!(feature = "fltk-shared") {
println!("cargo:rustc-link-lib=static=fltk");
if !cfg!(features = "no-images") {
println!("cargo:rustc-link-lib=static=fltk_images");
if cfg!(feature = "system-libpng")
|| (!target_triple.contains("apple")
&& !target_triple.contains("windows")
&& !target_triple.contains("android"))
{
println!("cargo:rustc-link-lib=dylib=png");
} else {
println!("cargo:rustc-link-lib=static=fltk_png");
}
if cfg!(feature = "system-libjpeg") {
println!("cargo:rustc-link-lib=dylib=jpeg");
} else {
println!("cargo:rustc-link-lib=static=fltk_jpeg");
}
if cfg!(feature = "system-zlib") {
println!("cargo:rustc-link-lib=dylib=z");
} else {
println!("cargo:rustc-link-lib=static=fltk_z");
}
}
if cfg!(feature = "enable-glwindow") {
println!("cargo:rustc-link-lib=static=fltk_gl");
match target_os {
"macos" => println!("cargo:rustc-link-lib=framework=OpenGL"),
"windows" => {
println!("cargo:rustc-link-lib=dylib=opengl32");
println!("cargo:rustc-link-lib=dylib=glu32");
}
_ => {
println!("cargo:rustc-link-lib=dylib=GL");
println!("cargo:rustc-link-lib=dylib=GLU");
}
}
}
match target_os {
"macos" => {
println!("cargo:rustc-link-lib=framework=Carbon");
println!("cargo:rustc-link-lib=framework=Cocoa");
println!("cargo:rustc-link-lib=framework=ApplicationServices");
}
"windows" => {
println!("cargo:rustc-link-lib=dylib=gdiplus");
println!("cargo:rustc-link-lib=dylib=ws2_32");
println!("cargo:rustc-link-lib=dylib=comctl32");
println!("cargo:rustc-link-lib=dylib=gdi32");
println!("cargo:rustc-link-lib=dylib=oleaut32");
println!("cargo:rustc-link-lib=dylib=ole32");
println!("cargo:rustc-link-lib=dylib=uuid");
println!("cargo:rustc-link-lib=dylib=shell32");
println!("cargo:rustc-link-lib=dylib=advapi32");
println!("cargo:rustc-link-lib=dylib=comdlg32");
println!("cargo:rustc-link-lib=dylib=winspool");
println!("cargo:rustc-link-lib=dylib=user32");
println!("cargo:rustc-link-lib=dylib=kernel32");
println!("cargo:rustc-link-lib=dylib=odbc32");
}
"android" => {
println!("cargo:rustc-link-lib=log");
println!("cargo:rustc-link-lib=android");
println!("cargo:rustc-link-lib=c++_shared");
}
"ios" => {
// Experimental
println!("cargo:rustc-link-lib=framework=UIKit");
}
_ => {
println!("cargo:rustc-link-lib=dylib=pthread");
println!("cargo:rustc-link-lib=dylib=X11");
println!("cargo:rustc-link-lib=dylib=Xext");
println!("cargo:rustc-link-lib=dylib=Xinerama");
println!("cargo:rustc-link-lib=dylib=Xcursor");
println!("cargo:rustc-link-lib=dylib=Xrender");
println!("cargo:rustc-link-lib=dylib=Xfixes");
println!("cargo:rustc-link-lib=dylib=Xft");
println!("cargo:rustc-link-lib=dylib=fontconfig");
if !cfg!(feature = "no-pango") {
println!("cargo:rustc-link-lib=dylib=pango-1.0");
println!("cargo:rustc-link-lib=dylib=pangoxft-1.0");
println!("cargo:rustc-link-lib=dylib=gobject-2.0");
println!("cargo:rustc-link-lib=dylib=cairo");
println!("cargo:rustc-link-lib=dylib=pangocairo-1.0");
}
}
}
}
}
| link |
cloud_job_schedule.py | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class CloudJobSchedule(Model):
"""A job schedule that allows recurring jobs by specifying when to run jobs
and a specification used to create each job.
:param id: A string that uniquely identifies the schedule within the
account.
:type id: str
:param display_name: The display name for the schedule.
:type display_name: str
:param url: The URL of the job schedule.
:type url: str
:param e_tag: The ETag of the job schedule. This is an opaque string. You
can use it to detect whether the job schedule has changed between
requests. In particular, you can be pass the ETag with an Update Job
Schedule request to specify that your changes should take effect only if
nobody else has modified the schedule in the meantime.
:type e_tag: str
:param last_modified: The last modified time of the job schedule. This is
the last time at which the schedule level data, such as the job
specification or recurrence information, changed. It does not factor in
job-level changes such as new jobs being created or jobs changing state.
:type last_modified: datetime
:param creation_time: The creation time of the job schedule.
:type creation_time: datetime
:param state: The current state of the job schedule. Possible values
include: 'active', 'completed', 'disabled', 'terminating', 'deleting'
:type state: str or ~azure.batch.models.JobScheduleState
:param state_transition_time: The time at which the job schedule entered
the current state.
:type state_transition_time: datetime
:param previous_state: The previous state of the job schedule. This
property is not present if the job schedule is in its initial active
state. Possible values include: 'active', 'completed', 'disabled',
'terminating', 'deleting'
:type previous_state: str or ~azure.batch.models.JobScheduleState
:param previous_state_transition_time: The time at which the job schedule
entered its previous state. This property is not present if the job
schedule is in its initial active state.
:type previous_state_transition_time: datetime
:param schedule: The schedule according to which jobs will be created.
:type schedule: ~azure.batch.models.Schedule
:param job_specification: The details of the jobs to be created on this
schedule.
:type job_specification: ~azure.batch.models.JobSpecification
:param execution_info: Information about jobs that have been and will be
run under this schedule. | :type metadata: list[~azure.batch.models.MetadataItem]
:param stats: The lifetime resource usage statistics for the job schedule.
The statistics may not be immediately available. The Batch service
performs periodic roll-up of statistics. The typical delay is about 30
minutes.
:type stats: ~azure.batch.models.JobScheduleStatistics
"""
_attribute_map = {
'id': {'key': 'id', 'type': 'str'},
'display_name': {'key': 'displayName', 'type': 'str'},
'url': {'key': 'url', 'type': 'str'},
'e_tag': {'key': 'eTag', 'type': 'str'},
'last_modified': {'key': 'lastModified', 'type': 'iso-8601'},
'creation_time': {'key': 'creationTime', 'type': 'iso-8601'},
'state': {'key': 'state', 'type': 'JobScheduleState'},
'state_transition_time': {'key': 'stateTransitionTime', 'type': 'iso-8601'},
'previous_state': {'key': 'previousState', 'type': 'JobScheduleState'},
'previous_state_transition_time': {'key': 'previousStateTransitionTime', 'type': 'iso-8601'},
'schedule': {'key': 'schedule', 'type': 'Schedule'},
'job_specification': {'key': 'jobSpecification', 'type': 'JobSpecification'},
'execution_info': {'key': 'executionInfo', 'type': 'JobScheduleExecutionInformation'},
'metadata': {'key': 'metadata', 'type': '[MetadataItem]'},
'stats': {'key': 'stats', 'type': 'JobScheduleStatistics'},
}
def __init__(self, id=None, display_name=None, url=None, e_tag=None, last_modified=None, creation_time=None, state=None, state_transition_time=None, previous_state=None, previous_state_transition_time=None, schedule=None, job_specification=None, execution_info=None, metadata=None, stats=None):
super(CloudJobSchedule, self).__init__()
self.id = id
self.display_name = display_name
self.url = url
self.e_tag = e_tag
self.last_modified = last_modified
self.creation_time = creation_time
self.state = state
self.state_transition_time = state_transition_time
self.previous_state = previous_state
self.previous_state_transition_time = previous_state_transition_time
self.schedule = schedule
self.job_specification = job_specification
self.execution_info = execution_info
self.metadata = metadata
self.stats = stats | :type execution_info: ~azure.batch.models.JobScheduleExecutionInformation
:param metadata: A list of name-value pairs associated with the schedule
as metadata. The Batch service does not assign any meaning to metadata; it
is solely for the use of user code. |
media.go | package nicocas
import (
"fmt"
"io/ioutil"
"net/http"
"time"
)
type media struct {
seqNo uint64
duration float64
position float64 // 現在の再生時刻
bandwidth int64
size int64
data []byte
}
type mediaReadError struct {
error
}
type mediaChunkOption struct {
seqNo uint64
duration float64
position float64
bandwidth int64
}
func cbMediaChunk(r | ponse, err error, this, opt interface{}, queuedAt, startedAt time.Time) {
w := this.(*NicoCasWork)
chunkOpt := opt.(mediaChunkOption)
var ok bool
var is404 bool
defer func() {
if ok {
w.mediaStatus.Store(chunkOpt.seqNo, true)
} else if is404 {
w.processingMedia.Delete(chunkOpt.seqNo)
w.mediaStatus.Store(chunkOpt.seqNo, true)
} else {
w.processingMedia.Delete(chunkOpt.seqNo)
w.mediaStatus.Delete(chunkOpt.seqNo)
}
}()
if err != nil {
w.chError <- mediaReadError{err}
return
}
defer res.Body.Close()
switch res.StatusCode {
case 200:
default:
if res.StatusCode == 404 {
is404 = true
}
w.chError <- mediaReadError{fmt.Errorf("StatusCode is %v: %v", res.StatusCode, res.Request.URL)}
return
}
if res.ContentLength < 10*1024*1024 {
bs, err := ioutil.ReadAll(res.Body)
if err != nil {
w.chError <- mediaReadError{err}
return
}
if res.ContentLength == int64(len(bs)) {
w.chMedia <- media{
seqNo: chunkOpt.seqNo,
duration: chunkOpt.duration,
position: chunkOpt.position,
bandwidth: chunkOpt.bandwidth,
size: int64(len(bs)),
data: bs,
}
ok = true
} else {
w.chError <- mediaReadError{fmt.Errorf("read error: %v != %v", res.ContentLength, len(bs))}
}
} else {
w.chError <- mediaReadError{fmt.Errorf("[FIXME] too large: %v", res.ContentLength)}
}
}
func (w *NicoCasWork) saveMedia(seqNo uint64, position, duration float64, bandwidth, size int64, data []byte) error {
return w.db.InsertMedia(seqNo, position, duration, bandwidth, size, data)
}
// チャンネルからシーケンスを受け取ってDBに入れていく
func (w *NicoCasWork) mediaLoop() {
// this is guard
w.mtxMediaLoop.Lock()
defer w.mtxMediaLoop.Unlock()
defer func() {
fmt.Printf("Closing mediaLoop\n")
select {
case w.mediaLoopClosed <- true:
case <-time.After(10 * time.Second):
fmt.Println("[FIXME] Closing mediaLoop")
}
}()
for {
select {
case media := <-w.chMedia:
fmt.Printf("inserting DB %v %v %v %v %v\n", media.seqNo, media.duration, media.position, media.size, media.bandwidth)
err := w.saveMedia(media.seqNo, media.position, media.duration, media.bandwidth, media.size, media.data)
w.processingMedia.Delete(media.seqNo)
if err != nil {
fmt.Println(err)
return
}
case <-w.closeMediaLoop:
return
case <-w.closed:
return
}
}
}
| es *http.Res |
ysharp_lexer.py | from sly import Lexer
class | (Lexer):
tokens = {
ID,
FLOAT,
INT,
FUNC,
CLASS,
STRING,
EQ_GREATER,
EQ_LESS,
EQEQ,
PYTHON_CODE,
COLON_COLON,
IF,
ELSE,
TRUE,
FALSE,
NOT_EQEQ,
WHILE,
BREAK,
FOR,
IN,
DEL,
RETURN,
NULL,
EQ_ADD,
EQ_SUB,
EQ_MUL,
EQ_DIV,
EQ_MOD,
IMPORT,
LIMPORT,
SANDBOX,
}
literals = {
"+",
"-",
"*",
"/",
"%",
"|",
"&",
"!",
">",
"<",
"=",
"(",
")",
"{",
"}",
";",
",",
":",
"[",
"]",
"\\",
}
ignore = " \t"
ignore_comment_slash = r"//.*"
FLOAT = r"\d*\.\d+"
INT = r"\d+"
PYTHON_CODE = r"`[.\W\w]*?`"
STRING = r"(\".*?(?<!\\)(\\\\)*\"|'.*?(?<!\\)(\\\\)*')"
ID = r"(--[a-zA-Z_]([a-zA-Z0-9_]|!)*--|[a-zA-Z_]([a-zA-Z0-9_]|!)*)"
ID["func"] = FUNC
ID["class"] = CLASS
ID["break"] = BREAK
ID["true"] = TRUE
ID["false"] = FALSE
ID["while"] = WHILE
ID["for"] = FOR
ID["in"] = IN
ID["if"] = IF
ID["else"] = ELSE
ID["del"] = DEL
ID["null"] = NULL
ID["return"] = RETURN
ID["import"] = IMPORT
ID["limport"] = LIMPORT
ID["sandbox"] = SANDBOX
COLON_COLON = r"::"
EQEQ = r"=="
NOT_EQEQ = r"!="
EQ_GREATER = r"=>"
EQ_LESS = r"=<"
EQ_ADD = r"\+="
EQ_SUB = r"-="
EQ_MUL = r"\*="
EQ_DIV = r"/="
EQ_MOD = r"%="
@_(r"\n+")
def ignore_newline(self, t):
self.lineno += len(t.value)
| YsharpLexer |
jobs.rs | use std::sync::Arc;
use chrono::Utc;
use serenity::model::id::UserId;
use serenity::prelude::*;
pub async fn log_system_load(ctx: Arc<Context>) {
let cpu_load = sys_info::loadavg().unwrap();
let mem_use = sys_info::mem_info().unwrap();
if let Err(why) = UserId(209721904662183937)
.create_dm_channel(&*ctx)
.await
.expect("expected opened dm channel with sudomann")
.send_message(&ctx, |m| {
m.embed(|e| {
e.title("System Resource Load");
e.field(
"CPU Load Average",
format!("{:.2}%", cpu_load.one * 10.0),
false,
);
e.field(
"Memory Usage",
format!(
"{:.2} MB Free out of {:.2} MB",
mem_use.free as f32 / 1000.0,
mem_use.total as f32 / 1000.0
),
false,
);
e
})
})
.await | {
eprintln!("Error sending message: {:?}", why);
};
}
/// Remove players from pug if they joined over 6 hours ago
pub async fn clear_out_stale_joins(_ctx: Arc<Context>) {
let current_time = Utc::now();
let _formatted_time = current_time.to_rfc2822();
// _ctx.set_activity(Activity::playing(&_formatted_time)).await;
} | |
tls.py | #!/usr/bin/env python
# Author: Trevor Perrin
# See the LICENSE file for legal information regarding use of this file.
import sys
import os
import os.path
import socket
import thread
import time
import getopt
import httplib
from SocketServer import *
from BaseHTTPServer import *
from SimpleHTTPServer import *
if __name__ != "__main__":
raise "This must be run as a command, not used as a module!"
from tlslite.api import *
from tlslite import __version__
try:
from TACKpy import TACK, TACK_Break_Sig, writeTextTACKStructures
except ImportError:
pass
def printUsage(s=None):
if m2cryptoLoaded:
crypto = "M2Crypto/OpenSSL"
elif pycryptoLoaded:
crypto = "Pycrypto"
else:
crypto = "Pure-python crypto"
if s:
print("ERROR: %s" % s)
print ""
print "Version: %s" % __version__
print ""
print "RNG: %s" % prngName
print ""
print "Modules:"
if m2cryptoLoaded:
print " M2Crypto : Loaded"
else:
print " M2Crypto : Not Loaded"
if pycryptoLoaded:
print " pycrypto : Loaded"
else:
print " pycrypto : Not Loaded"
if gmpyLoaded:
print " GMPY : Loaded"
else:
print " GMPY : Not Loaded"
print ""
print """Commands:
server
[-k KEY] [-c CERT] [-t TACK] [-b BREAKSIGS] [-v VERIFIERDB] [-d DIR]
[--reqcert] HOST:PORT
client
[-k KEY] [-c CERT] [-u USER] [-p PASS]
HOST:PORT
"""
sys.exit(-1)
def | (s):
"""Print error message and exit"""
sys.stderr.write("ERROR: %s\n" % s)
sys.exit(-1)
def handleArgs(argv, argString, flagsList=[]):
# Convert to getopt argstring format:
# Add ":" after each arg, ie "abc" -> "a:b:c:"
getOptArgString = ":".join(argString) + ":"
try:
opts, argv = getopt.getopt(argv, getOptArgString, flagsList)
except getopt.GetoptError as e:
printError(e)
# Default values if arg not present
privateKey = None
certChain = None
username = None
password = None
tack = None
breakSigs = None
verifierDB = None
reqCert = False
directory = None
for opt, arg in opts:
if opt == "-k":
s = open(arg, "rb").read()
privateKey = parsePEMKey(s, private=True)
elif opt == "-c":
s = open(arg, "rb").read()
x509 = X509()
x509.parse(s)
certChain = X509CertChain([x509])
elif opt == "-u":
username = arg
elif opt == "-p":
password = arg
elif opt == "-t":
if tackpyLoaded:
s = open(arg, "rU").read()
tack = TACK()
tack.parsePem(s)
elif opt == "-b":
if tackpyLoaded:
s = open(arg, "rU").read()
breakSigs = TACK_Break_Sig.parsePemList(s)
elif opt == "-v":
verifierDB = VerifierDB(arg)
verifierDB.open()
elif opt == "-d":
directory = arg
elif opt == "--reqcert":
reqCert = True
else:
assert(False)
if not argv:
printError("Missing address")
if len(argv) > 1:
printError("Too many arguments")
#Split address into hostname/port tuple
address = argv[0]
address = address.split(":")
if len(address) != 2:
raise SyntaxError("Must specify <host>:<port>")
address = (address[0], int(address[1]))
# Populate the return list
retList = [address]
if "k" in argString:
retList.append(privateKey)
if "c" in argString:
retList.append(certChain)
if "u" in argString:
retList.append(username)
if "p" in argString:
retList.append(password)
if "t" in argString:
retList.append(tack)
if "b" in argString:
retList.append(breakSigs)
if "v" in argString:
retList.append(verifierDB)
if "d" in argString:
retList.append(directory)
if "reqcert" in flagsList:
retList.append(reqCert)
return retList
def printGoodConnection(connection, seconds):
print " Handshake time: %.3f seconds" % seconds
print " Version: %s" % connection.getVersionName()
print(" Cipher: %s %s" % (connection.getCipherName(),
connection.getCipherImplementation()))
if connection.session.srpUsername:
print(" Client SRP username: %s" % connection.session.srpUsername)
if connection.session.clientCertChain:
print(" Client X.509 SHA1 fingerprint: %s" %
connection.session.clientCertChain.getFingerprint())
if connection.session.serverCertChain:
print(" Server X.509 SHA1 fingerprint: %s" %
connection.session.serverCertChain.getFingerprint())
if connection.session.tackExt:
if connection.session.tackExt.isEmpty():
emptyStr = "<empty TLS Extension>"
else:
if connection.session.tackInHelloExt:
emptyStr = "\n(via TLS Extension)"
else:
emptyStr = "\n(via TACK Certificate)"
print(" TACK: %s" % emptyStr)
print(writeTextTACKStructures(connection.session.tackExt.tack,
connection.session.tackExt.break_sigs))
def clientCmd(argv):
(address, privateKey, certChain, username, password) = \
handleArgs(argv, "kcup")
if (certChain and not privateKey) or (not certChain and privateKey):
raise SyntaxError("Must specify CERT and KEY together")
if (username and not password) or (not username and password):
raise SyntaxError("Must specify USER with PASS")
if certChain and username:
raise SyntaxError("Can use SRP or client cert for auth, not both")
#Connect to server
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.settimeout(5)
sock.connect(address)
connection = TLSConnection(sock)
try:
start = time.clock()
if username and password:
connection.handshakeClientSRP(username, password, reqTack=tackpyLoaded)
else:
connection.handshakeClientCert(certChain, privateKey, reqTack=tackpyLoaded)
stop = time.clock()
print "Handshake success"
except TLSLocalAlert, a:
if a.description == AlertDescription.user_canceled:
print str(a)
else:
raise
sys.exit(-1)
except TLSRemoteAlert, a:
if a.description == AlertDescription.unknown_psk_identity:
if username:
print "Unknown username"
else:
raise
elif a.description == AlertDescription.bad_record_mac:
if username:
print "Bad username or password"
else:
raise
elif a.description == AlertDescription.handshake_failure:
print "Unable to negotiate mutually acceptable parameters"
else:
raise
sys.exit(-1)
printGoodConnection(connection, stop - start)
connection.close()
def serverCmd(argv):
(address, privateKey, certChain, tack, breakSigs,
verifierDB, directory, reqCert) = handleArgs(argv, "kctbvd", ["reqcert"])
if (certChain and not privateKey) or (not certChain and privateKey):
raise SyntaxError("Must specify CERT and KEY together")
if tack and not certChain:
raise SyntaxError("Must specify CERT with TACK")
print("I am an HTTPS test server, I will listen on %s:%d" %
(address[0], address[1]))
if directory:
os.chdir(directory)
print("Serving files from %s" % os.getcwd())
if certChain and privateKey:
print("Using certificate and private key...")
if verifierDB:
print("Using verifier DB...")
if tack:
print("Using TACK...")
if breakSigs:
print("Using TACK Break Sigs...")
#############
sessionCache = SessionCache()
class MyHTTPServer(ThreadingMixIn, TLSSocketServerMixIn, HTTPServer):
def handshake(self, connection):
print "About to handshake..."
try:
start = time.clock()
connection.handshakeServer(certChain=certChain,
privateKey=privateKey,
verifierDB=verifierDB,
tack=tack,
breakSigs=breakSigs,
sessionCache=sessionCache)
stop = time.clock()
except TLSRemoteAlert as a:
if a.description == AlertDescription.user_canceled:
print str(a)
return False
else:
raise
except TLSLocalAlert as a:
if a.description == AlertDescription.unknown_psk_identity:
if username:
print "Unknown username"
return False
else:
raise
elif a.description == AlertDescription.bad_record_mac:
if username:
print "Bad username or password"
return False
else:
raise
elif a.description == AlertDescription.handshake_failure:
print "Unable to negotiate mutually acceptable parameters"
return False
else:
raise
connection.ignoreAbruptClose = True
printGoodConnection(connection, stop - start)
return True
httpd = MyHTTPServer(address, SimpleHTTPRequestHandler)
httpd.serve_forever()
if __name__ == '__main__':
if len(sys.argv) < 2:
printUsage("Missing command")
elif sys.argv[1] == "client"[:len(sys.argv[1])]:
clientCmd(sys.argv[2:])
elif sys.argv[1] == "server"[:len(sys.argv[1])]:
serverCmd(sys.argv[2:])
else:
printUsage("Unknown command: %s" % sys.argv[1])
| printError |
config.go | package tls
import (
"crypto/hmac"
"crypto/tls"
"crypto/x509"
"encoding/base64"
"strings"
"sync"
"time"
"github.com/v2fly/v2ray-core/v5/common/net"
"github.com/v2fly/v2ray-core/v5/common/protocol/tls/cert"
"github.com/v2fly/v2ray-core/v5/transport/internet"
)
var globalSessionCache = tls.NewLRUClientSessionCache(128)
const exp8357 = "experiment:8357"
// ParseCertificate converts a cert.Certificate to Certificate.
func ParseCertificate(c *cert.Certificate) *Certificate {
if c != nil {
certPEM, keyPEM := c.ToPEM()
return &Certificate{
Certificate: certPEM,
Key: keyPEM,
}
}
return nil
}
func (c *Config) loadSelfCertPool() (*x509.CertPool, error) {
root := x509.NewCertPool()
for _, cert := range c.Certificate {
if !root.AppendCertsFromPEM(cert.Certificate) {
return nil, newError("failed to append cert").AtWarning()
}
}
return root, nil
}
// BuildCertificates builds a list of TLS certificates from proto definition.
func (c *Config) BuildCertificates() []tls.Certificate {
certs := make([]tls.Certificate, 0, len(c.Certificate))
for _, entry := range c.Certificate {
if entry.Usage != Certificate_ENCIPHERMENT {
continue
}
keyPair, err := tls.X509KeyPair(entry.Certificate, entry.Key)
if err != nil {
newError("ignoring invalid X509 key pair").Base(err).AtWarning().WriteToLog()
continue
}
certs = append(certs, keyPair)
}
return certs
}
func isCertificateExpired(c *tls.Certificate) bool {
if c.Leaf == nil && len(c.Certificate) > 0 {
if pc, err := x509.ParseCertificate(c.Certificate[0]); err == nil {
c.Leaf = pc
}
}
// If leaf is not there, the certificate is probably not used yet. We trust user to provide a valid certificate.
return c.Leaf != nil && c.Leaf.NotAfter.Before(time.Now().Add(time.Minute*2))
}
func issueCertificate(rawCA *Certificate, domain string) (*tls.Certificate, error) {
parent, err := cert.ParseCertificate(rawCA.Certificate, rawCA.Key)
if err != nil {
return nil, newError("failed to parse raw certificate").Base(err)
}
newCert, err := cert.Generate(parent, cert.CommonName(domain), cert.DNSNames(domain))
if err != nil {
return nil, newError("failed to generate new certificate for ", domain).Base(err)
}
newCertPEM, newKeyPEM := newCert.ToPEM()
cert, err := tls.X509KeyPair(newCertPEM, newKeyPEM)
return &cert, err
}
func (c *Config) getCustomCA() []*Certificate {
certs := make([]*Certificate, 0, len(c.Certificate))
for _, certificate := range c.Certificate {
if certificate.Usage == Certificate_AUTHORITY_ISSUE {
certs = append(certs, certificate)
}
}
return certs
}
func getGetCertificateFunc(c *tls.Config, ca []*Certificate) func(hello *tls.ClientHelloInfo) (*tls.Certificate, error) {
var access sync.RWMutex
return func(hello *tls.ClientHelloInfo) (*tls.Certificate, error) {
domain := hello.ServerName
certExpired := false
access.RLock()
certificate, found := c.NameToCertificate[domain]
access.RUnlock()
if found {
if !isCertificateExpired(certificate) {
return certificate, nil
}
certExpired = true
}
if certExpired {
newCerts := make([]tls.Certificate, 0, len(c.Certificates))
access.Lock()
for _, certificate := range c.Certificates {
cert := certificate
if !isCertificateExpired(&cert) {
newCerts = append(newCerts, cert)
} else if cert.Leaf != nil {
expTime := cert.Leaf.NotAfter.Format(time.RFC3339)
newError("old certificate for ", domain, " (expire on ", expTime, ") discard").AtInfo().WriteToLog()
}
}
c.Certificates = newCerts
access.Unlock()
}
var issuedCertificate *tls.Certificate
// Create a new certificate from existing CA if possible
for _, rawCert := range ca {
if rawCert.Usage == Certificate_AUTHORITY_ISSUE {
newCert, err := issueCertificate(rawCert, domain)
if err != nil {
newError("failed to issue new certificate for ", domain).Base(err).WriteToLog()
continue
}
parsed, err := x509.ParseCertificate(newCert.Certificate[0])
if err == nil {
newCert.Leaf = parsed
expTime := parsed.NotAfter.Format(time.RFC3339)
newError("new certificate for ", domain, " (expire on ", expTime, ") issued").AtInfo().WriteToLog()
} else {
newError("failed to parse new certificate for ", domain).Base(err).WriteToLog()
}
access.Lock()
c.Certificates = append(c.Certificates, *newCert)
issuedCertificate = &c.Certificates[len(c.Certificates)-1]
access.Unlock()
break
}
}
if issuedCertificate == nil {
return nil, newError("failed to create a new certificate for ", domain)
}
access.Lock()
c.BuildNameToCertificate()
access.Unlock()
return issuedCertificate, nil
}
}
func (c *Config) IsExperiment8357() bool {
return strings.HasPrefix(c.ServerName, exp8357)
}
func (c *Config) parseServerName() string {
if c.IsExperiment8357() {
return c.ServerName[len(exp8357):]
}
return c.ServerName
}
func (c *Config) verifyPeerCert(rawCerts [][]byte, verifiedChains [][]*x509.Certificate) error {
if c.PinnedPeerCertificateChainSha256 != nil {
hashValue := GenerateCertChainHash(rawCerts)
for _, v := range c.PinnedPeerCertificateChainSha256 {
if hmac.Equal(hashValue, v) {
return nil
}
}
return newError("peer cert is unrecognized: ", base64.StdEncoding.EncodeToString(hashValue))
}
return nil
}
// GetTLSConfig converts this Config into tls.Config.
func (c *Config) GetTLSConfig(opts ...Option) *tls.Config {
root, err := c.getCertPool()
if err != nil {
newError("failed to load system root certificate").AtError().Base(err).WriteToLog()
}
if c == nil {
return &tls.Config{
ClientSessionCache: globalSessionCache,
RootCAs: root,
InsecureSkipVerify: false,
NextProtos: nil,
SessionTicketsDisabled: true,
}
}
config := &tls.Config{
ClientSessionCache: globalSessionCache,
RootCAs: root,
InsecureSkipVerify: c.AllowInsecure,
NextProtos: c.NextProtocol,
SessionTicketsDisabled: !c.EnableSessionResumption,
VerifyPeerCertificate: c.verifyPeerCert,
}
for _, opt := range opts {
opt(config)
}
config.Certificates = c.BuildCertificates()
config.BuildNameToCertificate()
caCerts := c.getCustomCA()
if len(caCerts) > 0 {
config.GetCertificate = getGetCertificateFunc(config, caCerts)
}
if sn := c.parseServerName(); len(sn) > 0 {
config.ServerName = sn
}
if len(config.NextProtos) == 0 {
config.NextProtos = []string{"h2", "http/1.1"}
}
return config
}
// Option for building TLS config.
type Option func(*tls.Config)
// WithDestination sets the server name in TLS config.
func WithDestination(dest net.Destination) Option {
return func(config *tls.Config) {
if dest.Address.Family().IsDomain() && config.ServerName == "" {
config.ServerName = dest.Address.Domain()
}
}
}
// WithNextProto sets the ALPN values in TLS config.
func WithNextProto(protocol ...string) Option {
return func(config *tls.Config) {
if len(config.NextProtos) == 0 {
config.NextProtos = protocol
}
}
}
// ConfigFromStreamSettings fetches Config from stream settings. Nil if not found.
func | (settings *internet.MemoryStreamConfig) *Config {
if settings == nil {
return nil
}
config, ok := settings.SecuritySettings.(*Config)
if !ok {
return nil
}
return config
}
| ConfigFromStreamSettings |
workbooks_workbook_tables_worksheet_charts_axes_categoryAxis_majorGridlines_UpdateFormat.js | /**
* Auto-generated action file for "Microsoft Graph API" API.
*
* Generated at: 2019-08-07T14:53:12.665Z
* Mass generator version: 1.1.0
*
* flowground :- Telekom iPaaS / microsoft-graph-api-connector
* Copyright © 2019, Deutsche Telekom AG
* contact: [email protected]
*
* All files of this connector are licensed under the Apache 2.0 License. For details
* see the file LICENSE on the toplevel directory.
*
*
* Operation: 'workbooks.workbook.tables.worksheet.charts.axes.categoryAxis.majorGridlines.UpdateFormat'
* Endpoint Path: '/workbooks/{driveItem-id}/workbook/tables/{workbookTable-id}/worksheet/charts/{workbookChart-id}/axes/categoryAxis/majorGridlines/format'
* Method: 'patch'
*
*/
const Swagger = require('swagger-client');
const processWrapper = require('../services/process-wrapper');
const spec = require('../spec.json');
// this wrapers offers a simplified emitData(data) function
module.exports.process = processWrapper(processAction);
// parameter names for this call
const PARAMETERS = [
"driveItem-id",
"workbookTable-id",
"workbookChart-id"
];
// mappings from connector field names to API field names
const FIELD_MAP = {
"driveItem_id": "driveItem-id",
"workbookTable_id": "workbookTable-id",
"workbookChart_id": "workbookChart-id",
"requestBody": "requestBody"
};
function p | msg, cfg) {
var isVerbose = process.env.debug || cfg.verbose;
if (isVerbose) {
console.log(`---MSG: ${JSON.stringify(msg)}`);
console.log(`---CFG: ${JSON.stringify(cfg)}`);
console.log(`---ENV: ${JSON.stringify(process.env)}`);
}
const contentType = 'application/json';
const body = msg.body;
mapFieldNames(body);
let parameters = {};
for(let param of PARAMETERS) {
parameters[param] = body[param];
}
// credentials for this operation
let securities = {};
let callParams = {
spec: spec,
operationId: 'workbooks.workbook.tables.worksheet.charts.axes.categoryAxis.majorGridlines.UpdateFormat',
pathName: '/workbooks/{driveItem-id}/workbook/tables/{workbookTable-id}/worksheet/charts/{workbookChart-id}/axes/categoryAxis/majorGridlines/format',
method: 'patch',
parameters: parameters,
requestContentType: contentType,
requestBody: body.requestBody,
securities: {authorized: securities},
server: spec.servers[cfg.server] || cfg.otherServer,
};
if (isVerbose) {
let out = Object.assign({}, callParams);
out.spec = '[omitted]';
console.log(`--SWAGGER CALL: ${JSON.stringify(out)}`);
}
// Call operation via Swagger client
return Swagger.execute(callParams).then(data => {
// emit a single message with data
this.emitData(data);
// if the response contains an array of entities, you can emit them one by one:
// data.obj.someItems.forEach((item) => {
// this.emitData(item);
// }
});
}
function mapFieldNames(obj) {
if(Array.isArray(obj)) {
obj.forEach(mapFieldNames);
}
else if(typeof obj === 'object' && obj) {
Object.keys(obj).forEach(key => {
mapFieldNames(obj[key]);
let goodKey = FIELD_MAP[key];
if(goodKey && goodKey !== key) {
obj[goodKey] = obj[key];
delete obj[key];
}
});
}
} | rocessAction( |
quiz.rs | use db::Database;
use hyper::{body, Body, HeaderMap, Response, StatusCode};
use model::quiz::Submission;
/// Attempts to create a new quiz. Returns the ObjectID of the document.
async fn try_submit_quiz(db: &Database, sub: &Submission) -> Result<[u8; 12], StatusCode> {
let choice_count = sub.quiz.choices.len();
if usize::from(sub.quiz.answer) >= choice_count || !(1..=25).contains(&choice_count) {
return Err(StatusCode::BAD_REQUEST);
}
match db.create_quiz(sub).await {
Ok(oid) => Ok(oid.bytes()),
Err(db::error::Error::AlreadyExists) => Err(StatusCode::FORBIDDEN),
_ => Err(StatusCode::INTERNAL_SERVER_ERROR),
}
}
pub async fn try_respond(body: Body, headers: &HeaderMap, db: &Database) -> Result<Response<Body>, StatusCode> | {
// Retrieve the session from the cookie
let session = super::util::session::extract_session(headers)?;
let oid = db::ObjectId::parse_str(session).map_err(|_| StatusCode::BAD_REQUEST)?;
// Check database if user ID is present
let user = db
.get_session(oid)
.await
.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?
.ok_or(StatusCode::UNAUTHORIZED)?
.as_user()
.ok_or(StatusCode::FORBIDDEN)?;
// Finally parse the JSON form submission
use body::Buf;
use model::quiz::Quiz;
let reader = body::aggregate(body).await.map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?.reader();
let quiz: Quiz = serde_json::from_reader(reader).map_err(|_| StatusCode::INTERNAL_SERVER_ERROR)?;
// Submit the quiz to the database
use alloc::vec::Vec;
let submission = Submission { id: user, quiz };
let oid: Vec<_> = try_submit_quiz(db, &submission).await?.into();
let mut res = Response::new(oid.into());
*res.status_mut() = StatusCode::CREATED;
Ok(res)
} |
|
reader.rs | //! BAM reader and iterators.
mod query;
mod records;
mod unmapped_records;
pub use self::{query::Query, records::Records, unmapped_records::UnmappedRecords};
use std::{
ffi::CStr,
io::{self, Read, Seek},
};
use byteorder::{LittleEndian, ReadBytesExt};
use noodles_bgzf::{self as bgzf, VirtualPosition};
use noodles_core::Region;
use noodles_sam::header::{ReferenceSequence, ReferenceSequences};
use super::{bai, Record, MAGIC_NUMBER};
/// A BAM reader.
///
/// A BAM file is an encoded and compressed version of a SAM file. While a SAM file has a header
/// and a list of records, a BAM is comprised of three parts:
///
/// 1. a SAM header,
/// 2. a list of reference sequences, and
/// 3. a list of encoded SAM records.
///
/// The reader reads records sequentially but can use virtual positions to seek to offsets from the
/// start of a seekable stream.
///
/// # Examples
///
/// ```no_run
/// # use std::{fs::File, io};
/// use noodles_bam as bam;
///
/// let mut reader = File::open("sample.bam").map(bam::Reader::new)?;
/// reader.read_header()?;
/// reader.read_reference_sequences()?;
///
/// for result in reader.records() {
/// let record = result?;
/// println!("{:?}", record);
/// }
///
/// # Ok::<(), io::Error>(())
/// ```
pub struct Reader<R>
where
R: Read,
{
inner: bgzf::Reader<R>,
}
impl<R> Reader<R>
where
R: Read,
{
/// Creates a BAM reader.
///
/// The given reader must be a raw BGZF stream, as the underlying reader wraps it in a decoder.
///
/// # Examples
///
/// ```no_run
/// # use std::{fs::File, io};
/// use noodles_bam as bam;
/// let mut _reader = File::open("sample.bam").map(bam::Reader::new)?;
/// # Ok::<(), io::Error>(())
/// ```
pub fn new(reader: R) -> Self {
Self {
inner: bgzf::Reader::new(reader),
}
}
/// Reads the raw SAM header.
///
/// The BAM magic number is also checked.
///
/// The position of the stream is expected to be at the start.
///
/// This returns the raw SAM header as a [`std::string::String`]. It can subsequently be parsed
/// as a [`noodles_sam::Header`].
///
/// # Examples
///
/// ```no_run
/// # use std::{fs::File, io};
/// use noodles_bam as bam;
/// let mut reader = File::open("sample.bam").map(bam::Reader::new)?;
/// let header = reader.read_header()?;
/// # Ok::<(), io::Error>(())
/// ```
pub fn read_header(&mut self) -> io::Result<String> {
let magic = read_magic(&mut self.inner)?;
if magic != MAGIC_NUMBER {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
"invalid BAM header",
));
}
read_header(&mut self.inner)
}
/// Reads the binary reference sequences after the SAM header.
///
/// This is not the same as the `@SQ` records in the SAM header. A BAM has a list of reference
/// sequences containing name and length tuples after the SAM header and before the list of
/// records.
///
/// The position of the stream is expected to be directly after the header.
///
/// This returns a reference sequence dictionary ([`noodles_sam::header::ReferenceSequences`]),
/// which can be used to build a minimal [`noodles_sam::Header`] if the SAM header is empty.
///
/// # Examples
///
/// ```no_run
/// # use std::{fs::File, io};
/// use noodles_bam as bam;
/// let mut reader = File::open("sample.bam").map(bam::Reader::new)?;
/// reader.read_header()?;
/// let reference_sequences = reader.read_reference_sequences()?;
/// # Ok::<(), io::Error>(())
/// ```
pub fn read_reference_sequences(&mut self) -> io::Result<ReferenceSequences> {
let n_ref = self.inner.read_u32::<LittleEndian>()?;
let mut reference_sequences = ReferenceSequences::with_capacity(n_ref as usize);
for _ in 0..n_ref {
let reference_sequence = read_reference_sequence(&mut self.inner)?;
reference_sequences.insert(reference_sequence.name().into(), reference_sequence);
}
Ok(reference_sequences)
}
/// Reads a single record.
///
/// The record block size (`bs`) is read from the underlying stream, and `bs` additional bytes
/// are read into the given record.
///
/// The stream is expected to be directly after the reference sequences or at the start of
/// another record.
///
/// It is more ergonomic to read records using an iterator (see [`Self::records`] and
/// [`Self::query`]), but using this method directly allows the reuse of a single [`Record`]
/// buffer.
///
/// If successful, the record block size is returned. If a block size of 0 is returned, the
/// stream reached EOF.
///
/// # Examples
///
/// ```no_run
/// # use std::{fs::File, io};
/// use noodles_bam as bam;
///
/// let mut reader = File::open("sample.bam").map(bam::Reader::new)?;
/// reader.read_header()?;
/// reader.read_reference_sequences()?;
///
/// let mut record = bam::Record::default();
/// reader.read_record(&mut record)?;
///
/// # Ok::<(), io::Error>(())
/// ```
pub fn read_record(&mut self, record: &mut Record) -> io::Result<usize> {
let block_size = match self.inner.read_u32::<LittleEndian>() {
Ok(bs) => bs as usize,
Err(ref e) if e.kind() == io::ErrorKind::UnexpectedEof => return Ok(0),
Err(e) => return Err(e),
};
record.resize(block_size);
self.inner.read_exact(record)?;
Ok(block_size)
}
/// Returns an iterator over records starting from the current stream position.
///
/// The stream is expected to be directly after the reference sequences or at the start of
/// another record.
///
/// # Examples
///
/// ```no_run
/// # use std::{fs::File, io};
/// use noodles_bam as bam;
///
/// let mut reader = File::open("sample.bam").map(bam::Reader::new)?;
/// reader.read_header()?;
/// reader.read_reference_sequences()?;
///
/// for result in reader.records() {
/// let record = result?;
/// println!("{:?}", record);
/// }
/// # Ok::<(), io::Error>(())
/// ```
pub fn records(&mut self) -> Records<'_, R> {
Records::new(self)
}
/// Returns the current virtual position of the underlying BGZF reader.
///
/// # Examples
///
/// ```
/// # use std::io;
/// use noodles_bam as bam;
///
/// let data = Vec::new();
/// let reader = bam::Reader::new(&data[..]);
/// let virtual_position = reader.virtual_position();
///
/// assert_eq!(virtual_position.compressed(), 0);
/// assert_eq!(virtual_position.uncompressed(), 0);
/// # Ok::<(), io::Error>(())
/// ```
pub fn virtual_position(&self) -> VirtualPosition {
self.inner.virtual_position()
}
}
impl<R> Reader<R>
where
R: Read + Seek,
{
/// Seeks the underlying BGZF reader to the given virtual position.
///
/// Virtual positions typically come from the associated BAM index file.
///
/// # Examples
///
/// ```no_run
/// # use std::{fs::File, io};
/// use noodles_bam as bam;
/// use noodles_bgzf as bgzf;
///
/// let mut reader = File::open("sample.bam").map(bam::Reader::new)?;
///
/// let virtual_position = bgzf::VirtualPosition::from(102334155);
/// reader.seek(virtual_position)?;
/// # Ok::<(), io::Error>(())
/// ```
pub fn seek(&mut self, pos: VirtualPosition) -> io::Result<VirtualPosition> {
self.inner.seek(pos)
}
// Seeks to the first record by setting the cursor to the beginning of the stream and
// (re)reading the header and binary reference sequences.
fn seek_to_first_record(&mut self) -> io::Result<VirtualPosition> {
self.seek(VirtualPosition::default())?;
self.read_header()?;
self.read_reference_sequences()?;
Ok(self.virtual_position())
}
/// Returns an iterator over records that intersect the given region.
///
/// # Examples
///
/// ```no_run
/// # use std::fs::File;
/// use noodles_bam::{self as bam, bai};
/// use noodles_core::Region;
/// use noodles_sam as sam;
///
/// let mut reader = File::open("sample.bam").map(bam::Reader::new)?;
/// let header: sam::Header = reader.read_header()?.parse()?;
///
/// let reference_sequences = header.reference_sequences();
/// let index = bai::read("sample.bam.bai")?;
/// let region = Region::mapped("sq0", 17711, 28657);
/// let query = reader.query(&reference_sequences, &index, ®ion)?;
///
/// for result in query {
/// let record = result?;
/// println!("{:?}", record);
/// }
/// # Ok::<(), Box<dyn std::error::Error>>(())
/// ```
pub fn query(
&mut self,
reference_sequences: &ReferenceSequences,
index: &bai::Index,
region: &Region,
) -> io::Result<Query<'_, R>> {
let (i, start, end) = resolve_region(reference_sequences, region)?;
let index_reference_sequence = index.reference_sequences().get(i).ok_or_else(|| {
io::Error::new(
io::ErrorKind::InvalidInput,
format!(
"could not find reference in index: {} >= {}",
i,
reference_sequences.len()
),
)
})?;
let query_bins = index_reference_sequence.query(start, end);
let chunks: Vec<_> = query_bins
.iter()
.flat_map(|bin| bin.chunks())
.cloned()
.collect();
let min_offset = index_reference_sequence.min_offset(start);
let merged_chunks = bai::optimize_chunks(&chunks, min_offset);
Ok(Query::new(self, merged_chunks, i, start, end))
}
/// Returns an iterator of unmapped records after querying for the unmapped region.
///
/// # Examples
///
/// ```no_run
/// # use std::{fs::File, io};
/// use noodles_bam::{self as bam, bai};
///
/// let mut reader = File::open("sample.bam").map(bam::Reader::new)?;
/// let index = bai::read("sample.bam.bai")?;
/// let query = reader.query_unmapped(&index)?;
///
/// for result in query {
/// let record = result?;
/// println!("{:?}", record);
/// }
/// # Ok::<(), io::Error>(())
/// ```
pub fn query_unmapped(&mut self, index: &bai::Index) -> io::Result<UnmappedRecords<'_, R>> |
}
fn read_magic<R>(reader: &mut R) -> io::Result<[u8; 4]>
where
R: Read,
{
let mut magic = [0; 4];
reader.read_exact(&mut magic)?;
Ok(magic)
}
fn read_header<R>(reader: &mut R) -> io::Result<String>
where
R: Read,
{
let l_text = reader.read_u32::<LittleEndian>()?;
let mut c_text = vec![0; l_text as usize];
reader.read_exact(&mut c_text)?;
// Headers are not necessarily NUL-terminated.
bytes_with_nul_to_string(&c_text).or_else(|_| {
String::from_utf8(c_text).map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
})
}
fn read_reference_sequence<R>(reader: &mut R) -> io::Result<ReferenceSequence>
where
R: Read,
{
let l_name = reader.read_u32::<LittleEndian>()?;
let mut c_name = vec![0; l_name as usize];
reader.read_exact(&mut c_name)?;
let name = bytes_with_nul_to_string(&c_name)?;
let l_ref = reader.read_u32::<LittleEndian>()?;
Ok(ReferenceSequence::new(name, l_ref as i32))
}
fn bytes_with_nul_to_string(buf: &[u8]) -> io::Result<String> {
CStr::from_bytes_with_nul(buf)
.map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
.and_then(|c_str| {
c_str
.to_str()
.map(|s| s.to_string())
.map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))
})
}
fn resolve_region(
reference_sequences: &ReferenceSequences,
region: &Region,
) -> io::Result<(usize, i32, i32)> {
match region {
Region::Mapped { name, start, end } => {
let i = reference_sequences.get_index_of(name).ok_or_else(|| {
io::Error::new(
io::ErrorKind::InvalidInput,
format!(
"region reference sequence does not exist in reference sequences: {:?}",
region
),
)
})?;
Ok((i, *start, *end))
}
_ => Err(io::Error::new(
io::ErrorKind::InvalidData,
"region is not mapped",
)),
}
}
| {
let last_interval = index
.reference_sequences()
.iter()
.rev()
.find_map(|rs| rs.intervals().last());
if let Some(interval) = last_interval {
self.seek(*interval)?;
} else {
self.seek_to_first_record()?;
}
Ok(UnmappedRecords::new(self))
} |
d3_radialtidytree.js | 'use strict';
function D3_RadialTidyTree(selector, aProps)
{
var svg = d3.select(selector);
if (svg.empty() == true)
{
console.log('D3RadialTidyTree: Could not identify element based on selector: '+selector);
return;
}
svg.style("height","auto");
var nSvgWidth = svg.node().getBoundingClientRect().width;
var nSvgHeight = svg.node().getBoundingClientRect().height;
var aData = aProps['data'];
var strCssClassPrefix = GetStringValue(aProps['cssclassprefix']);
if (strCssClassPrefix == '')
{
strCssClassPrefix = 'd3-radialtidytree-';
}
var aDataHierarchical = d3.hierarchy(aData);
const fRadius = nSvgWidth / 2;
const root = d3.tree()
.size([2 * Math.PI, fRadius])
.separation((a, b) => (a.parent == b.parent ? 1 : 2) / a.depth)
(aDataHierarchical)
svg.attr("class", strCssClassPrefix+'svg');
const link = svg.append("g")
.attr("fill", "none")
.attr("stroke", "#555")
.attr("stroke-opacity", 0.4)
.attr("stroke-width", 1.5)
.selectAll("path")
.data(root.links())
.join("path")
.attr("d", d3.linkRadial()
.angle(d => d.x) |
const node = svg.append("g")
.attr("stroke-linejoin", "round")
.attr("stroke-width", 3)
.selectAll("g")
.data(root.descendants().reverse())
.join("g")
.attr("transform", d => `
rotate(${d.x * 180 / Math.PI - 90})
translate(${d.y},0)
`);
node.append("circle")
//.attr("fill", d => d.children ? "#555" : "#999")
.attr("class", d => d.children ? strCssClassPrefix+'path-end' : strCssClassPrefix+'path-start')
.attr("r", 2.5);
node.append("text")
.attr("dy", "0.31em")
.attr("x", d => d.x < Math.PI === !d.children ? 6 : -6)
.attr("text-anchor", d => d.x < Math.PI === !d.children ? "start" : "end")
.attr("transform", d => d.x >= Math.PI ? "rotate(180)" : null)
.text(d => d.data.label)
.clone(true).lower()
.attr("stroke", "white");
const svgnode = svg.node();
const box = svgnode.getBBox();
svgnode.setAttribute("viewBox", `${box.x} ${box.y} ${box.width} ${box.height}`);
} | .radius(d => d.y)
); |
utils.js | const Debug = require('debug');
const debug = Debug('iexec-server-js-client:utils');
const getAppBinaryFieldName = (_os, _cpu) => { | throw new Error('OS or CPU undefined');
}
const os = _os.toUpperCase();
const cpu = _cpu.toUpperCase();
if (os === 'JAVA') {
return 'javauri';
}
switch (os) {
case 'LINUX':
switch (cpu) {
case 'IX86':
return 'linux_ix86uri';
case 'PPC':
return 'linux_ppcuri';
case 'AMD64':
return 'linux_amd64uri';
case 'X86_64':
return 'linux_x86_64uri';
case 'IA64':
return 'linux_ia64uri';
default:
break;
}
break;
case 'WIN32':
switch (cpu) {
case 'IX86':
return 'win32_ix86uri';
case 'AMD64':
return 'win32_amd64uri';
case 'X86_64':
return 'win32_x86_64uri';
default:
break;
}
break;
case 'MACOSX':
switch (cpu) {
case 'IX86':
return 'macos_ix86uri';
case 'X86_64':
return 'macos_x86_64uri';
case 'PPC':
return 'macos_ppcuri';
default:
break;
}
break;
default:
break;
}
return undefined;
};
const getFieldValue = (obj, field) => {
const [objName] = Object.keys(obj.xwhep);
const fields = Object.keys(obj.xwhep[objName][0]);
if (!fields.includes(field)) throw Error(`getFieldValue() no ${field} in ${objName}`);
return obj.xwhep[objName][0][field][0];
};
const FETCH_INTERVAL = 5000;
const sleep = ms => new Promise(res => setTimeout(res, ms));
const waitFor = async (fn, uid, counter = 0) => {
try {
const work = await fn(uid);
debug('waitFor()', counter, uid, 'status', work.xwhep.work[0].status[0]);
const status = getFieldValue(work, 'status');
if (status === 'COMPLETED') return work;
if (status === 'ERROR') throw Error('Work status = ERROR');
await sleep(FETCH_INTERVAL);
return waitFor(fn, uid, counter + 1);
} catch (error) {
debug('waitFor()', uid, error);
throw error;
}
};
const waitForWorkResult = async (fn, txHash, counter = 0) => {
const workResult = await fn(txHash);
debug('counter', counter);
debug('workResult', workResult);
const status = workResult.status.toNumber();
if (status === 4) return workResult.uri;
if (status === 5) throw Error('Bridge computation failed');
await sleep(FETCH_INTERVAL);
return waitForWorkResult(fn, txHash, counter + 1);
};
module.exports = {
getAppBinaryFieldName,
getFieldValue,
waitFor,
waitForWorkResult,
}; | if (_os === undefined || _cpu === undefined) { |
lib.rs | extern crate proc_macro;
#[macro_use]
extern crate quote;
use proc_macro::TokenStream;
use proc_macro2::Span;
use syn::{FnArg, Ident, ItemFn};
mod parse;
#[proc_macro_attribute]
pub fn task(macro_args: TokenStream, input: TokenStream) -> TokenStream {
let fun: ItemFn = syn::parse_macro_input!(input as ItemFn);
let ident = fun.ident;
let block = fun.block;
let args = (*fun.decl).inputs;
let macro_args = syn::parse_macro_input!(macro_args as parse::MacroArgs);
let cloned: Vec<_> = args
.iter()
.map(|arg| match arg {
FnArg::Captured(capt) => {
let pat = capt.pat.clone();
quote!(let #pat = self.#pat.clone();)
}
_ => panic!("Cannot capture that"),
})
.collect();
let pub_fields: Vec<_> = args
.iter()
.map(|arg| match arg {
FnArg::Captured(capt) => {
let pat = capt.pat.clone();
let ty = capt.ty.clone();
quote!(pub #pat: #ty,)
}
_ => panic!("Cannot capture that"),
})
.collect();
let ctor_arg: Vec<_> = args
.iter()
.map(|arg| match arg {
FnArg::Captured(capt) => {
let pat = capt.pat.clone();
quote!(#pat)
}
_ => panic!("Cannot capture that"),
})
.collect();
let mod_root = if let parse::MacroArgs::Custom(root) = macro_args {
root
} else | ;
let module_ident = Ident::new(&format!("{}_mod", ident), Span::call_site());
quote!(
pub use #module_ident::#ident;
pub mod #module_ident {
use serde_derive::{Serialize, Deserialize};
use #mod_root::Task;
#[derive(Serialize, Deserialize)]
pub struct #ident{#( #pub_fields )*}
#[typetag::serde]
impl Task for #ident {
fn execute(&self) {
#( #cloned )*
#block
}
}
impl #ident {
pub fn new(#args) -> Self {
Self {
#( #ctor_arg, )*
}
}
}
}
)
.into()
}
| {
Ident::new("negi", Span::call_site())
} |
testsql.py | """Mypy style test cases for SQLAlchemy stubs and plugin."""
import os
import os.path
import sys
import pytest # type: ignore # no pytest in typeshed
from mypy.test.config import test_temp_dir
from mypy.test.data import DataDrivenTestCase, DataSuite
from mypy.test.helpers import assert_string_arrays_equal
from mypy.util import try_find_python2_interpreter
from mypy import api
this_file_dir = os.path.dirname(os.path.realpath(__file__))
prefix = os.path.dirname(this_file_dir)
inipath = os.path.abspath(os.path.join(prefix, 'test'))
# Locations of test data files such as test case descriptions (.test).
test_data_prefix = os.path.join(prefix, 'test', 'test-data')
class SQLDataSuite(DataSuite):
files = ['sqlalchemy-basics.test',
'sqlalchemy-sql-elements.test',
'sqlalchemy-sql-sqltypes.test',
'sqlalchemy-sql-selectable.test',
'sqlalchemy-sql-schema.test',
'sqlalchemy-plugin-features.test',
'sqlalchemy-plugin-query.test']
data_prefix = test_data_prefix
def run_case(self, testcase: DataDrivenTestCase) -> None:
| assert testcase.old_cwd is not None, "test was not properly set up"
mypy_cmdline = [
'--show-traceback',
'--no-silence-site-packages',
'--config-file={}/sqlalchemy.ini'.format(inipath),
]
py2 = testcase.name.lower().endswith('python2')
if py2:
if try_find_python2_interpreter() is None:
pytest.skip()
return
mypy_cmdline.append('--py2')
else:
mypy_cmdline.append('--python-version={}'.format('.'.join(map(str,
sys.version_info[:2]))))
# Write the program to a file.
program_path = os.path.join(test_temp_dir, 'main.py')
mypy_cmdline.append(program_path)
with open(program_path, 'w') as file:
for s in testcase.input:
file.write('{}\n'.format(s))
output = []
# Type check the program.
out, err, returncode = api.run(mypy_cmdline)
# split lines, remove newlines, and remove directory of test case
for line in (out + err).splitlines():
if line.startswith(test_temp_dir + os.sep):
output.append(line[len(test_temp_dir + os.sep):].rstrip("\r\n").replace('.py',
''))
else:
output.append(line.rstrip("\r\n"))
# Remove temp file.
os.remove(program_path)
assert_string_arrays_equal(testcase.output, output,
'Invalid output ({}, line {})'.format(
testcase.file, testcase.line)) |
|
interfaces.py | """
Copyright (c) 2015
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are met:
- Redistributions of source code must retain the above copyright notice,
this list of conditions and the following disclaimer.
- Redistributions in binary form must reproduce the above copyright notice,
this list of conditions and the following disclaimer in the documentation
and/or other materials provided with the distribution.
- Neither the name of the copyright holder nor the names of its contributors
may be used to endorse or promote products derived from this software
without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
SERVICES;LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
@authors: Sergei Garbuzov
@status: Development
@version: 1.1.0
firewall.py: Firewall specific properties and access methods
"""
import json
from pybvc.common.utils import strip_none, remove_empty_from_dict, dict_keys_underscored_to_dashed
#-------------------------------------------------------------------------------
# Class 'DataPlaneInterface'
#-------------------------------------------------------------------------------
class DataPlaneInterface():
''' Class representing a dataplane interface '''
def __init__(self, name):
''' Dataplane interface name '''
self.tagnode = name
''' Description for the interface '''
self.description = None
''' DHCPv6 options (container) '''
self.dhcpv6_options = None
''' IPv4 parameters (container) '''
self.ip = None
''' IPv6 parameters (container) '''
self.ipv6 = None
''' Maximum Transmission Unit (MTU) '''
self.mtu = None
''' Disable interface '''
self.disable = None
''' Virtual Interface (VIF) ID (list) '''
self.vif = []
''' Enable/Disable sflow for interface '''
self.sflow = None
''' IP address (list) '''
self.address = []
''' Media Access Control (MAC) address '''
self.mac = None
''' Ignore link state changes '''
self.disable_link_detect = None
''' This interface bridge group (container) '''
self.bridge_group = None
#---------------------------------------------------------------------------
#
#---------------------------------------------------------------------------
def to_string(self):
""" Return this object as a string """
return str(vars(self))
#---------------------------------------------------------------------------
#
#---------------------------------------------------------------------------
def to_json(self):
""" Return this object as JSON """
return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True, indent=4)
#---------------------------------------------------------------------------
#
#---------------------------------------------------------------------------
def set_description(self, description):
self.description = description
# TBD
def set_dhcpv6_options(self, TBD):
pass
# TBD
def set_ipv4_options(self, TBD):
pass
# TBD
def set_ipv6_options(self, TBD):
pass
def set_mtu(self, mtu):
self.mtu = mtu
def set_disable(self, value):
if (value == True):
self.disable = ""
else:
self.disable = None
def set_vif(self, vif_id):
self.vif.append(vif_id)
def set_sflow(self, value):
if (value == True):
self.sflow = ""
else:
self.sflow = None
def set_address(self, address):
self.address.append(address)
def set_mac(self, mac):
self.mac = mac
def set_disable_link_detect(self, value):
if (value == True):
self.disable_link_detect = ""
else:
self.disable_link_detect = None
# TBD
def set_bridge_group(self, TBD):
pass
#-------------------------------------------------------------------------------
# Class 'OpenVpnInterface'
#-------------------------------------------------------------------------------
class OpenVpnInterface():
''' Class representing an OpenVPN tunnel interface '''
_mn1 = "vyatta-interfaces:interfaces"
_mn2 = "vyatta-interfaces-openvpn:openvpn"
#---------------------------------------------------------------------------
#
#---------------------------------------------------------------------------
def __init__(self, name):
''' OpenVPN tunnel interface name '''
self.tagnode = name
''' Description for the interface '''
self.description = None
''' OpenVPN authentication method (container) '''
self.auth = None
''' Hashing algorithm option
enumeration: 'md5', 'sha1', 'sha256', 'sha512' '''
self.hash = None
''' Interface to be disabled '''
self.disable = None
''' Server-mode options (container) '''
self.server = None
''' OpenVPN interface device-type '''
self.device_type = None
''' File containing the secret key shared with remote end of tunnel '''
self.shared_secret_key_file = None
''' Data encryption algorithm option
enumeration: 'des', '3des', 'bf128', 'bf256', 'aes128', 'aes192', 'aes256' '''
self.encryption = None
''' Additional OpenVPN options (list) '''
self.openvpn_option = []
''' Local IP address or network address '''
self.local_address = None
''' Local port number to accept connections (range 1..65535) '''
self.local_port = None
''' Local IP address to accept connections (all if not set) '''
self.local_host = None
''' IP address of remote end of tunnel '''
self.remote_address = None
''' Remote port number to connect to '''
self.remote_port = None
''' Remote host to connect to (dynamic if not set) '''
self.remote_host = []
''' Transport Layer Security (TLS) options (container) '''
self.tls = TlsOptions()
''' OpenVPN mode of operation
enumeration: 'site-to-site', 'client', 'server' '''
self.mode = None
''' OpenVPN tunnel to be used as the default route (container)'''
self.replace_default_route = None
''' OpenVPN communication protocol
enumeration: 'udp', 'tcp-passive', 'tcp-active' '''
self.protocol = None
''' IPv4 parameters (container) '''
self.ip = None
''' IPv6 parameters (container) '''
self.ipv6 = None
#---------------------------------------------------------------------------
#
#---------------------------------------------------------------------------
def to_string(self):
""" Return this object as a string """
return str(vars(self))
#---------------------------------------------------------------------------
#
#---------------------------------------------------------------------------
def to_json(self):
""" Return this object as JSON """
return json.dumps(self, default=lambda o: o.__dict__, sort_keys=True, indent=4)
#---------------------------------------------------------------------------
#
#---------------------------------------------------------------------------
def get_payload(self):
""" Return this object as a payload for HTTP request """
s = self.to_json()
obj = json.loads(s)
obj1 = strip_none(obj)
obj2 = remove_empty_from_dict(obj1)
obj3 = dict_keys_underscored_to_dashed(obj2)
payload = {self._mn1: {self._mn2:[obj3]}}
return json.dumps(payload, default=lambda o: o.__dict__, sort_keys=True, indent=4)
#---------------------------------------------------------------------------
#
#---------------------------------------------------------------------------
def set_description(self, description):
self.description = description
#---------------------------------------------------------------------------
#
#---------------------------------------------------------------------------
def set_mode(self, mode):
self.mode = mode
#---------------------------------------------------------------------------
#
#---------------------------------------------------------------------------
def set_shared_secret_key_file(self, path):
self.shared_secret_key_file = path
#---------------------------------------------------------------------------
#
#---------------------------------------------------------------------------
def set_local_address(self, addr):
self.local_address = addr
#---------------------------------------------------------------------------
#
#---------------------------------------------------------------------------
def set_remote_address(self, addr):
self.remote_address = addr
#---------------------------------------------------------------------------
#
#---------------------------------------------------------------------------
def | (self, addr):
self.remote_host.append(addr)
#---------------------------------------------------------------------------
#
#---------------------------------------------------------------------------
def set_tls_role(self, role):
self.tls.set_role(role)
#---------------------------------------------------------------------------
#
#---------------------------------------------------------------------------
def set_tls_dh_file(self, path):
self.tls.set_dh_file(path)
#---------------------------------------------------------------------------
#
#---------------------------------------------------------------------------
def set_tls_ca_cert_file(self, path):
self.tls.set_ca_cert_file(path)
#---------------------------------------------------------------------------
#
#---------------------------------------------------------------------------
def set_tls_cert_file(self, path):
self.tls.set_cert_file(path)
#---------------------------------------------------------------------------
#
#---------------------------------------------------------------------------
def set_tls_crl_file(self, path):
self.tls.set_crl_file(path)
#---------------------------------------------------------------------------
#
#---------------------------------------------------------------------------
def set_tls_key_file(self, path):
self.tls.set_key_file(path)
#-------------------------------------------------------------------------------
# Class 'TlsOptions'
#-------------------------------------------------------------------------------
class TlsOptions():
''' Transport Layer Security (TLS) options
Helper class of the 'OpenVpnInterface' class '''
#---------------------------------------------------------------------------
#
#---------------------------------------------------------------------------
def __init__(self):
''' Role in TLS negotiation
enumeration: 'active', 'passive' '''
self.role = None
''' File containing Diffie Hellman parameters (server only) '''
self.dh_file = None
''' File containing certificate for Certificate Authority (CA) '''
self.ca_cert_file = None
''' File containing certificate for this host '''
self.cert_file = None
''' File containing certificate revocation list (CRL) for this host '''
self.crl_file = None
''' File containing this host's private key '''
self.key_file = None
#---------------------------------------------------------------------------
#
#---------------------------------------------------------------------------
def set_role(self, role):
self.role = role
#---------------------------------------------------------------------------
#
#---------------------------------------------------------------------------
def set_dh_file(self, path):
self.dh_file = path
#---------------------------------------------------------------------------
#
#---------------------------------------------------------------------------
def set_ca_cert_file(self, path):
self.ca_cert_file = path
#---------------------------------------------------------------------------
#
#---------------------------------------------------------------------------
def set_cert_file(self, path):
self.cert_file = path
#---------------------------------------------------------------------------
#
#---------------------------------------------------------------------------
def set_crl_file(self, path):
self.crl_file = path
#---------------------------------------------------------------------------
#
#---------------------------------------------------------------------------
def set_key_file(self, path):
self.key_file = path
#-------------------------------------------------------------------------------
# Class 'VirtualTunnelInterface'
#-------------------------------------------------------------------------------
class VirtualTunnelInterface():
    ''' Class representing a Virtual tunnel interface (VTI) '''

    def __init__(self, name):
        # Interface identifier (the VTI name).
        self.tagnode = name
        # Free-form description for the interface.
        self.description = None
        # Maximum Transmission Unit (MTU), range 68..9000.
        self.mtu = None
        # "" when the interface is disabled, None when enabled.
        self.disable = None
        # IPv4 or IPv6 prefixes assigned to the interface.
        self.address = []
        # IPv4-specific parameters.
        self.ip = None
        # IPv6-specific parameters.
        self.ipv6 = None

    def to_string(self):
        """ Return this object as a string """
        return str(vars(self))

    def to_json(self):
        """ Return this object as JSON """
        return json.dumps(self, default=lambda o: o.__dict__,
                          sort_keys=True, indent=4)

    def set_description(self, description):
        """Set the free-form interface description."""
        self.description = description

    def set_mtu(self, mtu):
        """Set the Maximum Transmission Unit."""
        self.mtu = mtu

    def set_disable(self, value):
        """Mark the interface disabled ("" sentinel) or enabled (None)."""
        # Equality test with True kept deliberately: only an exact True
        # (or a value equal to it) sets the disable sentinel.
        self.disable = "" if value == True else None

    def set_address(self, address):
        """Append an IPv4/IPv6 prefix to the interface address list."""
        self.address.append(address)
| set_remote_host |
bitcoin_de.ts | <TS language="de" version="2.1">
<context>
<name>AddressBookPage</name>
<message>
<source>Right-click to edit address or label</source>
<translation>Rechtsklick zum Bearbeiten der Adresse oder der Bezeichnung</translation>
</message>
<message>
<source>Create a new address</source>
<translation>Eine neue Adresse erstellen</translation>
</message>
<message>
<source>&New</source>
<translation>&Neu</translation>
</message>
<message>
<source>Copy the currently selected address to the system clipboard</source>
<translation>Ausgewählte Adresse in die Zwischenablage kopieren</translation>
</message>
<message>
<source>&Copy</source>
<translation>&Kopieren</translation>
</message>
<message>
<source>C&lose</source>
<translation>&Schließen</translation>
</message>
<message>
<source>&Copy Address</source>
<translation>Adresse &kopieren</translation>
</message>
<message>
<source>Delete the currently selected address from the list</source>
<translation>Ausgewählte Adresse aus der Liste entfernen</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Daten der aktuellen Ansicht in eine Datei exportieren</translation>
</message>
<message>
<source>&Export</source>
<translation>E&xportieren</translation>
</message>
<message>
<source>&Delete</source>
<translation>&Löschen</translation>
</message>
<message>
<source>Choose the address to send coins to</source>
<translation>Wählen Sie die Adresse aus, an die Sie Emercoins überweisen möchten</translation>
</message>
<message>
<source>Choose the address to receive coins with</source>
<translation>Wählen Sie die Adresse aus, über die Sie Emercoins empfangen wollen</translation>
</message>
<message>
<source>C&hoose</source>
<translation>&Auswählen</translation>
</message>
<message>
<source>Sending addresses</source>
<translation>Zahlungsadressen</translation>
</message>
<message>
<source>Receiving addresses</source>
<translation>Empfangsadressen</translation>
</message>
<message>
<source>These are your Emercoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation>Dies sind Ihre Emercoin-Adressen zum Tätigen von Überweisungen. Bitte prüfen Sie den Betrag und die Empfangsadresse, bevor Sie Emercoins überweisen.</translation>
</message>
<message>
<source>These are your Emercoin addresses for receiving payments. It is recommended to use a new receiving address for each transaction.</source>
<translation>Dies sind Ihre Emercoin-Adressen zum Empfangen von Zahlungen. Es wird empfohlen für jede Transaktion eine neue Empfangsadresse zu verwenden.</translation>
</message>
<message>
<source>Copy &Label</source>
<translation>&Bezeichnung kopieren</translation>
</message>
<message>
<source>&Edit</source>
<translation>&Editieren</translation>
</message>
<message>
<source>Export Address List</source>
        <translation>Adressliste exportieren</translation>
</message>
<message>
<source>Comma separated file (*.csv)</source>
        <translation>Kommagetrennte Datei (*.csv)</translation>
</message>
<message>
<source>Exporting Failed</source>
<translation>Exportieren fehlgeschlagen</translation>
</message>
<message>
<source>There was an error trying to save the address list to %1. Please try again.</source>
<translation>Beim Speichern der Adressliste nach %1 ist ein Fehler aufgetreten. Bitte versuchen Sie es erneut.</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<source>Label</source>
<translation>Bezeichnung</translation>
</message>
<message>
<source>Address</source>
<translation>Adresse</translation>
</message>
<message>
<source>(no label)</source>
<translation>(keine Bezeichnung)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<source>Passphrase Dialog</source>
<translation>Passphrasendialog</translation>
</message>
<message>
<source>Enter passphrase</source>
<translation>Passphrase eingeben</translation>
</message>
<message>
<source>New passphrase</source>
<translation>Neue Passphrase</translation>
</message>
<message>
<source>Repeat new passphrase</source>
<translation>Neue Passphrase bestätigen</translation>
</message>
<message>
<source>Encrypt wallet</source>
<translation>Wallet verschlüsseln</translation>
</message>
<message>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>Dieser Vorgang benötigt Ihre Passphrase, um die Wallet zu entsperren.</translation>
</message>
<message>
<source>Unlock wallet</source>
<translation>Wallet entsperren</translation>
</message>
<message>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>Dieser Vorgang benötigt Ihre Passphrase, um die Wallet zu entschlüsseln.</translation>
</message>
<message>
<source>Decrypt wallet</source>
<translation>Wallet entschlüsseln</translation>
</message>
<message>
<source>Change passphrase</source>
<translation>Passphrase ändern</translation>
</message>
<message>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>Geben Sie die alte und neue Wallet-Passphrase ein.</translation>
</message>
<message>
<source>Confirm wallet encryption</source>
<translation>Wallet-Verschlüsselung bestätigen</translation>
</message>
<message>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR EMERCOINS</b>!</source>
<translation>Warnung: Wenn Sie Ihre Wallet verschlüsseln und Ihre Passphrase verlieren, werden Sie <b>alle Ihre Emercoins verlieren</b>!</translation>
</message>
<message>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>Sind Sie sich sicher, dass Sie Ihre Wallet verschlüsseln möchten?</translation>
</message>
<message>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation>WICHTIG: Alle vorherigen Wallet-Sicherungen sollten durch die neu erzeugte, verschlüsselte Wallet ersetzt werden. Aus Sicherheitsgründen werden vorherige Sicherungen der unverschlüsselten Wallet nutzlos, sobald Sie die neue, verschlüsselte Wallet verwenden.</translation>
</message>
<message>
<source>Warning: The Caps Lock key is on!</source>
<translation>Warnung: Die Feststelltaste ist aktiviert!</translation>
</message>
<message>
<source>Wallet encrypted</source>
<translation>Wallet verschlüsselt</translation>
</message>
<message>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>ten or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>Geben Sie die neue Passphrase für die Wallet ein.<br>Bitte benutzen Sie eine Passphrase bestehend aus <b>zehn oder mehr zufälligen Zeichen</b> oder <b>acht oder mehr Wörtern</b>.</translation>
</message>
<message>
<source>Emercoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your emercoins from being stolen by malware infecting your computer.</source>
        <translation>Emercoin wird jetzt beendet, um den Verschlüsselungsprozess abzuschließen. Bitte beachten Sie, dass die Wallet-Verschlüsselung nicht vollständig vor Diebstahl Ihrer Emercoins durch Schadprogramme schützt, die Ihren Computer befallen.</translation>
</message>
<message>
<source>Wallet encryption failed</source>
<translation>Wallet-Verschlüsselung fehlgeschlagen</translation>
</message>
<message>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>Die Wallet-Verschlüsselung ist aufgrund eines internen Fehlers fehlgeschlagen. Ihre Wallet wurde nicht verschlüsselt.</translation>
</message>
<message>
<source>The supplied passphrases do not match.</source>
<translation>Die eingegebenen Passphrasen stimmen nicht überein.</translation>
</message>
<message>
<source>Wallet unlock failed</source>
<translation>Wallet-Entsperrung fehlgeschlagen</translation>
</message>
<message>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>Die eingegebene Passphrase zur Wallet-Entschlüsselung war nicht korrekt.</translation>
</message>
<message>
<source>Wallet decryption failed</source>
<translation>Wallet-Entschlüsselung fehlgeschlagen</translation>
</message>
<message>
<source>Wallet passphrase was successfully changed.</source>
<translation>Die Wallet-Passphrase wurde erfolgreich geändert.</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<source>Sign &message...</source>
<translation>Nachricht s&ignieren...</translation>
</message>
<message>
<source>Synchronizing with network...</source>
<translation>Synchronisiere mit Netzwerk...</translation>
</message>
<message>
<source>&Overview</source>
<translation>&Übersicht</translation>
</message>
<message>
<source>Node</source>
<translation>Knoten</translation>
</message>
<message>
<source>Show general overview of wallet</source>
<translation>Allgemeine Wallet-Übersicht anzeigen</translation>
</message>
<message>
<source>&Transactions</source>
<translation>&Transaktionen</translation>
</message>
<message>
<source>Browse transaction history</source>
<translation>Transaktionsverlauf durchsehen</translation>
</message>
<message>
<source>E&xit</source>
<translation>&Beenden</translation>
</message>
<message>
<source>Quit application</source>
<translation>Anwendung beenden</translation>
</message>
<message>
<source>About &Qt</source>
<translation>Über &Qt</translation>
</message>
<message>
<source>Show information about Qt</source>
<translation>Informationen über Qt anzeigen</translation>
</message>
<message>
<source>&Options...</source>
<translation>&Konfiguration...</translation>
</message>
<message>
<source>&Encrypt Wallet...</source>
<translation>Wallet &verschlüsseln...</translation>
</message>
<message>
<source>&Backup Wallet...</source>
<translation>Wallet &sichern...</translation>
</message>
<message>
<source>&Change Passphrase...</source>
<translation>Passphrase &ändern...</translation>
</message>
<message>
<source>&Sending addresses...</source>
<translation>&Zahlungsadressen...</translation>
</message>
<message>
<source>&Receiving addresses...</source>
<translation>&Empfangsadressen...</translation>
</message>
<message>
<source>Open &URI...</source>
<translation>&URI öffnen...</translation>
</message>
<message>
<source>Emercoin Core client</source>
<translation>"Emercoin Core"-Client</translation>
</message>
<message>
<source>Importing blocks from disk...</source>
<translation>Importiere Blöcke von Datenträger...</translation>
</message>
<message>
<source>Reindexing blocks on disk...</source>
<translation>Reindiziere Blöcke auf Datenträger...</translation>
</message>
<message>
<source>Send coins to a Emercoin address</source>
<translation>Emercoins an eine Emercoin-Adresse überweisen</translation>
</message>
<message>
<source>Modify configuration options for Emercoin</source>
<translation>Die Konfiguration des Clients bearbeiten</translation>
</message>
<message>
<source>Backup wallet to another location</source>
<translation>Eine Wallet-Sicherungskopie erstellen und abspeichern</translation>
</message>
<message>
<source>Change the passphrase used for wallet encryption</source>
<translation>Ändert die Passphrase, die für die Wallet-Verschlüsselung benutzt wird</translation>
</message>
<message>
<source>&Debug window</source>
<translation>&Debugfenster</translation>
</message>
<message>
<source>Open debugging and diagnostic console</source>
<translation>Debugging- und Diagnosekonsole öffnen</translation>
</message>
<message>
<source>&Verify message...</source>
<translation>Nachricht &verifizieren...</translation>
</message>
<message>
<source>Emercoin</source>
<translation>Emercoin</translation>
</message>
<message>
<source>Wallet</source>
<translation>Wallet</translation>
</message>
<message>
<source>&Send</source>
<translation>&Überweisen</translation>
</message>
<message>
<source>&Receive</source>
<translation>&Empfangen</translation>
</message>
<message>
<source>Show information about Emercoin Core</source>
<translation>Informationen über Emercoin Core anzeigen</translation>
</message>
<message>
<source>&Show / Hide</source>
<translation>&Anzeigen / Verstecken</translation>
</message>
<message>
<source>Show or hide the main Window</source>
<translation>Das Hauptfenster anzeigen oder verstecken</translation>
</message>
<message>
<source>Encrypt the private keys that belong to your wallet</source>
<translation>Verschlüsselt die zu Ihrer Wallet gehörenden privaten Schlüssel</translation>
</message>
<message>
<source>Sign messages with your Emercoin addresses to prove you own them</source>
<translation>Nachrichten signieren, um den Besitz Ihrer Emercoin-Adressen zu beweisen</translation>
</message>
<message>
<source>Verify messages to ensure they were signed with specified Emercoin addresses</source>
<translation>Nachrichten verifizieren, um sicherzustellen, dass diese mit den angegebenen Emercoin-Adressen signiert wurden</translation>
</message>
<message>
<source>&File</source>
<translation>&Datei</translation>
</message>
<message>
<source>&Settings</source>
<translation>&Einstellungen</translation>
</message>
<message>
<source>&Help</source>
<translation>&Hilfe</translation>
</message>
<message>
<source>Tabs toolbar</source>
<translation>Registerkartenleiste</translation>
</message>
<message>
<source>Emercoin Core</source>
<translation>Emercoin Core</translation>
</message>
<message>
<source>Request payments (generates QR codes and emercoin: URIs)</source>
<translation>Zahlungen anfordern (erzeugt QR-Codes und "emercoin:"-URIs)</translation>
</message>
<message>
<source>&About Emercoin Core</source>
<translation>&Über Emercoin Core</translation>
</message>
<message>
<source>Show the list of used sending addresses and labels</source>
<translation>Liste verwendeter Zahlungsadressen und Bezeichnungen anzeigen</translation>
</message>
<message>
<source>Show the list of used receiving addresses and labels</source>
<translation>Liste verwendeter Empfangsadressen und Bezeichnungen anzeigen</translation>
</message>
<message>
<source>Open a emercoin: URI or payment request</source>
<translation>Eine "emercoin:"-URI oder Zahlungsanforderung öffnen</translation>
</message>
<message>
<source>&Command-line options</source>
<translation>&Kommandozeilenoptionen</translation>
</message>
<message>
<source>Show the Emercoin Core help message to get a list with possible Emercoin command-line options</source>
<translation>Zeige den "Emercoin Core"-Hilfetext, um eine Liste mit möglichen Kommandozeilenoptionen zu erhalten</translation>
</message>
<message numerus="yes">
<source>%n active connection(s) to Emercoin network</source>
<translation><numerusform>%n aktive Verbindung zum Emercoin-Netzwerk</numerusform><numerusform>%n aktive Verbindungen zum Emercoin-Netzwerk</numerusform></translation>
</message>
<message>
<source>No block source available...</source>
<translation>Keine Blockquelle verfügbar...</translation>
</message>
<message numerus="yes">
<source>%n hour(s)</source>
<translation><numerusform>%n Stunde</numerusform><numerusform>%n Stunden</numerusform></translation>
</message>
<message numerus="yes">
<source>%n day(s)</source>
<translation><numerusform>%n Tag</numerusform><numerusform>%n Tage</numerusform></translation>
</message>
<message numerus="yes">
<source>%n week(s)</source>
<translation><numerusform>%n Woche</numerusform><numerusform>%n Wochen</numerusform></translation>
</message>
<message>
<source>%1 and %2</source>
<translation>%1 und %2</translation>
</message>
<message numerus="yes">
<source>%n year(s)</source>
<translation><numerusform>%n Jahr</numerusform><numerusform>%n Jahre</numerusform></translation>
</message>
<message>
<source>%1 behind</source>
<translation>%1 im Rückstand</translation>
</message>
<message>
<source>Last received block was generated %1 ago.</source>
<translation>Der letzte empfangene Block ist %1 alt.</translation>
</message>
<message>
<source>Transactions after this will not yet be visible.</source>
<translation>Transaktionen hiernach werden noch nicht angezeigt.</translation>
</message>
<message>
<source>Error</source>
<translation>Fehler</translation>
</message>
<message>
<source>Warning</source>
<translation>Warnung</translation>
</message>
<message>
<source>Information</source>
<translation>Hinweis</translation>
</message>
<message>
<source>Up to date</source>
<translation>Auf aktuellem Stand</translation>
</message>
<message numerus="yes">
<source>Processed %n blocks of transaction history.</source>
<translation><numerusform>%n Block des Transaktionsverlaufs verarbeitet.</numerusform><numerusform>%n Blöcke des Transaktionsverlaufs verarbeitet.</numerusform></translation>
</message>
<message>
<source>Catching up...</source>
<translation>Hole auf...</translation>
</message>
<message>
<source>Sent transaction</source>
<translation>Gesendete Transaktion</translation>
</message>
<message>
<source>Incoming transaction</source>
<translation>Eingehende Transaktion</translation>
</message>
<message>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation>Datum: %1
Betrag: %2
Typ: %3
Adresse: %4</translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>Wallet ist <b>verschlüsselt</b> und aktuell <b>entsperrt</b></translation>
</message>
<message>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>Wallet ist <b>verschlüsselt</b> und aktuell <b>gesperrt</b></translation>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<source>Network Alert</source>
<translation>Netzwerkalarm</translation>
</message>
</context>
<context>
<name>CoinControlDialog</name>
<message>
<source>Coin Selection</source>
<translation>Münzauswahl ("Coin Control")</translation>
</message>
<message>
<source>Quantity:</source>
<translation>Anzahl:</translation>
</message>
<message>
<source>Bytes:</source>
<translation>Byte:</translation>
</message>
<message>
<source>Amount:</source>
<translation>Betrag:</translation>
</message>
<message>
<source>Priority:</source>
<translation>Priorität:</translation>
</message>
<message>
<source>Fee:</source>
<translation>Gebühr:</translation>
</message>
<message>
<source>Dust:</source>
<translation>"Dust":</translation>
</message>
<message>
<source>After Fee:</source>
<translation>Abzüglich Gebühr:</translation>
</message>
<message>
<source>Change:</source>
<translation>Wechselgeld:</translation>
</message>
<message>
<source>(un)select all</source>
<translation>Alles (de)selektieren</translation>
</message>
<message>
<source>Tree mode</source>
<translation>Baumansicht</translation>
</message>
<message>
<source>List mode</source>
<translation>Listenansicht</translation>
</message>
<message>
<source>Amount</source>
<translation>Betrag</translation>
</message>
<message>
<source>Received with label</source>
<translation>Empfangen über Bezeichnung</translation>
</message>
<message>
<source>Received with address</source>
<translation>Empfangen über Adresse</translation>
</message>
<message>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<source>Confirmations</source>
<translation>Bestätigungen</translation>
</message>
<message>
<source>Confirmed</source>
<translation>Bestätigt</translation>
</message>
<message>
<source>Priority</source>
<translation>Priorität</translation>
</message>
<message>
<source>Copy address</source>
<translation>Adresse kopieren</translation>
</message>
<message>
<source>Copy label</source>
<translation>Bezeichnung kopieren</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Betrag kopieren</translation>
</message>
<message>
<source>Copy transaction ID</source>
<translation>Transaktions-ID kopieren</translation>
</message>
<message>
<source>Lock unspent</source>
<translation>Nicht ausgegebenen Betrag sperren</translation>
</message>
<message>
<source>Unlock unspent</source>
<translation>Nicht ausgegebenen Betrag entsperren</translation>
</message>
<message>
<source>Copy quantity</source>
<translation>Anzahl kopieren</translation>
</message>
<message>
<source>Copy fee</source>
<translation>Gebühr kopieren</translation>
</message>
<message>
<source>Copy after fee</source>
<translation>Abzüglich Gebühr kopieren</translation>
</message>
<message>
<source>Copy bytes</source>
<translation>Byte kopieren</translation>
</message>
<message>
<source>Copy priority</source>
<translation>Priorität kopieren</translation>
</message>
<message>
<source>Copy dust</source>
<translation>"Dust" kopieren</translation>
</message>
<message>
<source>Copy change</source>
<translation>Wechselgeld kopieren</translation>
</message>
<message>
<source>highest</source>
<translation>am höchsten</translation>
</message>
<message>
<source>higher</source>
<translation>höher</translation>
</message>
<message>
<source>high</source>
<translation>hoch</translation>
</message>
<message>
<source>medium-high</source>
<translation>mittel-hoch</translation>
</message>
<message>
<source>medium</source>
<translation>mittel</translation>
</message>
<message>
<source>low-medium</source>
<translation>niedrig-mittel</translation>
</message>
<message>
<source>low</source>
<translation>niedrig</translation>
</message>
<message>
<source>lower</source>
<translation>niedriger</translation>
</message>
<message>
<source>lowest</source>
<translation>am niedrigsten</translation>
</message>
<message>
<source>(%1 locked)</source>
<translation>(%1 gesperrt)</translation>
</message>
<message>
<source>none</source>
<translation>keine</translation>
</message>
<message>
<source>Can vary +/- %1 satoshi(s) per input.</source>
<translation>Kann pro Eingabe um +/- %1 Satoshi(s) abweichen.</translation>
</message>
<message>
<source>yes</source>
<translation>ja</translation>
</message>
<message>
<source>no</source>
<translation>nein</translation>
</message>
<message>
<source>This label turns red, if the transaction size is greater than 1000 bytes.</source>
<translation>Diese Bezeichnung wird rot, wenn die Transaktion größer als 1000 Byte ist.</translation>
</message>
<message>
<source>This means a fee of at least %1 per kB is required.</source>
<translation>Das bedeutet, dass eine Gebühr von mindestens %1 pro kB erforderlich ist.</translation>
</message>
<message>
<source>Can vary +/- 1 byte per input.</source>
<translation>Kann um +/- 1 Byte pro Eingabe variieren.</translation>
</message>
<message>
<source>Transactions with higher priority are more likely to get included into a block.</source>
<translation>Transaktionen mit höherer Priorität haben eine größere Chance in einen Block aufgenommen zu werden.</translation>
</message>
<message>
<source>This label turns red, if the priority is smaller than "medium".</source>
<translation>Diese Bezeichnung wird rot, wenn die Priorität niedriger als "mittel" ist.</translation>
</message>
<message>
<source>This label turns red, if any recipient receives an amount smaller than %1.</source>
<translation>Diese Bezeichnung wird rot, wenn irgendein Empfänger einen Betrag kleiner als %1 erhält.</translation>
</message>
<message>
<source>(no label)</source>
<translation>(keine Bezeichnung)</translation>
</message>
<message>
<source>change from %1 (%2)</source>
<translation>Wechselgeld von %1 (%2)</translation>
</message>
<message>
<source>(change)</source>
<translation>(Wechselgeld)</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<source>Edit Address</source>
<translation>Adresse bearbeiten</translation>
</message>
<message>
<source>&Label</source>
<translation>&Bezeichnung</translation>
</message>
<message>
<source>The label associated with this address list entry</source>
<translation>Bezeichnung, die dem Adresslisteneintrag zugeordnet ist.</translation>
</message>
<message>
<source>The address associated with this address list entry. This can only be modified for sending addresses.</source>
<translation>Adresse, die dem Adresslisteneintrag zugeordnet ist. Diese kann nur bei Zahlungsadressen verändert werden.</translation>
</message>
<message>
<source>&Address</source>
<translation>&Adresse</translation>
</message>
<message>
<source>New receiving address</source>
<translation>Neue Empfangsadresse</translation>
</message>
<message>
<source>New sending address</source>
<translation>Neue Zahlungsadresse</translation>
</message>
<message>
<source>Edit receiving address</source>
<translation>Empfangsadresse bearbeiten</translation>
</message>
<message>
<source>Edit sending address</source>
<translation>Zahlungsadresse bearbeiten</translation>
</message>
<message>
<source>The entered address "%1" is already in the address book.</source>
<translation>Die eingegebene Adresse "%1" befindet sich bereits im Adressbuch.</translation>
</message>
<message>
<source>The entered address "%1" is not a valid Emercoin address.</source>
<translation>Die eingegebene Adresse "%1" ist keine gültige Emercoin-Adresse.</translation>
</message>
<message>
<source>Could not unlock wallet.</source>
<translation>Wallet konnte nicht entsperrt werden.</translation>
</message>
<message>
<source>New key generation failed.</source>
<translation>Erzeugung eines neuen Schlüssels fehlgeschlagen.</translation>
</message>
</context>
<context>
<name>FreespaceChecker</name>
<message>
<source>A new data directory will be created.</source>
<translation>Es wird ein neues Datenverzeichnis angelegt.</translation>
</message>
<message>
<source>name</source>
<translation>Name</translation>
</message>
<message>
<source>Directory already exists. Add %1 if you intend to create a new directory here.</source>
<translation>Verzeichnis existiert bereits. Fügen Sie %1 an, wenn Sie beabsichtigen hier ein neues Verzeichnis anzulegen.</translation>
</message>
<message>
<source>Path already exists, and is not a directory.</source>
<translation>Pfad existiert bereits und ist kein Verzeichnis.</translation>
</message>
<message>
<source>Cannot create data directory here.</source>
<translation>Datenverzeichnis kann hier nicht angelegt werden.</translation>
</message>
</context>
<context>
<name>HelpMessageDialog</name>
<message>
<source>Emercoin Core</source>
<translation>Emercoin Core</translation>
</message>
<message>
<source>version</source>
<translation>Version</translation>
</message>
<message>
<source>(%1-bit)</source>
<translation>(%1-Bit)</translation>
</message>
<message>
<source>About Emercoin Core</source>
<translation>Über Emercoin Core</translation>
</message>
<message>
<source>Command-line options</source>
<translation>Kommandozeilenoptionen</translation>
</message>
<message>
<source>Usage:</source>
<translation>Benutzung:</translation>
</message>
<message>
<source>command-line options</source>
<translation>Kommandozeilenoptionen</translation>
</message>
<message>
<source>UI options</source>
<translation>UI-Optionen</translation>
</message>
<message>
<source>Set language, for example "de_DE" (default: system locale)</source>
<translation>Sprache festlegen, z.B. "de_DE" (Standard: Systemstandard)</translation>
</message>
<message>
<source>Start minimized</source>
<translation>Minimiert starten</translation>
</message>
<message>
<source>Set SSL root certificates for payment request (default: -system-)</source>
<translation>SSL-Wurzelzertifikate für Zahlungsanforderungen festlegen (Standard: -system-)</translation>
</message>
<message>
<source>Show splash screen on startup (default: 1)</source>
<translation>Startbildschirm beim Starten anzeigen (Standard: 1)</translation>
</message>
<message>
<source>Choose data directory on startup (default: 0)</source>
<translation>Datenverzeichnis beim Starten auswählen (Standard: 0)</translation>
</message>
</context>
<context>
<name>Intro</name>
<message>
<source>Welcome</source>
<translation>Willkommen</translation>
</message>
<message>
<source>Welcome to Emercoin Core.</source>
<translation>Willkommen zu Emercoin Core.</translation>
</message>
<message>
<source>As this is the first time the program is launched, you can choose where Emercoin Core will store its data.</source>
        <translation>Da Sie das Programm gerade zum ersten Mal starten, können Sie nun auswählen, wo Emercoin Core seine Daten ablegen soll.</translation>
</message>
<message>
<source>Emercoin Core will download and store a copy of the Emercoin block chain. At least %1GB of data will be stored in this directory, and it will grow over time. The wallet will also be stored in this directory.</source>
<translation>Emercoin Core wird eine Kopie der Blockkette herunterladen und speichern. Mindestens %1GB Daten werden in diesem Verzeichnis abgelegt und die Datenmenge wächst über die Zeit an. Auch die Wallet wird in diesem Verzeichnis abgelegt.</translation>
</message>
<message>
<source>Use the default data directory</source>
<translation>Standard-Datenverzeichnis verwenden</translation>
</message>
<message>
<source>Use a custom data directory:</source>
<translation>Ein benutzerdefiniertes Datenverzeichnis verwenden:</translation>
</message>
<message>
<source>Emercoin Core</source>
<translation>Emercoin Core</translation>
</message>
<message>
<source>Error: Specified data directory "%1" cannot be created.</source>
<translation>Fehler: Angegebenes Datenverzeichnis "%1" kann nicht angelegt werden.</translation>
</message>
<message>
<source>Error</source>
<translation>Fehler</translation>
</message>
<message numerus="yes">
<source>%n GB of free space available</source>
<translation><numerusform>%n GB freier Speicherplatz verfügbar</numerusform><numerusform>%n GB freier Speicherplatz verfügbar</numerusform></translation>
</message>
<message numerus="yes">
<source>(of %n GB needed)</source>
<translation><numerusform>(von benötigtem %n GB)</numerusform><numerusform>(von benötigten %n GB)</numerusform></translation>
</message>
</context>
<context>
<name>OpenURIDialog</name>
<message>
<source>Open URI</source>
<translation>URI öffnen</translation>
</message>
<message>
<source>Open payment request from URI or file</source>
<translation>Zahlungsanforderung über URI oder aus Datei öffnen</translation>
</message>
<message>
<source>URI:</source>
<translation>URI:</translation>
</message>
<message>
<source>Select payment request file</source>
<translation>Zahlungsanforderungsdatei auswählen</translation>
</message>
<message>
<source>Select payment request file to open</source>
<translation>Zu öffnende Zahlungsanforderungsdatei auswählen</translation>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<source>Options</source>
<translation>Konfiguration</translation>
</message>
<message>
<source>&Main</source>
<translation>&Allgemein</translation>
</message>
<message>
<source>Automatically start Emercoin after logging in to the system.</source>
<translation>Emercoin nach der Anmeldung am System automatisch ausführen.</translation>
</message>
<message>
<source>&Start Emercoin on system login</source>
<translation>&Starte Emercoin nach Systemanmeldung</translation>
</message>
<message>
<source>Size of &database cache</source>
<translation>Größe des &Datenbankcaches</translation>
</message>
<message>
<source>MB</source>
<translation>MB</translation>
</message>
<message>
<source>Number of script &verification threads</source>
<translation>Anzahl an Skript-&Verifizierungs-Threads</translation>
</message>
<message>
<source>Accept connections from outside</source>
<translation>Eingehende Verbindungen annehmen</translation>
</message>
<message>
<source>Allow incoming connections</source>
<translation>Erlaubt eingehende Verbindungen</translation>
</message>
<message>
<source>IP address of the proxy (e.g. IPv4: 127.0.0.1 / IPv6: ::1)</source>
<translation>IP-Adresse des Proxies (z.B. IPv4: 127.0.0.1 / IPv6: ::1)</translation>
</message>
<message>
<source>Third party URLs (e.g. a block explorer) that appear in the transactions tab as context menu items. %s in the URL is replaced by transaction hash. Multiple URLs are separated by vertical bar |.</source>
<translation>Externe URLs (z.B. ein Block-Explorer), die im Kontextmenü des Transaktionsverlaufs eingefügt werden. In der URL wird %s durch den Transaktionshash ersetzt. Bei Angabe mehrerer URLs müssen diese durch "|" voneinander getrennt werden.</translation>
</message>
<message>
<source>Third party transaction URLs</source>
<translation>Externe Transaktions-URLs</translation>
</message>
<message>
<source>Active command-line options that override above options:</source>
<translation>Aktive Kommandozeilenoptionen, die obige Konfiguration überschreiben:</translation>
</message>
<message>
<source>Reset all client options to default.</source>
<translation>Setzt die Clientkonfiguration auf Standardwerte zurück.</translation>
</message>
<message>
<source>&Reset Options</source>
<translation>Konfiguration &zurücksetzen</translation>
</message>
<message>
<source>&Network</source>
<translation>&Netzwerk</translation>
</message>
<message>
<source>(0 = auto, &lt;0 = leave that many cores free)</source>
<translation>(0 = automatisch, &lt;0 = so viele Kerne frei lassen)</translation>
</message>
<message>
<source>W&allet</source>
<translation>W&allet</translation>
</message>
<message>
<source>Expert</source>
<translation>Erweiterte Wallet-Optionen</translation>
</message>
<message>
<source>Enable coin &control features</source>
<translation>"&Coin Control"-Funktionen aktivieren</translation>
</message>
<message>
<source>If you disable the spending of unconfirmed change, the change from a transaction cannot be used until that transaction has at least one confirmation. This also affects how your balance is computed.</source>
<translation>Wenn Sie das Ausgeben von unbestätigtem Wechselgeld deaktivieren, kann das Wechselgeld einer Transaktion nicht verwendet werden, bis es mindestens eine Bestätigung erhalten hat. Dies wirkt sich auf die Berechnung des Kontostands aus.</translation>
</message>
<message>
<source>&Spend unconfirmed change</source>
<translation>&Unbestätigtes Wechselgeld darf ausgegeben werden</translation>
</message>
<message>
<source>Automatically open the Emercoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
<translation>Automatisch den Emercoin-Clientport auf dem Router öffnen. Dies funktioniert nur, wenn Ihr Router UPnP unterstützt und dies aktiviert ist.</translation>
</message>
<message>
<source>Map port using &UPnP</source>
<translation>Portweiterleitung via &UPnP</translation>
</message>
<message>
<source>Connect to the Emercoin network through a SOCKS5 proxy.</source>
<translation>Über einen SOCKS5-Proxy mit dem Emercoin-Netzwerk verbinden.</translation>
</message>
<message>
<source>&Connect through SOCKS5 proxy (default proxy):</source>
<translation>Über einen SOCKS5-Proxy &verbinden (Standardproxy):</translation>
</message>
<message>
<source>Proxy &IP:</source>
<translation>Proxy-&IP:</translation>
</message>
<message>
<source>&Port:</source>
<translation>&Port:</translation>
</message>
<message>
<source>Port of the proxy (e.g. 9050)</source>
<translation>Port des Proxies (z.B. 9050)</translation>
</message>
<message>
<source>&Window</source>
<translation>&Programmfenster</translation>
</message>
<message>
<source>Show only a tray icon after minimizing the window.</source>
<translation>Nur ein Symbol im Infobereich anzeigen, nachdem das Programmfenster minimiert wurde.</translation>
</message>
<message>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>In den Infobereich anstatt in die Taskleiste &minimieren</translation>
</message>
<message>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation>Minimiert die Anwendung, anstatt sie zu beenden, wenn das Fenster geschlossen wird. Wenn dies aktiviert ist, müssen Sie das Programm über "Beenden" im Menü schließen.</translation>
</message>
<message>
<source>M&inimize on close</source>
<translation>Beim Schließen m&inimieren</translation>
</message>
<message>
<source>&Display</source>
<translation>Anzei&ge</translation>
</message>
<message>
<source>User Interface &language:</source>
<translation>&Sprache der Benutzeroberfläche:</translation>
</message>
<message>
<source>The user interface language can be set here. This setting will take effect after restarting Emercoin.</source>
<translation>Legt die Sprache der Benutzeroberfläche fest. Diese Einstellung wird erst nach einem Neustart von Emercoin aktiv.</translation>
</message>
<message>
<source>&Unit to show amounts in:</source>
<translation>&Einheit der Beträge:</translation>
</message>
<message>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation>Wählen Sie die standardmäßige Untereinheit, die in der Benutzeroberfläche und beim Überweisen von Emercoins angezeigt werden soll.</translation>
</message>
<message>
<source>Whether to show coin control features or not.</source>
<translation>Legt fest, ob die "Coin Control"-Funktionen angezeigt werden.</translation>
</message>
<message>
<source>&OK</source>
<translation>&OK</translation>
</message>
<message>
<source>&Cancel</source>
<translation>A&bbrechen</translation>
</message>
<message>
<source>default</source>
<translation>Standard</translation>
</message>
<message>
<source>none</source>
<translation>keine</translation>
</message>
<message>
<source>Confirm options reset</source>
<translation>Zurücksetzen der Konfiguration bestätigen</translation>
</message>
<message>
<source>Client restart required to activate changes.</source>
<translation>Clientneustart nötig, um die Änderungen zu aktivieren.</translation>
</message>
<message>
<source>Client will be shutdown, do you want to proceed?</source>
<translation>Client wird beendet, wollen Sie fortfahren?</translation>
</message>
<message>
<source>This change would require a client restart.</source>
<translation>Diese Änderung würde einen Clientneustart benötigen.</translation>
</message>
<message>
<source>The supplied proxy address is invalid.</source>
<translation>Die eingegebene Proxyadresse ist ungültig.</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<source>Form</source>
<translation>Formular</translation>
</message>
<message>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the Emercoin network after a connection is established, but this process has not completed yet.</source>
<translation>Die angezeigten Informationen sind möglicherweise nicht mehr aktuell. Ihre Wallet wird automatisch synchronisiert, nachdem eine Verbindung zum Emercoin-Netzwerk hergestellt wurde. Dieser Prozess ist jedoch derzeit noch nicht abgeschlossen.</translation>
</message>
<message>
<source>Watch-only:</source>
<translation>Beobachtet:</translation>
</message>
<message>
<source>Available:</source>
<translation>Verfügbar:</translation>
</message>
<message>
<source>Your current spendable balance</source>
<translation>Ihr aktuell verfügbarer Kontostand</translation>
</message>
<message>
<source>Pending:</source>
<translation>Ausstehend:</translation>
</message>
<message>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the spendable balance</source>
<translation>Betrag aus unbestätigten Transaktionen, der noch nicht im aktuell verfügbaren Kontostand enthalten ist</translation>
</message>
<message>
<source>Immature:</source>
<translation>Unreif:</translation>
</message>
<message>
<source>Mined balance that has not yet matured</source>
<translation>Erarbeiteter Betrag, der noch nicht gereift ist</translation>
</message>
<message>
<source>Balances</source>
<translation>Kontostände</translation>
</message>
<message>
<source>Total:</source>
<translation>Gesamtbetrag:</translation>
</message>
<message>
<source>Your current total balance</source>
<translation>Aktueller Gesamtbetrag aus obigen Kategorien</translation>
</message>
<message>
<source>Your current balance in watch-only addresses</source>
<translation>Ihr aktueller Kontostand beobachteter Adressen</translation>
</message>
<message>
<source>Spendable:</source>
<translation>Verfügbar:</translation>
</message>
<message>
<source>Recent transactions</source>
<translation>Letzte Transaktionen</translation>
</message>
<message>
<source>Unconfirmed transactions to watch-only addresses</source>
<translation>Unbestätigte Transaktionen an beobachtete Adressen</translation>
</message>
<message>
<source>Mined balance in watch-only addresses that has not yet matured</source>
<translation>Erarbeiteter Betrag in beobachteten Adressen, der noch nicht gereift ist</translation>
</message>
<message>
<source>Current total balance in watch-only addresses</source>
<translation>Aktueller Gesamtbetrag in beobachteten Adressen aus obigen Kategorien</translation>
</message>
<message>
<source>out of sync</source>
<translation>nicht synchron</translation>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<source>URI handling</source>
<translation>URI-Verarbeitung</translation>
</message>
<message>
<source>Invalid payment address %1</source>
<translation>Ungültige Zahlungsadresse %1</translation>
</message>
<message>
<source>Payment request rejected</source>
<translation>Zahlungsanforderung abgelehnt</translation>
</message>
<message>
<source>Payment request network doesn't match client network.</source>
<translation>Netzwerk der Zahlungsanforderung stimmt nicht mit dem Client-Netzwerk überein.</translation>
</message>
<message>
<source>Payment request has expired.</source>
<translation>Zahlungsanforderung ist abgelaufen.</translation>
</message>
<message>
<source>Payment request is not initialized.</source>
<translation>Zahlungsanforderung ist nicht initialisiert.</translation>
</message>
<message>
<source>Requested payment amount of %1 is too small (considered dust).</source>
<translation>Angeforderter Zahlungsbetrag in Höhe von %1 ist zu niedrig und wurde als "Dust" eingestuft.</translation>
</message>
<message>
<source>Payment request error</source>
<translation>fehlerhafte Zahlungsanforderung</translation>
</message>
<message>
<source>Cannot start emercoin: click-to-pay handler</source>
<translation>"emercoin: Klicken-zum-Bezahlen"-Handler konnte nicht gestartet werden</translation>
</message>
<message>
<source>Payment request fetch URL is invalid: %1</source>
<translation>Abruf-URL der Zahlungsanforderung ist ungültig: %1</translation>
</message>
<message>
<source>URI cannot be parsed! This can be caused by an invalid Emercoin address or malformed URI parameters.</source>
<translation>URI kann nicht analysiert werden! Dies kann durch eine ungültige Emercoin-Adresse oder fehlerhafte URI-Parameter verursacht werden.</translation>
</message>
<message>
<source>Payment request file handling</source>
<translation>Zahlungsanforderungsdatei-Verarbeitung</translation>
</message>
<message>
<source>Payment request file cannot be read! This can be caused by an invalid payment request file.</source>
<translation>Zahlungsanforderungsdatei kann nicht gelesen werden! Dies kann durch eine ungültige Zahlungsanforderungsdatei verursacht werden.</translation>
</message>
<message>
<source>Unverified payment requests to custom payment scripts are unsupported.</source>
<translation>Unverifizierte Zahlungsanforderungen an benutzerdefinierte Zahlungsskripte werden nicht unterstützt.</translation>
</message>
<message>
<source>Refund from %1</source>
<translation>Rücküberweisung von %1</translation>
</message>
<message>
<source>Payment request %1 is too large (%2 bytes, allowed %3 bytes).</source>
<translation>Zahlungsanforderung %1 ist zu groß (%2 Byte, erlaubt sind %3 Byte).</translation>
</message>
<message>
<source>Payment request DoS protection</source>
<translation>Zahlungsanforderungs-DoS-Schutz</translation>
</message>
<message>
<source>Error communicating with %1: %2</source>
<translation>Kommunikationsfehler mit %1: %2</translation>
</message>
<message>
<source>Payment request cannot be parsed!</source>
<translation>Zahlungsanforderung kann nicht verarbeitet werden!</translation>
</message>
<message>
<source>Bad response from server %1</source>
<translation>Fehlerhafte Antwort vom Server %1</translation>
</message>
<message>
<source>Payment acknowledged</source>
<translation>Zahlung bestätigt</translation>
</message>
<message>
<source>Network request error</source>
<translation>fehlerhafte Netzwerkanfrage</translation>
</message>
</context>
<context>
<name>PeerTableModel</name>
<message>
<source>User Agent</source>
<translation>User-Agent</translation>
</message>
<message>
<source>Address/Hostname</source>
<translation>Adresse/Hostname</translation>
</message>
<message>
<source>Ping Time</source>
<translation>Pingzeit</translation>
</message>
</context>
<context>
<name>QObject</name>
<message>
<source>Amount</source>
<translation>Betrag</translation>
</message>
<message>
<source>Enter a Emercoin address (e.g. %1)</source>
<translation>Emercoin-Adresse eingeben (z.B. %1)</translation>
</message>
<message>
<source>%1 d</source>
<translation>%1 d</translation>
</message>
<message>
<source>%1 h</source>
<translation>%1 h</translation>
</message>
<message>
<source>%1 m</source>
<translation>%1 m</translation>
</message>
<message>
<source>%1 s</source>
<translation>%1 s</translation>
</message>
<message>
<source>NETWORK</source>
<translation>NETZWERK</translation>
</message>
<message>
<source>UNKNOWN</source>
<translation>UNBEKANNT</translation>
</message>
<message>
<source>None</source>
<translation>Keine</translation>
</message>
<message>
<source>N/A</source>
<translation>k.A.</translation>
</message>
<message>
<source>%1 ms</source>
<translation>%1 ms</translation>
</message>
</context>
<context>
<name>QRImageWidget</name>
<message>
<source>&Save Image...</source>
<translation>Grafik &speichern...</translation>
</message>
<message>
<source>&Copy Image</source>
<translation>Grafik &kopieren</translation>
</message>
<message>
<source>Save QR Code</source>
<translation>QR-Code speichern</translation>
</message>
<message>
<source>PNG Image (*.png)</source>
<translation>PNG-Grafik (*.png)</translation>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<source>Client name</source>
<translation>Clientname</translation>
</message>
<message>
<source>N/A</source>
<translation>k.A.</translation>
</message>
<message>
<source>Client version</source>
<translation>Clientversion</translation>
</message>
<message>
<source>&Information</source>
<translation>Hinweis</translation>
</message>
<message>
<source>Debug window</source>
<translation>Debugfenster</translation>
</message>
<message>
<source>General</source>
<translation>Allgemein</translation>
</message>
<message>
<source>Using OpenSSL version</source>
<translation>Verwendete OpenSSL-Version</translation>
</message>
<message>
<source>Using BerkeleyDB version</source>
<translation>Verwendete BerkeleyDB-Version</translation>
</message>
<message>
<source>Startup time</source>
<translation>Startzeit</translation>
</message>
<message>
<source>Network</source>
<translation>Netzwerk</translation>
</message>
<message>
<source>Name</source>
<translation>Name</translation>
</message>
<message>
<source>Number of connections</source>
<translation>Anzahl Verbindungen</translation>
</message>
<message>
<source>Block chain</source>
<translation>Blockkette</translation>
</message>
<message>
<source>Current number of blocks</source>
<translation>Aktuelle Anzahl Blöcke</translation>
</message>
<message>
<source>Received</source>
<translation>Empfangen</translation>
</message>
<message>
<source>Sent</source>
<translation>Übertragen</translation>
</message>
<message>
<source>&Peers</source>
<translation>&Gegenstellen</translation>
</message>
<message>
<source>Select a peer to view detailed information.</source>
<translation>Gegenstelle auswählen, um detaillierte Informationen zu erhalten.</translation>
</message>
<message>
<source>Direction</source>
<translation>Richtung</translation>
</message>
<message>
<source>Version</source>
<translation>Version</translation>
</message>
<message>
<source>User Agent</source>
<translation>User-Agent</translation>
</message>
<message>
<source>Services</source>
<translation>Dienste</translation>
</message>
<message>
<source>Starting Height</source>
<translation>Start-Höhe</translation>
</message>
<message>
<source>Sync Height</source>
<translation>Sync-Höhe</translation>
</message>
<message>
<source>Ban Score</source>
<translation>Sperrpunktzahl</translation>
</message>
<message>
<source>Connection Time</source>
<translation>Verbindungsdauer</translation>
</message>
<message>
<source>Last Send</source>
<translation>Letzte Übertragung</translation>
</message>
<message>
<source>Last Receive</source>
<translation>Letzter Empfang</translation>
</message>
<message>
<source>Bytes Sent</source>
<translation>Übertragene Byte</translation>
</message>
<message>
<source>Bytes Received</source>
<translation>Empfangene Byte</translation>
</message>
<message>
<source>Ping Time</source>
<translation>Pingzeit</translation>
</message>
<message>
<source>Last block time</source>
<translation>Letzte Blockzeit</translation>
</message>
<message>
<source>&Open</source>
<translation>&Öffnen</translation>
</message>
<message>
<source>&Console</source>
<translation>&Konsole</translation>
</message>
<message>
<source>&Network Traffic</source>
<translation>&Netzwerkauslastung</translation>
</message>
<message>
<source>&Clear</source>
<translation>&Zurücksetzen</translation>
</message>
<message>
<source>Totals</source>
<translation>Gesamtbetrag</translation>
</message>
<message>
<source>In:</source>
<translation>eingehend:</translation>
</message>
<message>
<source>Out:</source>
<translation>ausgehend:</translation>
</message>
<message>
<source>Build date</source>
<translation>Erstellungsdatum</translation>
</message>
<message>
<source>Debug log file</source>
<translation>Debugprotokolldatei</translation>
</message>
<message>
<source>Open the Emercoin debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation>Öffnet die Emercoin-Debugprotokolldatei aus dem aktuellen Datenverzeichnis. Dies kann bei großen Protokolldateien einige Sekunden dauern.</translation>
</message>
<message>
<source>Clear console</source>
<translation>Konsole zurücksetzen</translation>
</message>
<message>
<source>Welcome to the Emercoin RPC console.</source>
<translation>Willkommen in der Emercoin-RPC-Konsole.</translation>
</message>
<message>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation>Pfeiltaste hoch und runter, um den Verlauf durchzublättern und <b>Strg-L</b>, um die Konsole zurückzusetzen.</translation>
</message>
<message>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation>Bitte <b>help</b> eingeben, um eine Übersicht verfügbarer Befehle zu erhalten.</translation>
</message>
<message>
<source>%1 B</source>
<translation>%1 B</translation>
</message>
<message>
<source>%1 KB</source>
<translation>%1 KB</translation>
</message>
<message>
<source>%1 MB</source>
<translation>%1 MB</translation>
</message>
<message>
<source>%1 GB</source>
<translation>%1 GB</translation>
</message>
<message>
<source>via %1</source>
<translation>über %1</translation>
</message>
<message>
<source>never</source>
<translation>nie</translation>
</message>
<message>
<source>Inbound</source>
<translation>eingehend</translation>
</message>
<message>
<source>Outbound</source>
<translation>ausgehend</translation>
</message>
<message>
<source>Unknown</source>
<translation>Unbekannt</translation>
</message>
<message>
<source>Fetching...</source>
<translation>Aktualisiere...</translation>
</message>
</context>
<context>
<name>ReceiveCoinsDialog</name>
<message>
<source>&Amount:</source>
<translation>&Betrag:</translation>
</message>
<message>
<source>&Label:</source>
<translation>&Bezeichnung:</translation>
</message>
<message>
<source>&Message:</source>
<translation>&Nachricht:</translation>
</message>
<message>
<source>Reuse one of the previously used receiving addresses. Reusing addresses has security and privacy issues. Do not use this unless re-generating a payment request made before.</source>
<translation>Eine der bereits verwendeten Empfangsadressen wiederverwenden. Adressen wiederzuverwenden birgt Sicherheits- und Datenschutzrisiken. Außer zum Neuerstellen einer bereits erzeugten Zahlungsanforderung sollten Sie dies nicht nutzen.</translation>
</message>
<message>
<source>R&euse an existing receiving address (not recommended)</source>
<translation>Vorhandene Empfangsadresse &wiederverwenden (nicht empfohlen)</translation>
</message>
<message>
<source>An optional message to attach to the payment request, which will be displayed when the request is opened. Note: The message will not be sent with the payment over the Emercoin network.</source>
<translation>Eine optionale Nachricht, die an die Zahlungsanforderung angehängt wird. Sie wird angezeigt, wenn die Anforderung geöffnet wird. Hinweis: Diese Nachricht wird nicht mit der Zahlung über das Emercoin-Netzwerk gesendet.</translation>
</message>
<message>
<source>An optional label to associate with the new receiving address.</source>
<translation>Eine optionale Bezeichnung, die der neuen Empfangsadresse zugeordnet wird.</translation>
</message>
<message>
<source>Use this form to request payments. All fields are <b>optional</b>.</source>
<translation>Verwenden Sie dieses Formular, um Zahlungen anzufordern. Alle Felder sind <b>optional</b>.</translation>
</message>
<message>
<source>An optional amount to request. Leave this empty or zero to not request a specific amount.</source>
<translation>Ein optional angeforderter Betrag. Lassen Sie dieses Feld leer oder setzen Sie es auf 0, um keinen spezifischen Betrag anzufordern.</translation>
</message>
<message>
<source>Clear all fields of the form.</source>
<translation>Alle Formularfelder zurücksetzen.</translation>
</message>
<message>
<source>Clear</source>
<translation>Zurücksetzen</translation>
</message>
<message>
<source>Requested payments history</source>
<translation>Verlauf der angeforderten Zahlungen</translation>
</message>
<message>
<source>&Request payment</source>
<translation>&Zahlung anfordern</translation>
</message>
<message>
<source>Show the selected request (does the same as double clicking an entry)</source>
<translation>Ausgewählte Zahlungsanforderungen anzeigen (entspricht einem Doppelklick auf einen Eintrag)</translation>
</message>
<message>
<source>Show</source>
<translation>Anzeigen</translation>
</message>
<message>
<source>Remove the selected entries from the list</source>
<translation>Ausgewählte Einträge aus der Liste entfernen</translation>
</message>
<message>
<source>Remove</source>
<translation>Entfernen</translation>
</message>
<message>
<source>Copy label</source>
<translation>Bezeichnung kopieren</translation>
</message>
<message>
<source>Copy message</source>
<translation>Nachricht kopieren</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Betrag kopieren</translation>
</message>
</context>
<context>
<name>ReceiveRequestDialog</name>
<message>
<source>QR Code</source>
<translation>QR-Code</translation>
</message>
<message>
<source>Copy &URI</source>
<translation>&URI kopieren</translation>
</message>
<message>
<source>Copy &Address</source>
<translation>&Addresse kopieren</translation>
</message>
<message>
<source>&Save Image...</source>
<translation>Grafik &speichern...</translation>
</message>
<message>
<source>Request payment to %1</source>
<translation>Zahlung anfordern an %1</translation>
</message>
<message>
<source>Payment information</source>
<translation>Zahlungsinformationen</translation>
</message>
<message>
<source>URI</source>
<translation>URI</translation>
</message>
<message>
<source>Address</source>
<translation>Adresse</translation>
</message>
<message>
<source>Amount</source>
<translation>Betrag</translation>
</message>
<message>
<source>Label</source>
<translation>Bezeichnung</translation>
</message>
<message>
<source>Message</source>
<translation>Nachricht</translation>
</message>
<message>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
<translation>Resultierende URI ist zu lang, bitte den Text für Bezeichnung/Nachricht kürzen.</translation>
</message>
<message>
<source>Error encoding URI into QR Code.</source>
<translation>Beim Enkodieren der URI in den QR-Code ist ein Fehler aufgetreten.</translation>
</message>
</context>
<context>
<name>RecentRequestsTableModel</name>
<message>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<source>Label</source>
<translation>Bezeichnung</translation>
</message>
<message>
<source>Message</source>
<translation>Nachricht</translation>
</message>
<message>
<source>Amount</source>
<translation>Betrag</translation>
</message>
<message>
<source>(no label)</source>
<translation>(keine Bezeichnung)</translation>
</message>
<message>
<source>(no message)</source>
<translation>(keine Nachricht)</translation>
</message>
<message>
<source>(no amount)</source>
<translation>(kein Betrag)</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<source>Send Coins</source>
<translation>Emercoins überweisen</translation>
</message>
<message>
<source>Coin Control Features</source>
<translation>"Coin Control"-Funktionen</translation>
</message>
<message>
<source>Inputs...</source>
<translation>Eingaben...</translation>
</message>
<message>
<source>automatically selected</source>
<translation>automatisch ausgewählt</translation>
</message>
<message>
<source>Insufficient funds!</source>
<translation>Unzureichender Kontostand!</translation>
</message>
<message>
<source>Quantity:</source>
<translation>Anzahl:</translation>
</message>
<message>
<source>Bytes:</source>
<translation>Byte:</translation>
</message>
<message>
<source>Amount:</source>
<translation>Betrag:</translation>
</message>
<message>
<source>Priority:</source>
<translation>Priorität:</translation>
</message>
<message>
<source>Fee:</source>
<translation>Gebühr:</translation>
</message>
<message>
<source>After Fee:</source>
<translation>Abzüglich Gebühr:</translation>
</message>
<message>
<source>Change:</source>
<translation>Wechselgeld:</translation>
</message>
<message>
<source>If this is activated, but the change address is empty or invalid, change will be sent to a newly generated address.</source>
<translation>Wenn dies aktiviert ist, aber die Wechselgeld-Adresse leer oder ungültig ist, wird das Wechselgeld einer neu erzeugten Adresse gutgeschrieben.</translation>
</message>
<message>
<source>Custom change address</source>
<translation>Benutzerdefinierte Wechselgeld-Adresse</translation>
</message>
<message>
<source>Transaction Fee:</source>
<translation>Transaktionsgebühr:</translation>
</message>
<message>
<source>Choose...</source>
<translation>Auswählen...</translation>
</message>
<message>
<source>collapse fee-settings</source>
<translation>Transaktionsgebühreneinstellungen ausblenden</translation>
</message>
<message>
<source>Minimize</source>
<translation>Minimieren</translation>
</message>
<message>
<source>If the custom fee is set to 1000 satoshis and the transaction is only 250 bytes, then "per kilobyte" only pays 250 satoshis in fee, while "at least" pays 1000 satoshis. For transactions bigger than a kilobyte both pay by kilobyte.</source>
<translation>Wenn die benutzerdefinierte Gebühr 1000 Satoshis beträgt und die Transaktion nur 250 Byte groß ist, wird bei Auswahl von "pro Kilobyte" eine Gebühr in Höhe von 250 Satoshis, bei Auswahl von "Mindestbetrag" eine Gebühr in Höhe von 1000 Satoshis bezahlt. Bei Transaktionen, die größer als ein Kilobyte sind, werden bei beiden Optionen die Gebühren pro Kilobyte bezahlt.</translation>
</message>
<message>
<source>per kilobyte</source>
<translation>pro Kilobyte</translation>
</message>
<message>
<source>If the custom fee is set to 1000 satoshis and the transaction is only 250 bytes, then "per kilobyte" only pays 250 satoshis in fee, while "total at least" pays 1000 satoshis. For transactions bigger than a kilobyte both pay by kilobyte.</source>
<translation>Wenn die benutzerdefinierte Gebühr 1000 Satoshis beträgt und die Transaktion nur 250 Byte groß ist, wird bei Auswahl von "pro Kilobyte" eine Gebühr in Höhe von 250 Satoshis, bei Auswahl von "Mindestbetrag" eine Gebühr in Höhe von 1000 Satoshis bezahlt. Bei Transaktionen, die größer als ein Kilobyte sind, werden bei beiden Optionen die Gebühren pro Kilobyte bezahlt.</translation>
</message>
<message>
<source>total at least</source>
<translation>Mindestbetrag</translation>
</message>
<message>
<source>Paying only the minimum fee is just fine as long as there is less transaction volume than space in the blocks. But be aware that this can end up in a never confirming transaction once there is more demand for emercoin transactions than the network can process.</source>
<translation>Nur die minimale Gebühr zu bezahlen ist so lange in Ordnung, wie weniger Transaktionsvolumen als Platz in den Blöcken vorhanden ist. Aber Vorsicht, diese Option kann dazu führen, dass Transaktionen nicht bestätigt werden, wenn mehr Bedarf an Emercoin-Transaktionen besteht als das Netzwerk verarbeiten kann.</translation>
</message>
<message>
<source>(read the tooltip)</source>
<translation>(den Hinweistext lesen)</translation>
</message>
<message>
<source>Recommended:</source>
<translation>Empfehlungen:</translation>
</message>
<message>
<source>Custom:</source>
<translation>Benutzerdefiniert:</translation>
</message>
<message>
<source>(Smart fee not initialized yet. This usually takes a few blocks...)</source>
<translation>(Intelligente Gebührenlogik ist noch nicht verfügbar. Normalerweise dauert dies einige Blöcke lang...)</translation>
</message>
<message>
<source>Confirmation time:</source>
<translation>Bestätigungszeit:</translation>
</message>
<message>
<source>normal</source>
<translation>normal</translation>
</message>
<message>
<source>fast</source>
<translation>schnell</translation>
</message>
<message>
<source>Send as zero-fee transaction if possible</source>
<translation>Wenn möglich als gebührenfreie Transaktion senden</translation>
</message>
<message>
<source>(confirmation may take longer)</source>
<translation>(Bestätigung kann länger dauern)</translation>
</message>
<message>
<source>Send to multiple recipients at once</source>
<translation>An mehrere Empfänger auf einmal überweisen</translation>
</message>
<message>
<source>Add &Recipient</source>
<translation>Empfänger &hinzufügen</translation>
</message>
<message>
<source>Clear all fields of the form.</source>
<translation>Alle Formularfelder zurücksetzen.</translation>
</message>
<message>
<source>Dust:</source>
<translation>"Dust":</translation>
</message>
<message>
<source>Clear &All</source>
<translation>&Zurücksetzen</translation>
</message>
<message>
<source>Balance:</source>
<translation>Kontostand:</translation>
</message>
<message>
<source>Confirm the send action</source>
<translation>Überweisung bestätigen</translation>
</message>
<message>
<source>S&end</source>
<translation>&Überweisen</translation>
</message>
<message>
<source>Confirm send coins</source>
<translation>Überweisung bestätigen</translation>
</message>
<message>
<source>%1 to %2</source>
<translation>%1 an %2</translation>
</message>
<message>
<source>Copy quantity</source>
<translation>Anzahl kopieren</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Betrag kopieren</translation>
</message>
<message>
<source>Copy fee</source>
<translation>Gebühr kopieren</translation>
</message>
<message>
<source>Copy after fee</source>
<translation>Abzüglich Gebühr kopieren</translation>
</message>
<message>
<source>Copy bytes</source>
<translation>Byte kopieren</translation>
</message>
<message>
<source>Copy priority</source>
<translation>Priorität kopieren</translation>
</message>
<message>
<source>Copy change</source>
<translation>Wechselgeld kopieren</translation>
</message>
<message>
<source>Total Amount %1 (= %2)</source>
<translation>Gesamtbetrag %1 (= %2)</translation>
</message>
<message>
<source>or</source>
<translation>oder</translation>
</message>
<message>
<source>The recipient address is not valid, please recheck.</source>
<translation>Die Zahlungsadresse ist ungültig, bitte nochmals überprüfen.</translation>
</message>
<message>
<source>The amount to pay must be larger than 0.</source>
<translation>Der zu zahlende Betrag muss größer als 0 sein.</translation>
</message>
<message>
<source>The amount exceeds your balance.</source>
<translation>Der angegebene Betrag übersteigt Ihren Kontostand.</translation>
</message>
<message>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation>Der angegebene Betrag übersteigt aufgrund der Transaktionsgebühr in Höhe von %1 Ihren Kontostand.</translation>
</message>
<message>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation>Doppelte Zahlungsadresse gefunden, pro Überweisung kann an jede Adresse nur einmalig etwas überwiesen werden.</translation>
</message>
<message>
<source>Transaction creation failed!</source>
<translation>Transaktionserstellung fehlgeschlagen!</translation>
</message>
<message>
<source>The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>Die Transaktion wurde abgelehnt! Dies kann passieren, wenn einige Emercoins aus Ihrer Wallet bereits ausgegeben wurden. Beispielsweise weil Sie eine Kopie Ihrer wallet.dat genutzt, die Emercoins dort ausgegeben haben und dies daher in der derzeit aktiven Wallet nicht vermerkt ist.</translation>
</message>
<message>
<source>A fee higher than %1 is considered an insanely high fee.</source>
<translation>Eine höhere Gebühr als %1 wird als unsinnig hohe Gebühr angesehen.</translation>
</message>
<message>
<source>Pay only the minimum fee of %1</source>
<translation>Nur die minimale Gebühr in Höhe von %1 zahlen</translation>
</message>
<message>
<source>Estimated to begin confirmation within %1 block(s).</source>
<translation>Voraussichtlicher Beginn der Bestätigung innerhalb von %1 Blöcken.</translation>
</message>
<message>
<source>Warning: Invalid Emercoin address</source>
<translation>Warnung: Ungültige Emercoin-Adresse</translation>
</message>
<message>
<source>(no label)</source>
<translation>(keine Bezeichnung)</translation>
</message>
<message>
<source>Warning: Unknown change address</source>
<translation>Warnung: Unbekannte Wechselgeld-Adresse</translation>
</message>
<message>
<source>Copy dust</source>
<translation>"Dust" kopieren</translation>
</message>
<message>
<source>Are you sure you want to send?</source>
<translation>Wollen Sie die Überweisung ausführen?</translation>
</message>
<message>
<source>added as transaction fee</source>
<translation>als Transaktionsgebühr hinzugefügt</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<source>A&mount:</source>
<translation>Betra&g:</translation>
</message>
<message>
<source>Pay &To:</source>
<translation>E&mpfänger:</translation>
</message>
<message>
<source>Enter a label for this address to add it to your address book</source>
<translation>Adressbezeichnung eingeben (diese wird zusammen mit der Adresse dem Adressbuch hinzugefügt)</translation>
</message>
<message>
<source>&Label:</source>
<translation>&Bezeichnung:</translation>
</message>
<message>
<source>Choose previously used address</source>
<translation>Bereits verwendete Adresse auswählen</translation>
</message>
<message>
<source>This is a normal payment.</source>
<translation>Dies ist eine normale Überweisung.</translation>
</message>
<message>
<source>The Emercoin address to send the payment to</source>
<translation>Die Zahlungsadresse der Überweisung</translation>
</message>
<message>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<source>Paste address from clipboard</source>
<translation>Adresse aus der Zwischenablage einfügen</translation>
</message>
<message>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<source>Remove this entry</source>
<translation>Diesen Eintrag entfernen</translation>
</message>
<message>
<source>Message:</source>
<translation>Nachricht:</translation>
</message>
<message>
<source>This is a verified payment request.</source>
    <translation>Dies ist eine verifizierte Zahlungsanforderung.</translation>
</message>
<message>
<source>Enter a label for this address to add it to the list of used addresses</source>
<translation>Adressbezeichnung eingeben, die dann zusammen mit der Adresse der Liste bereits verwendeter Adressen hinzugefügt wird.</translation>
</message>
<message>
<source>A message that was attached to the emercoin: URI which will be stored with the transaction for your reference. Note: This message will not be sent over the Emercoin network.</source>
<translation>Eine an die "emercoin:"-URI angefügte Nachricht, die zusammen mit der Transaktion gespeichert wird. Hinweis: Diese Nachricht wird nicht über das Emercoin-Netzwerk gesendet.</translation>
</message>
<message>
<source>This is an unverified payment request.</source>
    <translation>Dies ist eine unverifizierte Zahlungsanforderung.</translation>
</message>
<message>
<source>Pay To:</source>
<translation>Empfänger:</translation>
</message>
<message>
<source>Memo:</source>
<translation>Memo:</translation>
</message>
</context>
<context>
<name>ShutdownWindow</name>
<message>
<source>Emercoin Core is shutting down...</source>
<translation>Emercoin Core wird beendet...</translation>
</message>
<message>
<source>Do not shut down the computer until this window disappears.</source>
<translation>Fahren Sie den Computer nicht herunter, bevor dieses Fenster verschwindet.</translation>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<source>Signatures - Sign / Verify a Message</source>
<translation>Signaturen - eine Nachricht signieren / verifizieren</translation>
</message>
<message>
<source>&Sign Message</source>
<translation>Nachricht &signieren</translation>
</message>
<message>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation>Sie können Nachrichten mit Ihren Adressen signieren, um den Besitz dieser Adressen zu beweisen. Bitte nutzen Sie diese Funktion mit Vorsicht und nehmen Sie sich vor Phishingangriffen in Acht. Signieren Sie nur Nachrichten, mit denen Sie vollständig einverstanden sind.</translation>
</message>
<message>
<source>The Emercoin address to sign the message with</source>
<translation>Die Emercoin-Adresse mit der die Nachricht signiert wird</translation>
</message>
<message>
<source>Choose previously used address</source>
<translation>Bereits verwendete Adresse auswählen</translation>
</message>
<message>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<source>Paste address from clipboard</source>
<translation>Adresse aus der Zwischenablage einfügen</translation>
</message>
<message>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<source>Enter the message you want to sign here</source>
<translation>Zu signierende Nachricht hier eingeben</translation>
</message>
<message>
<source>Signature</source>
<translation>Signatur</translation>
</message>
<message>
<source>Copy the current signature to the system clipboard</source>
<translation>Aktuelle Signatur in die Zwischenablage kopieren</translation>
</message>
<message>
<source>Sign the message to prove you own this Emercoin address</source>
<translation>Die Nachricht signieren, um den Besitz dieser Emercoin-Adresse zu beweisen</translation>
</message>
<message>
<source>Sign &Message</source>
<translation>&Nachricht signieren</translation>
</message>
<message>
<source>Reset all sign message fields</source>
<translation>Alle "Nachricht signieren"-Felder zurücksetzen</translation>
</message>
<message>
<source>Clear &All</source>
<translation>&Zurücksetzen</translation>
</message>
<message>
<source>&Verify Message</source>
<translation>Nachricht &verifizieren</translation>
</message>
<message>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation>Geben Sie die signierende Adresse, Nachricht (achten Sie darauf Zeilenumbrüche, Leerzeichen, Tabulatoren usw. exakt zu kopieren) und Signatur unten ein, um die Nachricht zu verifizieren. Vorsicht, interpretieren Sie nicht mehr in die Signatur hinein, als in der signierten Nachricht selber enthalten ist, um nicht von einem Man-in-the-middle-Angriff hinters Licht geführt zu werden.</translation>
</message>
<message>
<source>The Emercoin address the message was signed with</source>
<translation>Die Emercoin-Adresse mit der die Nachricht signiert wurde</translation>
</message>
<message>
<source>Verify the message to ensure it was signed with the specified Emercoin address</source>
<translation>Die Nachricht verifizieren, um sicherzustellen, dass diese mit der angegebenen Emercoin-Adresse signiert wurde</translation>
</message>
<message>
<source>Verify &Message</source>
<translation>&Nachricht verifizieren</translation>
</message>
<message>
<source>Reset all verify message fields</source>
<translation>Alle "Nachricht verifizieren"-Felder zurücksetzen</translation>
</message>
<message>
<source>Click "Sign Message" to generate signature</source>
<translation>Auf "Nachricht signieren" klicken, um die Signatur zu erzeugen</translation>
</message>
<message>
<source>The entered address is invalid.</source>
<translation>Die eingegebene Adresse ist ungültig.</translation>
</message>
<message>
<source>Please check the address and try again.</source>
<translation>Bitte überprüfen Sie die Adresse und versuchen Sie es erneut.</translation>
</message>
<message>
<source>The entered address does not refer to a key.</source>
<translation>Die eingegebene Adresse verweist nicht auf einen Schlüssel.</translation>
</message>
<message>
<source>Wallet unlock was cancelled.</source>
<translation>Wallet-Entsperrung wurde abgebrochen.</translation>
</message>
<message>
<source>Private key for the entered address is not available.</source>
<translation>Privater Schlüssel zur eingegebenen Adresse ist nicht verfügbar.</translation>
</message>
<message>
<source>Message signing failed.</source>
<translation>Signierung der Nachricht fehlgeschlagen.</translation>
</message>
<message>
<source>Message signed.</source>
<translation>Nachricht signiert.</translation>
</message>
<message>
<source>The signature could not be decoded.</source>
<translation>Die Signatur konnte nicht dekodiert werden.</translation>
</message>
<message>
<source>Please check the signature and try again.</source>
<translation>Bitte überprüfen Sie die Signatur und versuchen Sie es erneut.</translation>
</message>
<message>
<source>The signature did not match the message digest.</source>
<translation>Die Signatur entspricht nicht dem "Message Digest".</translation>
</message>
<message>
<source>Message verification failed.</source>
<translation>Verifikation der Nachricht fehlgeschlagen.</translation>
</message>
<message>
<source>Message verified.</source>
<translation>Nachricht verifiziert.</translation>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<source>Emercoin Core</source>
<translation>Emercoin Core</translation>
</message>
<message>
<source>The Emercoin Core developers</source>
<translation>Die "Emercoin Core"-Entwickler</translation>
</message>
<message>
<source>[testnet]</source>
<translation>[Testnetz]</translation>
</message>
</context>
<context>
<name>TrafficGraphWidget</name>
<message>
<source>KB/s</source>
<translation>KB/s</translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<source>Open until %1</source>
<translation>Offen bis %1</translation>
</message>
<message>
<source>conflicted</source>
<translation>in Konflikt stehend</translation>
</message>
<message>
<source>%1/offline</source>
<translation>%1/offline</translation>
</message>
<message>
<source>%1/unconfirmed</source>
<translation>%1/unbestätigt</translation>
</message>
<message>
<source>%1 confirmations</source>
<translation>%1 Bestätigungen</translation>
</message>
<message>
<source>Status</source>
<translation>Status</translation>
</message>
<message numerus="yes">
<source>, broadcast through %n node(s)</source>
<translation><numerusform>, über %n Knoten übertragen</numerusform><numerusform>, über %n Knoten übertragen</numerusform></translation>
</message>
<message>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<source>Source</source>
<translation>Quelle</translation>
</message>
<message>
<source>Generated</source>
<translation>Erzeugt</translation>
</message>
<message>
<source>From</source>
<translation>Von</translation>
</message>
<message>
<source>To</source>
<translation>An</translation>
</message>
<message>
<source>own address</source>
<translation>eigene Adresse</translation>
</message>
<message>
<source>watch-only</source>
<translation>beobachtet</translation>
</message>
<message>
<source>label</source>
<translation>Bezeichnung</translation>
</message>
<message>
<source>Credit</source>
<translation>Gutschrift</translation>
</message>
<message numerus="yes">
<source>matures in %n more block(s)</source>
<translation><numerusform>reift noch %n weiteren Block</numerusform><numerusform>reift noch %n weitere Blöcke</numerusform></translation>
</message>
<message>
<source>not accepted</source>
<translation>nicht angenommen</translation>
</message>
<message>
<source>Debit</source>
<translation>Belastung</translation>
</message>
<message>
<source>Total debit</source>
<translation>Gesamtbelastung</translation>
</message>
<message>
<source>Total credit</source>
<translation>Gesamtgutschrift</translation>
</message>
<message>
<source>Transaction fee</source>
<translation>Transaktionsgebühr</translation>
</message>
<message>
<source>Net amount</source>
<translation>Nettobetrag</translation>
</message>
<message>
<source>Message</source>
<translation>Nachricht</translation>
</message>
<message>
<source>Comment</source>
<translation>Kommentar</translation>
</message>
<message>
<source>Transaction ID</source>
<translation>Transaktions-ID</translation>
</message>
<message>
<source>Merchant</source>
<translation>Händler</translation>
</message>
<message>
<source>Generated coins must mature %1 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
<translation>Erzeugte Emercoins müssen %1 Blöcke lang reifen, bevor sie ausgegeben werden können. Als Sie diesen Block erzeugten, wurde er an das Netzwerk übertragen, um ihn der Blockkette hinzuzufügen. Falls dies fehlschlägt wird der Status in "nicht angenommen" geändert und Sie werden keine Emercoins gutgeschrieben bekommen. Das kann gelegentlich passieren, wenn ein anderer Knoten einen Block fast zeitgleich erzeugt.</translation>
</message>
<message>
<source>Debug information</source>
<translation>Debuginformationen</translation>
</message>
<message>
<source>Transaction</source>
<translation>Transaktion</translation>
</message>
<message>
<source>Inputs</source>
<translation>Eingaben</translation>
</message>
<message>
<source>Amount</source>
<translation>Betrag</translation>
</message>
<message>
<source>true</source>
<translation>wahr</translation>
</message>
<message>
<source>false</source>
<translation>falsch</translation>
</message>
<message>
<source>, has not been successfully broadcast yet</source>
<translation>, wurde noch nicht erfolgreich übertragen</translation>
</message>
<message numerus="yes">
<source>Open for %n more block(s)</source>
<translation><numerusform>Offen für %n weiteren Block</numerusform><numerusform>Offen für %n weitere Blöcke</numerusform></translation>
</message>
<message>
<source>unknown</source>
<translation>unbekannt</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<source>Transaction details</source>
<translation>Transaktionsdetails</translation>
</message>
<message>
<source>This pane shows a detailed description of the transaction</source>
<translation>Dieser Bereich zeigt eine detaillierte Beschreibung der Transaktion an</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<source>Type</source>
<translation>Typ</translation>
</message>
<message>
<source>Address</source>
<translation>Adresse</translation>
</message>
<message>
<source>Immature (%1 confirmations, will be available after %2)</source>
    <translation>Unreif (%1 Bestätigungen, wird nach %2 verfügbar sein)</translation>
</message>
<message numerus="yes">
<source>Open for %n more block(s)</source>
<translation><numerusform>Offen für %n weiteren Block</numerusform><numerusform>Offen für %n weitere Blöcke</numerusform></translation>
</message>
<message>
<source>Open until %1</source>
<translation>Offen bis %1</translation>
</message>
<message>
<source>Confirmed (%1 confirmations)</source>
<translation>Bestätigt (%1 Bestätigungen)</translation>
</message>
<message>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>Dieser Block wurde von keinem anderen Knoten empfangen und wird wahrscheinlich nicht angenommen werden!</translation>
</message>
<message>
<source>Generated but not accepted</source>
<translation>Erzeugt, jedoch nicht angenommen</translation>
</message>
<message>
<source>Offline</source>
<translation>Offline</translation>
</message>
<message>
<source>Unconfirmed</source>
<translation>Unbestätigt</translation>
</message>
<message>
<source>Confirming (%1 of %2 recommended confirmations)</source>
<translation>Wird bestätigt (%1 von %2 empfohlenen Bestätigungen)</translation>
</message>
<message>
<source>Conflicted</source>
<translation>in Konflikt stehend</translation>
</message>
<message>
<source>Received with</source>
<translation>Empfangen über</translation>
</message>
<message>
<source>Received from</source>
<translation>Empfangen von</translation>
</message>
<message>
<source>Sent to</source>
<translation>Überwiesen an</translation>
</message>
<message>
<source>Payment to yourself</source>
<translation>Eigenüberweisung</translation>
</message>
<message>
<source>Mined</source>
<translation>Erarbeitet</translation>
</message>
<message>
<source>watch-only</source>
<translation>beobachtet</translation>
</message>
<message>
<source>(n/a)</source>
<translation>(k.A.)</translation>
</message>
<message>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>Transaktionsstatus, fahren Sie mit der Maus über dieses Feld, um die Anzahl der Bestätigungen zu sehen.</translation>
</message>
<message>
<source>Date and time that the transaction was received.</source>
<translation>Datum und Uhrzeit zu der die Transaktion empfangen wurde.</translation>
</message>
<message>
<source>Type of transaction.</source>
    <translation>Art der Transaktion.</translation>
</message>
<message>
<source>Whether or not a watch-only address is involved in this transaction.</source>
<translation>Zeigt an, ob eine beobachtete Adresse in diese Transaktion involviert ist.</translation>
</message>
<message>
<source>Destination address of transaction.</source>
<translation>Zieladresse der Transaktion.</translation>
</message>
<message>
<source>Amount removed from or added to balance.</source>
<translation>Der Betrag, der dem Kontostand abgezogen oder hinzugefügt wurde.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<source>All</source>
<translation>Alle</translation>
</message>
<message>
<source>Today</source>
<translation>Heute</translation>
</message>
<message>
<source>This week</source>
<translation>Diese Woche</translation>
</message>
<message>
<source>This month</source>
<translation>Diesen Monat</translation>
</message>
<message>
<source>Last month</source>
<translation>Letzten Monat</translation>
</message>
<message>
<source>This year</source>
<translation>Dieses Jahr</translation>
</message>
<message>
<source>Range...</source>
    <translation>Zeitraum...</translation>
</message>
<message>
<source>Received with</source>
<translation>Empfangen über</translation>
</message>
<message>
<source>Sent to</source>
<translation>Überwiesen an</translation>
</message>
<message>
<source>To yourself</source>
<translation>Eigenüberweisung</translation>
</message>
<message>
<source>Mined</source>
<translation>Erarbeitet</translation>
</message>
<message>
<source>Other</source>
<translation>Andere</translation>
</message>
<message>
<source>Enter address or label to search</source>
<translation>Zu suchende Adresse oder Bezeichnung eingeben</translation>
</message>
<message>
<source>Min amount</source>
<translation>Minimaler Betrag</translation>
</message>
<message>
<source>Copy address</source>
<translation>Adresse kopieren</translation>
</message>
<message>
<source>Copy label</source>
<translation>Bezeichnung kopieren</translation>
</message>
<message>
<source>Copy amount</source>
<translation>Betrag kopieren</translation>
</message>
<message>
<source>Copy transaction ID</source>
<translation>Transaktions-ID kopieren</translation>
</message>
<message>
<source>Edit label</source>
<translation>Bezeichnung bearbeiten</translation>
</message>
<message>
<source>Show transaction details</source>
<translation>Transaktionsdetails anzeigen</translation>
</message>
<message>
<source>Export Transaction History</source>
<translation>Transaktionsverlauf exportieren</translation>
</message>
<message>
<source>Watch-only</source>
<translation>Beobachtet</translation>
</message>
<message>
<source>Exporting Failed</source>
<translation>Exportieren fehlgeschlagen</translation>
</message>
<message>
<source>There was an error trying to save the transaction history to %1.</source>
<translation>Beim Speichern des Transaktionsverlaufs nach %1 ist ein Fehler aufgetreten.</translation>
</message>
<message>
<source>Exporting Successful</source>
<translation>Exportieren erfolgreich</translation>
</message>
<message>
<source>The transaction history was successfully saved to %1.</source>
<translation>Speichern des Transaktionsverlaufs nach %1 war erfolgreich.</translation>
</message>
<message>
<source>Comma separated file (*.csv)</source>
    <translation>Kommagetrennte Datei (*.csv)</translation>
</message>
<message>
<source>Confirmed</source>
<translation>Bestätigt</translation>
</message>
<message>
<source>Date</source>
<translation>Datum</translation>
</message>
<message>
<source>Type</source>
<translation>Typ</translation>
</message>
<message>
<source>Label</source>
<translation>Bezeichnung</translation>
</message>
<message>
<source>Address</source>
<translation>Adresse</translation>
</message>
<message>
<source>ID</source>
<translation>ID</translation>
</message>
<message>
<source>Range:</source>
<translation>Zeitraum:</translation>
</message>
<message>
<source>to</source>
<translation>bis</translation>
</message>
</context>
<context>
<name>UnitDisplayStatusBarControl</name>
<message>
<source>Unit to show amounts in. Click to select another unit.</source>
<translation>Die Einheit in der Beträge angezeigt werden. Klicken, um eine andere Einheit auszuwählen.</translation>
</message>
</context>
<context>
<name>WalletFrame</name>
<message>
<source>No wallet has been loaded.</source>
<translation>Es wurde keine Wallet geladen.</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<source>Send Coins</source>
<translation>Emercoins überweisen</translation>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<source>&Export</source>
<translation>E&xportieren</translation>
</message>
<message>
<source>Export the data in the current tab to a file</source>
<translation>Daten der aktuellen Ansicht in eine Datei exportieren</translation>
</message>
<message>
<source>Backup Wallet</source>
<translation>Wallet sichern</translation>
</message>
<message>
<source>Wallet Data (*.dat)</source>
<translation>Wallet-Daten (*.dat)</translation>
</message>
<message>
<source>Backup Failed</source>
<translation>Sicherung fehlgeschlagen</translation>
</message>
<message>
<source>There was an error trying to save the wallet data to %1.</source>
<translation>Beim Speichern der Wallet-Daten nach %1 ist ein Fehler aufgetreten.</translation>
</message>
<message>
<source>The wallet data was successfully saved to %1.</source>
<translation>Speichern der Wallet-Daten nach %1 war erfolgreich.</translation>
</message>
<message>
<source>Backup Successful</source>
<translation>Sicherung erfolgreich</translation>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<source>Options:</source>
<translation>Optionen:</translation>
</message>
<message>
<source>Specify data directory</source>
<translation>Datenverzeichnis festlegen</translation>
</message>
<message>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation>Mit dem angegebenen Knoten verbinden, um Adressen von Gegenstellen abzufragen, danach trennen</translation>
</message>
<message>
<source>Specify your own public address</source>
<translation>Die eigene öffentliche Adresse angeben</translation>
</message>
<message>
<source>Accept command line and JSON-RPC commands</source>
<translation>Kommandozeilen- und JSON-RPC-Befehle annehmen</translation>
</message>
<message>
<source>Run in the background as a daemon and accept commands</source>
<translation>Als Hintergrunddienst ausführen und Befehle annehmen</translation>
</message>
<message>
<source>Use the test network</source>
<translation>Das Testnetz verwenden</translation>
</message>
<message>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation>Eingehende Verbindungen annehmen (Standard: 1, wenn nicht -proxy oder -connect)</translation>
</message>
<message>
<source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source>
<translation>An die angegebene Adresse binden und immer abhören. Für IPv6 "[Host]:Port"-Notation verwenden</translation>
</message>
<message>
<source>Delete all wallet transactions and only recover those parts of the blockchain through -rescan on startup</source>
<translation>Alle Wallet-Transaktionen löschen und nur diese Teilbereiche der Blockkette durch -rescan beim Starten wiederherstellen</translation>
</message>
<message>
<source>Distributed under the MIT software license, see the accompanying file COPYING or <http://www.opensource.org/licenses/mit-license.php>.</source>
    <translation>Veröffentlicht unter der MIT-Softwarelizenz, siehe beiliegende Datei COPYING oder &lt;http://www.opensource.org/licenses/mit-license.php&gt;.</translation>
</message>
<message>
<source>Enter regression test mode, which uses a special chain in which blocks can be solved instantly.</source>
<translation>Regressionstest-Modus aktivieren, der eine spezielle Blockkette nutzt, in der Blöcke sofort gelöst werden können.</translation>
</message>
<message>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation>Befehl ausführen wenn sich eine Wallet-Transaktion verändert (%s im Befehl wird durch die Transaktions-ID ersetzt)</translation>
</message>
<message>
<source>In this mode -genproclimit controls how many blocks are generated immediately.</source>
<translation>In diesem Modus legt -genproclimit fest, wie viele Blöcke sofort erzeugt werden.</translation>
</message>
<message>
<source>Set the number of script verification threads (%u to %d, 0 = auto, <0 = leave that many cores free, default: %d)</source>
<translation>Maximale Anzahl an Skript-Verifizierungs-Threads festlegen (%u bis %d, 0 = automatisch, <0 = so viele Kerne frei lassen, Standard: %d)</translation>
</message>
<message>
<source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source>
<translation>Dies ist eine Vorab-Testversion - Verwendung auf eigene Gefahr - nicht für Mining- oder Handelsanwendungen nutzen!</translation>
</message>
<message>
<source>Unable to bind to %s on this computer. Emercoin Core is probably already running.</source>
<translation>Kann auf diesem Computer nicht an %s binden, da Emercoin Core wahrscheinlich bereits gestartet wurde.</translation>
</message>
<message>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation>Warnung: -paytxfee ist auf einen sehr hohen Wert festgelegt! Dies ist die Gebühr die beim Senden einer Transaktion fällig wird.</translation>
</message>
<message>
<source>Warning: The network does not appear to fully agree! Some miners appear to be experiencing issues.</source>
<translation>Warnung: Das Netzwerk scheint nicht vollständig übereinzustimmen! Einige Miner scheinen Probleme zu haben.</translation>
</message>
<message>
<source>Warning: We do not appear to fully agree with our peers! You may need to upgrade, or other nodes may need to upgrade.</source>
<translation>Warnung: Wir scheinen nicht vollständig mit unseren Gegenstellen übereinzustimmen! Sie oder die anderen Knoten müssen unter Umständen Ihre Client-Software aktualisieren.</translation>
</message>
<message>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation>Warnung: Lesen von wallet.dat fehlgeschlagen! Alle Schlüssel wurden korrekt gelesen, Transaktionsdaten bzw. Adressbucheinträge fehlen aber möglicherweise oder sind inkorrekt.</translation>
</message>
<message>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
    <translation>Warnung: wallet.dat beschädigt, Datenrettung erfolgreich! Original wallet.dat wurde als wallet.{Zeitstempel}.bak in %s gespeichert. Falls Ihr Kontostand oder Transaktionen nicht korrekt sind, sollten Sie von einer Datensicherung wiederherstellen.</translation>
</message>
<message>
<source>Whitelist peers connecting from the given netmask or IP address. Can be specified multiple times.</source>
<translation>Gegenstellen die sich von der angegebenen Netzmaske oder IP-Adresse aus verbinden immer zulassen. Kann mehrmals angegeben werden.</translation>
</message>
<message>
<source>(default: 1)</source>
<translation>(Standard: 1)</translation>
</message>
<message>
<source><category> can be:</source>
<translation><category> kann sein:</translation>
</message>
<message>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation>Versuchen, private Schlüssel aus einer beschädigten wallet.dat wiederherzustellen</translation>
</message>
<message>
<source>Block creation options:</source>
<translation>Blockerzeugungsoptionen:</translation>
</message>
<message>
<source>Connect only to the specified node(s)</source>
<translation>Mit nur dem oder den angegebenen Knoten verbinden</translation>
</message>
<message>
<source>Connection options:</source>
<translation>Verbindungsoptionen:</translation>
</message>
<message>
<source>Corrupted block database detected</source>
<translation>Beschädigte Blockdatenbank erkannt</translation>
</message>
<message>
<source>Debugging/Testing options:</source>
<translation>Debugging-/Testoptionen:</translation>
</message>
<message>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation>Eigene IP-Adresse erkennen (Standard: 1, wenn abgehört wird und nicht -externalip)</translation>
</message>
<message>
<source>Do not load the wallet and disable wallet RPC calls</source>
<translation>Die Wallet nicht laden und Wallet-RPC-Aufrufe deaktivieren</translation>
</message>
<message>
<source>Do you want to rebuild the block database now?</source>
<translation>Möchten Sie die Blockdatenbank jetzt neu aufbauen?</translation>
</message>
<message>
<source>Error initializing block database</source>
<translation>Fehler beim Initialisieren der Blockdatenbank</translation>
</message>
<message>
<source>Error initializing wallet database environment %s!</source>
<translation>Fehler beim Initialisieren der Wallet-Datenbankumgebung %s!</translation>
</message>
<message>
<source>Error loading block database</source>
<translation>Fehler beim Laden der Blockdatenbank</translation>
</message>
<message>
<source>Error opening block database</source>
<translation>Fehler beim Öffnen der Blockdatenbank</translation>
</message>
<message>
<source>Error: A fatal internal error occured, see debug.log for details</source>
<translation>Fehler: Ein schwerer Fehler ist aufgetreten, für Details debug.log ansehen.</translation>
</message>
<message>
<source>Error: Disk space is low!</source>
<translation>Fehler: Zu wenig freier Speicherplatz auf dem Datenträger!</translation>
</message>
<message>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation>Fehler, es konnte kein Port abgehört werden. Wenn dies so gewünscht wird -listen=0 verwenden.</translation>
</message>
<message>
<source>If <category> is not supplied, output all debugging information.</source>
<translation>Wenn <category> nicht angegeben wird, jegliche Debugginginformationen ausgeben.</translation>
</message>
<message>
<source>Importing...</source>
<translation>Importiere...</translation>
</message>
<message>
<source>Incorrect or no genesis block found. Wrong datadir for network?</source>
<translation>Fehlerhafter oder kein Genesis-Block gefunden. Falsches Datenverzeichnis für das Netzwerk?</translation>
</message>
<message>
<source>Invalid -onion address: '%s'</source>
<translation>Ungültige "-onion"-Adresse: '%s'</translation>
</message>
<message>
<source>Not enough file descriptors available.</source>
<translation>Nicht genügend Datei-Deskriptoren verfügbar.</translation>
</message>
<message>
<source>Only connect to nodes in network <net> (ipv4, ipv6 or onion)</source>
<translation>Nur zu Knoten des Netzwerktyps <net> verbinden (ipv4, ipv6 oder onion)</translation>
</message>
<message>
<source>Rebuild block chain index from current blk000??.dat files</source>
<translation>Blockkettenindex aus aktuellen Dateien blk000??.dat wiederaufbauen</translation>
</message>
<message>
<source>Set database cache size in megabytes (%d to %d, default: %d)</source>
<translation>Größe des Datenbankcaches in Megabyte festlegen (%d bis %d, Standard: %d)</translation>
</message>
<message>
<source>Set maximum block size in bytes (default: %d)</source>
<translation>Maximale Blockgröße in Byte festlegen (Standard: %d)</translation>
</message>
<message>
<source>Specify wallet file (within data directory)</source>
<translation>Wallet-Datei angeben (innerhalb des Datenverzeichnisses)</translation>
</message>
<message>
<source>This is intended for regression testing tools and app development.</source>
<translation>Dies ist für Regressionstest-Tools und Anwendungsentwicklung gedacht.</translation>
</message>
<message>
<source>Use UPnP to map the listening port (default: %u)</source>
<translation>UPnP verwenden, um eine Portweiterleitung einzurichten (Standard: %u)</translation>
</message>
<message>
<source>Verifying blocks...</source>
<translation>Verifiziere Blöcke...</translation>
</message>
<message>
<source>Verifying wallet...</source>
<translation>Verifiziere Wallet...</translation>
</message> | <message>
<source>Wallet %s resides outside data directory %s</source>
<translation>Wallet %s liegt außerhalb des Datenverzeichnisses %s</translation>
</message>
<message>
<source>Wallet options:</source>
<translation>Wallet-Optionen:</translation>
</message>
<message>
<source>You need to rebuild the database using -reindex to change -txindex</source>
<translation>Sie müssen die Datenbank mit Hilfe von -reindex neu aufbauen, um -txindex zu verändern</translation>
</message>
<message>
<source>Imports blocks from external blk000??.dat file</source>
<translation>Blöcke aus externer Datei blk000??.dat importieren</translation>
</message>
<message>
<source>Allow JSON-RPC connections from specified source. Valid for <ip> are a single IP (e.g. 1.2.3.4), a network/netmask (e.g. 1.2.3.4/255.255.255.0) or a network/CIDR (e.g. 1.2.3.4/24). This option can be specified multiple times</source>
<translation>JSON-RPC-Verbindungen von der angegeben Quelle erlauben. Gültig für <ip> ist eine einzelne IP-Adresse (z.B. 1.2.3.4), ein Netzwerk bzw. eine Netzmaske (z.B. 1.2.3.4/255.255.255.0), oder die CIDR-Notation (z.B. 1.2.3.4/24). Kann mehrmals angegeben werden.</translation>
</message>
<message>
<source>An error occurred while setting up the RPC address %s port %u for listening: %s</source>
<translation>Beim Einrichten der abzuhörenden RPC-Adresse %s auf Port %u ist ein Fehler aufgetreten: %s</translation>
</message>
<message>
<source>Bind to given address and whitelist peers connecting to it. Use [host]:port notation for IPv6</source>
<translation>An die angegebene Adresse binden und Gegenstellen, die sich dorthin verbinden, immer zulassen. Für IPv6 "[Host]:Port"-Notation verwenden</translation>
</message>
<message>
<source>Bind to given address to listen for JSON-RPC connections. Use [host]:port notation for IPv6. This option can be specified multiple times (default: bind to all interfaces)</source>
<translation>An die angegebene Adresse binden und nach eingehenden JSON-RPC-Verbindungen abhören. Für IPv6 "[Host]:Port"-Notation verwenden. Kann mehrmals angegeben werden. (Standard: an alle Schnittstellen binden)</translation>
</message>
<message>
<source>Cannot obtain a lock on data directory %s. Emercoin Core is probably already running.</source>
<translation>Datenverzeichnis %s kann nicht gesperrt werden, da Emercoin Core wahrscheinlich bereits gestartet wurde.</translation>
</message>
<message>
<source>Continuously rate-limit free transactions to <n>*1000 bytes per minute (default:%u)</source>
<translation>Anzahl der freien Transaktionen auf <n> * 1000 Byte pro Minute begrenzen (Standard: %u)</translation>
</message>
<message>
<source>Create new files with system default permissions, instead of umask 077 (only effective with disabled wallet functionality)</source>
<translation>Neue Dateien mit Standard-Systemrechten erzeugen, anstatt mit umask 077 (nur mit deaktivierter Walletfunktion nutzbar)</translation>
</message>
<message>
<source>Error: Listening for incoming connections failed (listen returned error %s)</source>
<translation>Fehler: Abhören nach eingehenden Verbindungen fehlgeschlagen (listen meldete Fehler %s)</translation>
</message>
<message>
<source>Error: Unsupported argument -socks found. Setting SOCKS version isn't possible anymore, only SOCKS5 proxies are supported.</source>
<translation>Fehler: Nicht unterstütztes Argument -socks gefunden. Das Festlegen der SOCKS-Version ist nicht mehr möglich, nur noch SOCKS5-Proxies werden unterstützt.</translation>
</message>
<message>
<source>Execute command when a relevant alert is received or we see a really long fork (%s in cmd is replaced by message)</source>
<translation>Befehl ausführen wenn ein relevanter Alarm empfangen wird oder wir einen wirklich langen Fork entdecken (%s im Befehl wird durch die Nachricht ersetzt)</translation>
</message>
<message>
<source>Fees (in EMC/Kb) smaller than this are considered zero fee for relaying (default: %s)</source>
<translation>Niedrigere Gebühren (in EMC/Kb) als diese werden bei der Weiterleitung als gebührenfrei angesehen (Standard: %s)</translation>
</message>
<message>
<source>Fees (in EMC/Kb) smaller than this are considered zero fee for transaction creation (default: %s)</source>
<translation>Niedrigere Gebühren (in EMC/Kb) als diese werden bei der Transaktionserstellung als gebührenfrei angesehen (Standard: %s)</translation>
</message>
<message>
<source>If paytxfee is not set, include enough fee so transactions begin confirmation on average within n blocks (default: %u)</source>
<translation>Wenn -paytxfee nicht festgelegt wurde Gebühren einschließen, so dass mit der Bestätigung von Transaktionen im Schnitt innerhalb von n Blöcken begonnen wird (Standard: %u)</translation>
</message>
<message>
<source>Invalid amount for -maxtxfee=<amount>: '%s' (must be at least the minrelay fee of %s to prevent stuck transactions)</source>
<translation>Ungültiger Betrag für -maxtxfee=<amount>: '%s' (muss mindestens die minimale Weiterleitungsgebühr in Höhe von %s sein, um zu verhindern dass Transaktionen nicht bearbeitet werden)</translation>
</message>
<message>
<source>Maximum size of data in data carrier transactions we relay and mine (default: %u)</source>
<translation>Maximale Datengröße in "Data Carrier"-Transaktionen die weitergeleitet und erarbeitet werden (Standard: %u)</translation>
</message>
<message>
<source>Maximum total fees to use in a single wallet transaction, setting too low may abort large transactions (default: %s)</source>
<translation>Maximale Gesamtgebühren je Wallet-Transaktion, ein zu niedriger Wert kann große Transaktionen abbrechen (Standard: %s)</translation>
</message>
<message>
<source>Query for peer addresses via DNS lookup, if low on addresses (default: 1 unless -connect)</source>
<translation>Adressen von Gegenstellen via DNS-Namensauflösung finden, falls zu wenige Adressen verfügbar sind (Standard: 1, außer bei -connect)</translation>
</message>
<message>
<source>Require high priority for relaying free or low-fee transactions (default:%u)</source>
<translation>Zum Weiterleiten von freien Transaktionen oder Transaktionen mit niedrigen Gebühren eine hohe Priorität voraussetzen (Standard: %u)</translation>
</message>
<message>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: %d)</source>
<translation>Maximale Größe in Byte von "high-priority/low-fee"-Transaktionen festlegen (Standard: %d)</translation>
</message>
<message>
<source>Set the number of threads for coin generation if enabled (-1 = all cores, default: %d)</source>
<translation>Maximale Anzahl an Threads zur Emercoinerzeugung, wenn aktiviert, festlegen (-1 = alle Kerne, Standard: %d)</translation>
</message>
<message>
<source>This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit <https://www.openssl.org/> and cryptographic software written by Eric Young and UPnP software written by Thomas Bernard.</source>
<translation>Dieses Produkt enthält Software, die vom OpenSSL-Projekt zur Verwendung im OpenSSL-Toolkit <https://www.openssl.org/> entwickelt wird, sowie von Eric Young geschriebene kryptographische Software und von Thomas Bernard geschriebene UPnP-Software.</translation>
</message>
<message>
<source>To use emercoind, or the -server option to emercoin-qt, you must set an rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=emercoinrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "Emercoin Alert" [email protected]
</source>
<translation>Um emercoind oder die Option -server mit emercoin-qt verwenden zu können, müssen Sie rpcpassword in der Konfigurationsdatei angeben:
%s
Es wird empfohlen das folgende Zufallspasswort zu verwenden.
rpcuser=emercoinrpc
rpcpassword=%s
(Sie müssen sich dieses Passwort nicht merken!)
Der Benutzername und das Passwort dürfen NICHT identisch sein.
Falls die Konfigurationsdatei nicht existiert, erzeugen Sie diese bitte mit Leserechten nur für den Dateibesitzer.
Es wird ebenfalls empfohlen alertnotify anzugeben, um im Problemfall benachrichtigt zu werden.
Beispiel: alertnotify=echo %%s | mail -s "Emercoin Alert" [email protected]
</translation>
</message>
<message>
<source>Warning: -maxtxfee is set very high! Fees this large could be paid on a single transaction.</source>
<translation>Warnung: -maxtxfee ist auf einen sehr hohen Wert festgelegt! Gebühren dieser Höhe könnten für eine einzelne Transaktion bezahlt werden.</translation>
</message>
<message>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong Emercoin Core will not work properly.</source>
<translation>Warnung: Bitte korrigieren Sie die Datums- und Uhrzeiteinstellungen Ihres Computers, da Emercoin Core ansonsten nicht ordnungsgemäß funktionieren wird.</translation>
</message>
<message>
<source>Whitelisted peers cannot be DoS banned and their transactions are always relayed, even if they are already in the mempool, useful e.g. for a gateway</source>
<translation>Erlaubte Gegenstellen werden nicht für DoS-Attacken gesperrt und ihre Transaktionen werden immer weitergeleitet, auch wenn sie sich bereits im Speicherpool befinden, was z.B. für Gateways sinnvoll ist.</translation>
</message>
<message>
<source>Accept public REST requests (default: %u)</source>
<translation>Öffentliche REST-Anfragen annehmen (Standard: %u)</translation>
</message>
<message>
<source>Cannot resolve -whitebind address: '%s'</source>
<translation>Kann Adresse in -whitebind nicht auflösen: '%s'</translation>
</message>
<message>
<source>Connect through SOCKS5 proxy</source>
<translation>Über einen SOCKS5-Proxy verbinden</translation>
</message>
<message>
<source>Copyright (C) 2013-%i The Emercoin Core Developers</source>
<translation>Urheberrecht (C) 2013-%i Die "Emercoin Core"-Entwickler</translation>
</message>
<message>
<source>Could not parse -rpcbind value %s as network address</source>
<translation>Der Wert %s von -rpcbind wurde nicht als Netzwerkadresse erkannt</translation>
</message>
<message>
<source>Error loading wallet.dat: Wallet requires newer version of Emercoin Core</source>
<translation>Fehler beim Laden von wallet.dat: Wallet benötigt neuere Version von Emercoin Core</translation>
</message>
<message>
<source>Error reading from database, shutting down.</source>
<translation>Fehler beim Lesen der Datenbank, Ausführung wird beendet.</translation>
</message>
<message>
<source>Error: Unsupported argument -tor found, use -onion.</source>
<translation>Fehler: Nicht unterstütztes Argument -tor gefunden, bitte -onion verwenden.</translation>
</message>
<message>
<source>Fee (in EMC/Kb) to add to transactions you send (default: %s)</source>
<translation>Gebühr (in EMC/Kb), die von Ihnen gesendeten Transaktionen hinzugefügt wird (Standard: %s)</translation>
</message>
<message>
<source>Information</source>
<translation>Hinweis</translation>
</message>
<message>
<source>Initialization sanity check failed. Emercoin Core is shutting down.</source>
<translation>Initialisierungsplausibilitätsprüfung fehlgeschlagen. Emercoin Core wird beendet.</translation>
</message>
<message>
<source>Invalid amount for -maxtxfee=<amount>: '%s'</source>
<translation>Ungültiger Betrag für -maxtxfee=<amount>: '%s'</translation>
</message>
<message>
<source>Invalid amount for -minrelaytxfee=<amount>: '%s'</source>
<translation>Ungültiger Betrag für -minrelaytxfee=<amount>: '%s'</translation>
</message>
<message>
<source>Invalid amount for -mintxfee=<amount>: '%s'</source>
<translation>Ungültiger Betrag für -mintxfee=<amount>: '%s'</translation>
</message>
<message>
<source>Invalid amount for -paytxfee=<amount>: '%s' (must be at least %s)</source>
<translation>Ungültiger Betrag für -paytxfee=<amount>: '%s' (muss mindestens %s sein)</translation>
</message>
<message>
<source>Invalid netmask specified in -whitelist: '%s'</source>
<translation>Ungültige Netzmaske angegeben in -whitelist: '%s'</translation>
</message>
<message>
<source>Keep at most <n> unconnectable transactions in memory (default: %u)</source>
<translation>Maximal <n> nicht-verbindbare Transaktionen im Speicher halten (Standard: %u)</translation>
</message>
<message>
<source>Need to specify a port with -whitebind: '%s'</source>
<translation>Angabe eines Ports benötigt für -whitebind: '%s'</translation>
</message>
<message>
<source>Node relay options:</source>
<translation>Knoten-Weiterleitungsoptionen:</translation>
</message>
<message>
<source>RPC SSL options: (see the Bitcoin Wiki for SSL setup instructions)</source>
<translation>RPC-SSL-Optionen (siehe Emercoin-Wiki für SSL-Einrichtung):</translation>
</message>
<message>
<source>RPC server options:</source>
<translation>RPC-Serveroptionen:</translation>
</message>
<message>
<source>RPC support for HTTP persistent connections (default: %d)</source>
<translation>Unterstützung für persistente HTTP-Verbindungen bei RPC (Standard: %d)</translation>
</message>
<message>
<source>Randomly drop 1 of every <n> network messages</source>
<translation>Zufällig eine von <n> Netzwerknachrichten verwerfen</translation>
</message>
<message>
<source>Randomly fuzz 1 of every <n> network messages</source>
<translation>Zufällig eine von <n> Netzwerknachrichten verwürfeln</translation>
</message>
<message>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation>Rückverfolgungs- und Debuginformationen an die Konsole senden, anstatt sie in debug.log zu schreiben</translation>
</message>
<message>
<source>Send transactions as zero-fee transactions if possible (default: %u)</source>
<translation>Transaktionen, wenn möglich, als gebührenfreie Transaktion senden (Standard: %u)</translation>
</message>
<message>
<source>Show all debugging options (usage: --help -help-debug)</source>
<translation>Zeige alle Debuggingoptionen (Benutzung: --help -help-debug)</translation>
</message>
<message>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation>Protokolldatei debug.log beim Starten des Clients kürzen (Standard: 1, wenn kein -debug)</translation>
</message>
<message>
<source>Signing transaction failed</source>
<translation>Signierung der Transaktion fehlgeschlagen</translation>
</message>
<message>
<source>This is experimental software.</source>
<translation>Dies ist experimentelle Software.</translation>
</message>
<message>
<source>Transaction amount too small</source>
<translation>Transaktionsbetrag zu niedrig</translation>
</message>
<message>
<source>Transaction amounts must be positive</source>
<translation>Transaktionsbeträge müssen positiv sein</translation>
</message>
<message>
<source>Transaction too large for fee policy</source>
<translation>Transaktion ist für die Gebührenrichtlinie zu groß</translation>
</message>
<message>
<source>Transaction too large</source>
<translation>Transaktion zu groß</translation>
</message>
<message>
<source>Unable to bind to %s on this computer (bind returned error %s)</source>
<translation>Kann auf diesem Computer nicht an %s binden (bind meldete Fehler %s)</translation>
</message>
<message>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation>UPnP verwenden, um eine Portweiterleitung einzurichten (Standard: 1, wenn abgehört wird)</translation>
</message>
<message>
<source>Username for JSON-RPC connections</source>
<translation>Benutzername für JSON-RPC-Verbindungen</translation>
</message>
<message>
<source>Wallet needed to be rewritten: restart Emercoin Core to complete</source>
<translation>Wallet musste neu geschrieben werden: starten Sie Emercoin Core zur Fertigstellung neu</translation>
</message>
<message>
<source>Warning</source>
<translation>Warnung</translation>
</message>
<message>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation>Warnung: Diese Version ist veraltet, Aktualisierung erforderlich!</translation>
</message>
<message>
<source>Warning: Unsupported argument -benchmark ignored, use -debug=bench.</source>
<translation>Warnung: Nicht unterstütztes Argument -benchmark wurde ignoriert, bitte -debug=bench verwenden.</translation>
</message>
<message>
<source>Warning: Unsupported argument -debugnet ignored, use -debug=net.</source>
<translation>Warnung: Nicht unterstütztes Argument -debugnet wurde ignoriert, bitte -debug=net verwenden.</translation>
</message>
<message>
<source>Zapping all transactions from wallet...</source>
<translation>Lösche alle Transaktionen aus Wallet...</translation>
</message>
<message>
<source>on startup</source>
<translation>beim Starten</translation>
</message>
<message>
<source>wallet.dat corrupt, salvage failed</source>
<translation>wallet.dat beschädigt, Datenrettung fehlgeschlagen</translation>
</message>
<message>
<source>Password for JSON-RPC connections</source>
<translation>Passwort für JSON-RPC-Verbindungen</translation>
</message>
<message>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation>Befehl ausführen wenn der beste Block wechselt (%s im Befehl wird durch den Hash des Blocks ersetzt)</translation>
</message>
<message>
<source>Upgrade wallet to latest format</source>
<translation>Wallet auf das neueste Format aktualisieren</translation>
</message>
<message>
<source>Rescan the block chain for missing wallet transactions</source>
<translation>Blockkette erneut nach fehlenden Wallet-Transaktionen durchsuchen</translation>
</message>
<message>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation>OpenSSL (https) für JSON-RPC-Verbindungen verwenden</translation>
</message>
<message>
<source>This help message</source>
<translation>Dieser Hilfetext</translation>
</message>
<message>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation>Erlaube DNS-Abfragen für -addnode, -seednode und -connect</translation>
</message>
<message>
<source>Loading addresses...</source>
<translation>Lade Adressen...</translation>
</message>
<message>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation>Fehler beim Laden von wallet.dat: Wallet beschädigt</translation>
</message>
<message>
<source>(1 = keep tx meta data e.g. account owner and payment request information, 2 = drop tx meta data)</source>
<translation>(1 = TX-Metadaten wie z.B. Accountbesitzer und Zahlungsanforderungsinformationen behalten, 2 = TX-Metadaten verwerfen)</translation>
</message>
<message>
<source>Flush database activity from memory pool to disk log every <n> megabytes (default: %u)</source>
<translation>Datenbankaktivitäten vom Arbeitsspeicher-Pool alle <n> Megabyte auf den Datenträger schreiben (Standard: %u)</translation>
</message>
<message>
<source>How thorough the block verification of -checkblocks is (0-4, default: %u)</source>
<translation>Legt fest, wie gründlich die Blockverifikation von -checkblocks ist (0-4, Standard: %u)</translation>
</message>
<message>
<source>Log transaction priority and fee per kB when mining blocks (default: %u)</source>
<translation>Transaktionspriorität und Gebühr pro kB beim Erzeugen von Blöcken protokollieren (Standard: %u)</translation>
</message>
<message>
<source>Maintain a full transaction index, used by the getrawtransaction rpc call (default: %u)</source>
<translation>Einen vollständigen Transaktionsindex führen, der vom RPC-Befehl "getrawtransaction" genutzt wird (Standard: %u)</translation>
</message>
<message>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: %u)</source>
<translation>Anzahl Sekunden, während denen sich nicht konform verhaltenden Gegenstellen die Wiederverbindung verweigert wird (Standard: %u)</translation>
</message>
<message>
<source>Output debugging information (default: %u, supplying <category> is optional)</source>
<translation>Debugginginformationen ausgeben (Standard: %u, <category> anzugeben ist optional)</translation>
</message>
<message>
<source>Use separate SOCKS5 proxy to reach peers via Tor hidden services (default: %s)</source>
<translation>Separaten SOCKS5-Proxy verwenden, um Gegenstellen über versteckte Tor-Dienste zu erreichen (Standard: %s)</translation>
</message>
<message>
<source>(default: %s)</source>
<translation>(Standard: %s)</translation>
</message>
<message>
<source>Acceptable ciphers (default: %s)</source>
<translation>Zulässige Chiffren (Standard: %s)</translation>
</message>
<message>
<source>Always query for peer addresses via DNS lookup (default: %u)</source>
<translation>Adressen von Gegenstellen immer über DNS-Namensauflösung abfragen (Standard: %u)</translation>
</message>
<message>
<source>Disable safemode, override a real safe mode event (default: %u)</source>
<translation>Sicherheitsmodus deaktivieren, übergeht ein echtes Sicherheitsmodusereignis (Standard: %u)</translation>
</message>
<message>
<source>Error loading wallet.dat</source>
<translation>Fehler beim Laden von wallet.dat</translation>
</message>
<message>
<source>Force safe mode (default: %u)</source>
<translation>Sicherheitsmodus erzwingen (Standard: %u)</translation>
</message>
<message>
<source>Generate coins (default: %u)</source>
<translation>Emercoins erzeugen (Standard: %u)</translation>
</message>
<message>
<source>How many blocks to check at startup (default: %u, 0 = all)</source>
<translation>Wieviele Blöcke beim Starten geprüft werden sollen (Standard: %u, 0 = alle)</translation>
</message>
<message>
<source>Include IP addresses in debug output (default: %u)</source>
<translation>IP-Adressen in Debugausgabe einschließen (Standard: %u)</translation>
</message>
<message>
<source>Invalid -proxy address: '%s'</source>
<translation>Ungültige Adresse in -proxy: '%s'</translation>
</message>
<message>
<source>Limit size of signature cache to <n> entries (default: %u)</source>
<translation>Größe des Signaturcaches auf <n> Einträge begrenzen (Standard: %u)</translation>
</message>
<message>
<source>Listen for JSON-RPC connections on <port> (default: %u or testnet: %u)</source>
<translation><port> nach JSON-RPC-Verbindungen abhören (Standard: %u oder Testnetz: %u)</translation>
</message>
<message>
<source>Maintain at most <n> connections to peers (default: %u)</source>
<translation>Maximal <n> Verbindungen zu Gegenstellen aufrechterhalten (Standard: %u)</translation>
</message>
<message>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: %u)</source>
<translation>Maximale Größe des Empfangspuffers pro Verbindung, <n> * 1000 Byte (Standard: %u)</translation>
</message>
<message>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: %u)</source>
<translation>Maximale Größe des Sendepuffers pro Verbindung, <n> * 1000 Byte (Standard: %u)</translation>
</message>
<message>
<source>Only accept block chain matching built-in checkpoints (default: %u)</source>
<translation>Blockkette nur als gültig ansehen, wenn sie mit den integrierten Prüfpunkten übereinstimmt (Standard: %u)</translation>
</message>
<message>
<source>Prepend debug output with timestamp (default: %u)</source>
<translation>Debugausgaben einen Zeitstempel voranstellen (Standard: %u)</translation>
</message>
<message>
<source>Relay and mine data carrier transactions (default: %u)</source>
<translation>"Data Carrier"-Transaktionen weiterleiten und erarbeiten (Standard: %u)</translation>
</message>
<message>
<source>Relay non-P2SH multisig (default: %u)</source>
<translation>Nicht-"P2SH-Multisig" weiterleiten (Standard: %u)</translation>
</message>
<message>
<source>Run a thread to flush wallet periodically (default: %u)</source>
<translation>Einen Thread starten, der periodisch die Wallet sicher auf den Datenträger schreibt (Standard: %u)</translation>
</message>
<message>
<source>Server certificate file (default: %s)</source>
<translation>Serverzertifikat (Standard: %s)</translation>
</message>
<message>
<source>Server private key (default: %s)</source>
<translation>Privater Serverschlüssel (Standard: %s)</translation>
</message>
<message>
<source>Set key pool size to <n> (default: %u)</source>
<translation>Größe des Schlüsselpools festlegen auf <n> (Standard: %u)</translation>
</message>
<message>
<source>Set minimum block size in bytes (default: %u)</source>
<translation>Minimale Blockgröße in Byte festlegen (Standard: %u)</translation>
</message>
<message>
<source>Set the number of threads to service RPC calls (default: %d)</source>
<translation>Maximale Anzahl an Threads zur Verarbeitung von RPC-Anfragen festlegen (Standard: %d)</translation>
</message>
<message>
<source>Sets the DB_PRIVATE flag in the wallet db environment (default: %u)</source>
<translation>"DB_PRIVATE"-Flag in der Wallet-Datenbankumgebung setzen (Standard: %u)</translation>
</message>
<message>
<source>Specify configuration file (default: %s)</source>
<translation>Konfigurationsdatei festlegen (Standard: %s)</translation>
</message>
<message>
<source>Specify connection timeout in milliseconds (minimum: 1, default: %d)</source>
<translation>Verbindungzeitüberschreitung in Millisekunden festlegen (Minimum: 1, Standard: %d)</translation>
</message>
<message>
<source>Specify pid file (default: %s)</source>
<translation>PID-Datei festlegen (Standard: %s)</translation>
</message>
<message>
<source>Spend unconfirmed change when sending transactions (default: %u)</source>
<translation>Unbestätigtes Wechselgeld darf beim Senden von Transaktionen ausgegeben werden (Standard: %u)</translation>
</message>
<message>
<source>Stop running after importing blocks from disk (default: %u)</source>
<translation>Beenden, nachdem Blöcke vom Datenträger importiert wurden (Standard: %u)</translation>
</message>
<message>
<source>Threshold for disconnecting misbehaving peers (default: %u)</source>
<translation>Schwellenwert, um Verbindungen zu sich nicht konform verhaltenden Gegenstellen zu beenden (Standard: %u)</translation>
</message>
<message>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation>Unbekannter Netztyp in -onlynet angegeben: '%s'</translation>
</message>
<message>
<source>Cannot resolve -bind address: '%s'</source>
<translation>Kann Adresse in -bind nicht auflösen: '%s'</translation>
</message>
<message>
<source>Cannot resolve -externalip address: '%s'</source>
<translation>Kann Adresse in -externalip nicht auflösen: '%s'</translation>
</message>
<message>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation>Ungültiger Betrag für -paytxfee=<amount>: '%s'</translation>
</message>
<message>
<source>Insufficient funds</source>
<translation>Unzureichender Kontostand</translation>
</message>
<message>
<source>Loading block index...</source>
<translation>Lade Blockindex...</translation>
</message>
<message>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation>Mit dem angegebenen Knoten verbinden und versuchen die Verbindung aufrecht zu erhalten</translation>
</message>
<message>
<source>Loading wallet...</source>
<translation>Lade Wallet...</translation>
</message>
<message>
<source>Cannot downgrade wallet</source>
<translation>Wallet kann nicht auf eine ältere Version herabgestuft werden</translation>
</message>
<message>
<source>Cannot write default address</source>
<translation>Standardadresse kann nicht geschrieben werden</translation>
</message>
<message>
<source>Rescanning...</source>
<translation>Durchsuche erneut...</translation>
</message>
<message>
<source>Done loading</source>
<translation>Laden abgeschlossen</translation>
</message>
<message>
<source>Error</source>
<translation>Fehler</translation>
</message>
</context>
</TS> | |
text.py | """
Text types
"""
from scrapely.extractors import text as extract_text, safehtml
class _BaseTextProcessor(object):
"""basic text processor, defines identity functions, some of which
are overridden in subclasses
"""
def extract(self, text):
"""Matches and extracts any string, as it is"""
return text
def adapt(self, text, htmlpage):
return text
class RawFieldTypeProcessor(_BaseTextProcessor):
"""Extracts the raw data, without processing. Data is escaped for presentation
>>> from scrapely.extractors import htmlregion
>>> r = RawFieldTypeProcessor()
>>> html = htmlregion(u'<p>test</p>')
>>> r.extract(html)
u'<p>test</p>'
>>> r.adapt(html, None) |
class TextFieldTypeProcessor(_BaseTextProcessor):
    """Extracts strings, removing all HTML markup
    >>> from scrapely.extractors import htmlregion
    >>> p = TextFieldTypeProcessor()
    >>> html = htmlregion(u'<p>test</p><!-- comment --><script> // script</script>!')
    >>> extracted = p.extract(html)
    >>> extracted
    u'test !'
    >>> p.adapt(extracted, None)
    u'test !'
    >>> html = htmlregion(u'<p> \\n<p>')
    >>> p.extract(html)
    u''
    """
    # Identifier and human-readable summary used when registering this
    # processor as a field type.
    name = 'text'
    description = 'extracts text from web pages, cleaning all markup'

    def extract(self, htmlregion):
        # Strip markup, comments and scripts, returning plain text;
        # delegates to scrapely.extractors.text (imported as extract_text).
        # adapt() is inherited from _BaseTextProcessor and is the identity.
        return extract_text(htmlregion.text_content)
class SafeHtmlFieldTypeProcessor(_BaseTextProcessor):
"""Extracts strings, with only a safe subset of HTML remaining
Extraction checks for presence of text content, and adapt transforms the HTML
>>> from scrapely.extractors import htmlregion
>>> p = SafeHtmlFieldTypeProcessor()
>>> html = htmlregion(u'<p>test</p> <blink>foo')
>>> p.extract(html)
u'<p>test</p> <blink>foo'
>>> p.adapt(html)
u'<p>test</p> foo'
html without text must not be extracted
>>> html = htmlregion(u'<br/>')
"""
name = 'safe html'
description = 'removes all but a small subset of html tags'
def extract(self, htmlregion):
if extract_text(htmlregion.text_content):
return htmlregion
def adapt(self, text, htmlpage=None):
"""Remove html markup"""
return safehtml(text) | u'<p>test</p>'
"""
name = 'raw html'
description = 'raw html as it appears in the page' |
zz_generated.deepcopy.go | // +build !ignore_autogenerated
/*
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
// Code generated by controller-gen. DO NOT EDIT.
package v1alpha1
import (
"k8s.io/api/core/v1"
runtime "k8s.io/apimachinery/pkg/runtime"
)
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *Gateway) DeepCopyInto(out *Gateway) {
*out = *in
out.TypeMeta = in.TypeMeta
in.ObjectMeta.DeepCopyInto(&out.ObjectMeta)
in.Spec.DeepCopyInto(&out.Spec)
in.Status.DeepCopyInto(&out.Status)
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Gateway.
func (in *Gateway) DeepCopy() *Gateway {
if in == nil {
return nil
}
out := new(Gateway)
in.DeepCopyInto(out)
return out
}
// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (in *Gateway) DeepCopyObject() runtime.Object {
if c := in.DeepCopy(); c != nil {
return c
}
return nil
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *GatewayClass) DeepCopyInto(out *GatewayClass) {
*out = *in
out.TypeMeta = in.TypeMeta
in.ObjectMeta.DeepCopyInto(&out.ObjectMeta)
if in.Parameters != nil {
in, out := &in.Parameters, &out.Parameters
*out = new(v1.TypedLocalObjectReference)
(*in).DeepCopyInto(*out)
}
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GatewayClass.
func (in *GatewayClass) DeepCopy() *GatewayClass {
if in == nil {
return nil
}
out := new(GatewayClass)
in.DeepCopyInto(out)
return out
}
// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (in *GatewayClass) DeepCopyObject() runtime.Object {
if c := in.DeepCopy(); c != nil {
return c
}
return nil
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *GatewayClassList) DeepCopyInto(out *GatewayClassList) {
*out = *in
out.TypeMeta = in.TypeMeta
in.ListMeta.DeepCopyInto(&out.ListMeta)
if in.Items != nil {
in, out := &in.Items, &out.Items
*out = make([]GatewayClass, len(*in))
for i := range *in {
(*in)[i].DeepCopyInto(&(*out)[i])
}
}
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GatewayClassList.
func (in *GatewayClassList) DeepCopy() *GatewayClassList {
if in == nil {
return nil
}
out := new(GatewayClassList)
in.DeepCopyInto(out)
return out
}
// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (in *GatewayClassList) DeepCopyObject() runtime.Object {
if c := in.DeepCopy(); c != nil {
return c
}
return nil
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *GatewayList) DeepCopyInto(out *GatewayList) {
*out = *in
out.TypeMeta = in.TypeMeta
in.ListMeta.DeepCopyInto(&out.ListMeta)
if in.Items != nil {
in, out := &in.Items, &out.Items
*out = make([]Gateway, len(*in))
for i := range *in {
(*in)[i].DeepCopyInto(&(*out)[i])
}
}
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GatewayList.
func (in *GatewayList) DeepCopy() *GatewayList {
if in == nil {
return nil
}
out := new(GatewayList)
in.DeepCopyInto(out)
return out
}
// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (in *GatewayList) DeepCopyObject() runtime.Object {
if c := in.DeepCopy(); c != nil {
return c
}
return nil
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *GatewayRouteStatus) DeepCopyInto(out *GatewayRouteStatus) {
*out = *in
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GatewayRouteStatus.
func (in *GatewayRouteStatus) DeepCopy() *GatewayRouteStatus {
if in == nil {
return nil
}
out := new(GatewayRouteStatus)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *GatewaySpec) DeepCopyInto(out *GatewaySpec) {
*out = *in
if in.Listeners != nil {
in, out := &in.Listeners, &out.Listeners
*out = make([]Listener, len(*in))
for i := range *in {
(*in)[i].DeepCopyInto(&(*out)[i])
}
}
if in.Routes != nil {
in, out := &in.Routes, &out.Routes
*out = make([]v1.TypedLocalObjectReference, len(*in))
for i := range *in {
(*in)[i].DeepCopyInto(&(*out)[i])
}
}
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GatewaySpec.
func (in *GatewaySpec) DeepCopy() *GatewaySpec {
if in == nil {
return nil
}
out := new(GatewaySpec)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *GatewayStatus) DeepCopyInto(out *GatewayStatus) {
*out = *in
if in.Listeners != nil {
in, out := &in.Listeners, &out.Listeners
*out = make([]ListenerStatus, len(*in))
for i := range *in {
(*in)[i].DeepCopyInto(&(*out)[i])
}
}
if in.Routes != nil {
in, out := &in.Routes, &out.Routes
*out = make([]GatewayRouteStatus, len(*in))
copy(*out, *in)
}
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new GatewayStatus.
func (in *GatewayStatus) DeepCopy() *GatewayStatus {
if in == nil {
return nil
}
out := new(GatewayStatus)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *HttpRoute) DeepCopyInto(out *HttpRoute) {
*out = *in
out.TypeMeta = in.TypeMeta
in.ObjectMeta.DeepCopyInto(&out.ObjectMeta)
out.Spec = in.Spec
out.Status = in.Status
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HttpRoute.
func (in *HttpRoute) DeepCopy() *HttpRoute {
if in == nil {
return nil
}
out := new(HttpRoute)
in.DeepCopyInto(out)
return out
}
// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (in *HttpRoute) DeepCopyObject() runtime.Object {
if c := in.DeepCopy(); c != nil {
return c
}
return nil
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *HttpRouteList) DeepCopyInto(out *HttpRouteList) {
*out = *in
out.TypeMeta = in.TypeMeta
in.ListMeta.DeepCopyInto(&out.ListMeta)
if in.Items != nil {
in, out := &in.Items, &out.Items
*out = make([]HttpRoute, len(*in))
for i := range *in {
(*in)[i].DeepCopyInto(&(*out)[i])
}
}
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HttpRouteList.
func (in *HttpRouteList) DeepCopy() *HttpRouteList {
if in == nil {
return nil
}
out := new(HttpRouteList)
in.DeepCopyInto(out)
return out
}
// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (in *HttpRouteList) DeepCopyObject() runtime.Object {
if c := in.DeepCopy(); c != nil {
return c
}
return nil
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *HttpRouteSpec) DeepCopyInto(out *HttpRouteSpec) {
*out = *in
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HttpRouteSpec.
func (in *HttpRouteSpec) DeepCopy() *HttpRouteSpec {
if in == nil {
return nil
}
out := new(HttpRouteSpec)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *HttpRouteStatus) DeepCopyInto(out *HttpRouteStatus) {
*out = *in
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new HttpRouteStatus.
func (in *HttpRouteStatus) DeepCopy() *HttpRouteStatus {
if in == nil {
return nil
}
out := new(HttpRouteStatus)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *Listener) DeepCopyInto(out *Listener) {
*out = *in
if in.Address != nil {
in, out := &in.Address, &out.Address
*out = new(ListenerAddress)
**out = **in
}
if in.Ports != nil {
in, out := &in.Ports, &out.Ports
*out = make([]ListenerPort, len(*in))
for i := range *in {
(*in)[i].DeepCopyInto(&(*out)[i])
}
}
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new Listener.
func (in *Listener) DeepCopy() *Listener {
if in == nil {
return nil
}
out := new(Listener)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *ListenerAddress) DeepCopyInto(out *ListenerAddress) {
*out = *in
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ListenerAddress.
func (in *ListenerAddress) DeepCopy() *ListenerAddress {
if in == nil {
return nil
}
out := new(ListenerAddress)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *ListenerError) DeepCopyInto(out *ListenerError) {
*out = *in
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ListenerError.
func (in *ListenerError) DeepCopy() *ListenerError {
if in == nil {
return nil
}
out := new(ListenerError)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *ListenerPort) DeepCopyInto(out *ListenerPort) {
*out = *in
if in.Port != nil {
in, out := &in.Port, &out.Port
*out = new(int)
**out = **in
}
if in.Protocols != nil {
in, out := &in.Protocols, &out.Protocols
*out = make([]string, len(*in))
copy(*out, *in)
}
if in.TLS != nil |
if in.Extension != nil {
in, out := &in.Extension, &out.Extension
*out = new(v1.TypedLocalObjectReference)
(*in).DeepCopyInto(*out)
}
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ListenerPort.
func (in *ListenerPort) DeepCopy() *ListenerPort {
if in == nil {
return nil
}
out := new(ListenerPort)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *ListenerStatus) DeepCopyInto(out *ListenerStatus) {
*out = *in
if in.Errors != nil {
in, out := &in.Errors, &out.Errors
*out = make([]ListenerError, len(*in))
copy(*out, *in)
}
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ListenerStatus.
func (in *ListenerStatus) DeepCopy() *ListenerStatus {
if in == nil {
return nil
}
out := new(ListenerStatus)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *ListenerTLS) DeepCopyInto(out *ListenerTLS) {
*out = *in
if in.Certificates != nil {
in, out := &in.Certificates, &out.Certificates
*out = make([]v1.TypedLocalObjectReference, len(*in))
for i := range *in {
(*in)[i].DeepCopyInto(&(*out)[i])
}
}
if in.MinimumVersion != nil {
in, out := &in.MinimumVersion, &out.MinimumVersion
*out = new(string)
**out = **in
}
if in.Options != nil {
in, out := &in.Options, &out.Options
*out = make(map[string]string, len(*in))
for key, val := range *in {
(*out)[key] = val
}
}
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new ListenerTLS.
func (in *ListenerTLS) DeepCopy() *ListenerTLS {
if in == nil {
return nil
}
out := new(ListenerTLS)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *TcpRoute) DeepCopyInto(out *TcpRoute) {
*out = *in
out.TypeMeta = in.TypeMeta
in.ObjectMeta.DeepCopyInto(&out.ObjectMeta)
out.Spec = in.Spec
out.Status = in.Status
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TcpRoute.
func (in *TcpRoute) DeepCopy() *TcpRoute {
if in == nil {
return nil
}
out := new(TcpRoute)
in.DeepCopyInto(out)
return out
}
// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (in *TcpRoute) DeepCopyObject() runtime.Object {
if c := in.DeepCopy(); c != nil {
return c
}
return nil
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *TcpRouteList) DeepCopyInto(out *TcpRouteList) {
*out = *in
out.TypeMeta = in.TypeMeta
in.ListMeta.DeepCopyInto(&out.ListMeta)
if in.Items != nil {
in, out := &in.Items, &out.Items
*out = make([]TcpRoute, len(*in))
for i := range *in {
(*in)[i].DeepCopyInto(&(*out)[i])
}
}
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TcpRouteList.
func (in *TcpRouteList) DeepCopy() *TcpRouteList {
if in == nil {
return nil
}
out := new(TcpRouteList)
in.DeepCopyInto(out)
return out
}
// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (in *TcpRouteList) DeepCopyObject() runtime.Object {
if c := in.DeepCopy(); c != nil {
return c
}
return nil
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *TcpRouteSpec) DeepCopyInto(out *TcpRouteSpec) {
*out = *in
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TcpRouteSpec.
func (in *TcpRouteSpec) DeepCopy() *TcpRouteSpec {
if in == nil {
return nil
}
out := new(TcpRouteSpec)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *TcpRouteStatus) DeepCopyInto(out *TcpRouteStatus) {
*out = *in
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TcpRouteStatus.
func (in *TcpRouteStatus) DeepCopy() *TcpRouteStatus {
if in == nil {
return nil
}
out := new(TcpRouteStatus)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *TrafficSplit) DeepCopyInto(out *TrafficSplit) {
*out = *in
out.TypeMeta = in.TypeMeta
in.ObjectMeta.DeepCopyInto(&out.ObjectMeta)
out.Spec = in.Spec
out.Status = in.Status
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TrafficSplit.
func (in *TrafficSplit) DeepCopy() *TrafficSplit {
if in == nil {
return nil
}
out := new(TrafficSplit)
in.DeepCopyInto(out)
return out
}
// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (in *TrafficSplit) DeepCopyObject() runtime.Object {
if c := in.DeepCopy(); c != nil {
return c
}
return nil
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *TrafficSplitList) DeepCopyInto(out *TrafficSplitList) {
*out = *in
out.TypeMeta = in.TypeMeta
in.ListMeta.DeepCopyInto(&out.ListMeta)
if in.Items != nil {
in, out := &in.Items, &out.Items
*out = make([]TrafficSplit, len(*in))
for i := range *in {
(*in)[i].DeepCopyInto(&(*out)[i])
}
}
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TrafficSplitList.
func (in *TrafficSplitList) DeepCopy() *TrafficSplitList {
if in == nil {
return nil
}
out := new(TrafficSplitList)
in.DeepCopyInto(out)
return out
}
// DeepCopyObject is an autogenerated deepcopy function, copying the receiver, creating a new runtime.Object.
func (in *TrafficSplitList) DeepCopyObject() runtime.Object {
if c := in.DeepCopy(); c != nil {
return c
}
return nil
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *TrafficSplitSpec) DeepCopyInto(out *TrafficSplitSpec) {
*out = *in
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TrafficSplitSpec.
func (in *TrafficSplitSpec) DeepCopy() *TrafficSplitSpec {
if in == nil {
return nil
}
out := new(TrafficSplitSpec)
in.DeepCopyInto(out)
return out
}
// DeepCopyInto is an autogenerated deepcopy function, copying the receiver, writing into out. in must be non-nil.
func (in *TrafficSplitStatus) DeepCopyInto(out *TrafficSplitStatus) {
*out = *in
}
// DeepCopy is an autogenerated deepcopy function, copying the receiver, creating a new TrafficSplitStatus.
func (in *TrafficSplitStatus) DeepCopy() *TrafficSplitStatus {
if in == nil {
return nil
}
out := new(TrafficSplitStatus)
in.DeepCopyInto(out)
return out
}
| {
in, out := &in.TLS, &out.TLS
*out = new(ListenerTLS)
(*in).DeepCopyInto(*out)
} |
widget_helpers.py | # -*- coding: utf-8 -*-
"""
Created on Wed Sep 19 19:27:45 2018
@author: Jonas Hartmann @ Gilmour group @ EMBL Heidelberg
@descript: Convenient little helpers for ipywidgets hacking.
"""
#------------------------------------------------------------------------------
# IMPORTS
from __future__ import division
import os
import ipywidgets as widgets
import matplotlib.pyplot as plt
from IPython.display import display, clear_output
#------------------------------------------------------------------------------
# Decorator to add a "save figure" button to matplotlib plots,
# including plots created in interactive widgets using ipywidgets.interact!
def savebutton(func):
"""Decorator that adds a "save figure" button to a function that generates
a matplotlib plot, including functions made into interactive widgets using
ipywidgets.interact.
NOTE: make sure you don't have `plt.show()` in the figure generation
function, as that will clear the current figure!
WARNING: This does not properly forward defaults for sliders and such set
in the function definition. Instead, the defaults are those that
are automatically determined by ipywidgets.interact!
Examples
--------
# Without `interact` widget
@savebutton
def make_plot():
t = np.linspace(0, 10, 500)
y = np.sin(2.0*t)
plt.plot(t, y, color='red')
# With `interact` widget
@interact(freq=(0.5, 10, 0.5),
color=['red', 'blue', 'green'])
@savebutton
def make_plot(freq=2.0, color='red'):
t = np.linspace(0, 10, 500)
y = np.sin(freq*t)
plt.plot(t, y, color=color)
"""
def wrapper(**kwargs):
# Prepare textbox (for filename) and button
textbox = widgets.Text(value='', placeholder='Enter Filename',
description='Filename:', disabled=False)
button = widgets.Button(description='Save figure!')
box = widgets.HBox([textbox, button])
# Callback to save figure when button is clicked
# TODO: This current uses/creates a specific folder ('figures') for
# saving. It would be great to make this more general, perhaps
# by triggering a standard file saving dialogue to open!
def | (b):
if textbox.value:
if not os.path.isdir('figures'):
os.mkdir('figures')
figpath = str(textbox.value)
if not figpath.endswith('.pdf'):
figpath += '.pdf'
figpath = os.path.join('figures', figpath)
b.fig.savefig(figpath, bbox_inches='tight', transparent=True)
print "Saved figure as '%s'" % figpath
textbox.value=''
button.on_click(on_button_clicked)
# Run wrapped function to generate figure
func(**kwargs)
# Update figure in button
button.fig = plt.gcf()
# Display textbox and button
display(box)
# Done!
return wrapper
#------------------------------------------------------------------------------
| on_button_clicked |
rapier_world.rs | use crate::utils::NodeExt;
use gdnative::api::Engine;
use gdnative::prelude::*;
use rapier2d::{
dynamics::{
CCDSolver, IntegrationParameters, JointSet, RigidBodyBuilder, RigidBodyHandle, RigidBodySet,
},
geometry::{BroadPhase, ColliderBuilder, ColliderSet, NarrowPhase},
na,
pipeline::PhysicsPipeline,
};
use std::cell::RefCell;
/// Owns every piece of rapier2d state needed to run a 2D simulation:
/// the stepping pipeline, both collision-detection phases, and the
/// body/collider/joint sets the pipeline mutates on every tick.
struct PhysicsState {
    pub pipeline: PhysicsPipeline,
    pub broad_phase: BroadPhase,
    pub narrow_phase: NarrowPhase,
    pub bodies: RigidBodySet,
    pub colliders: ColliderSet,
    pub joints: JointSet,
    pub ccd: CCDSolver,
}
impl PhysicsState {
    /// Create an empty physics world with freshly initialized solver stages.
    fn new() -> Self {
        Self {
            pipeline: PhysicsPipeline::new(),
            broad_phase: BroadPhase::new(),
            narrow_phase: NarrowPhase::new(),
            bodies: RigidBodySet::new(),
            colliders: ColliderSet::new(),
            joints: JointSet::new(),
            ccd: CCDSolver::new(),
        }
    }

    /// Advance the simulation by one step under `gravity`.
    ///
    /// Integration parameters are the rapier defaults, rebuilt on every
    /// call; the trailing `&(), &()` fill the no-op physics-hooks and
    /// event-handler slots of `PhysicsPipeline::step`.
    fn tick(&mut self, gravity: Vector2) {
        // Convert from Godot's Vector2 into nalgebra's vector type.
        let gravity = na::Vector2::new(gravity.x, gravity.y);
        let integration_parameters = IntegrationParameters::default();
        self.pipeline.step(
            &gravity,
            &integration_parameters,
            &mut self.broad_phase,
            &mut self.narrow_phase,
            &mut self.bodies,
            &mut self.colliders,
            &mut self.joints,
            &mut self.ccd,
            &(),
            &(),
        );
    }

    /// Insert an immovable cuboid collider at (`x`, `y`) with dimensions
    /// `w`/`h` (ColliderBuilder::cuboid takes half-extents — TODO confirm
    /// callers pass the intended sizes).
    fn add_static(&mut self, x: f32, y: f32, w: f32, h: f32) {
        let floor = RigidBodyBuilder::new_static().translation(x, y).build();
        // Body must be inserted before its collider can reference it.
        let floor = self.bodies.insert(floor);
        let floor_collider = ColliderBuilder::cuboid(w, h).build();
        self.colliders
            .insert(floor_collider, floor, &mut self.bodies);
    }

    /// Insert a dynamic box at (`x`, `y`) and return its handle so the
    /// caller can track it. The 48 * 0.4 size presumably matches the
    /// box sprite's scaled dimensions — verify against the .tscn asset.
    fn add_box(&mut self, x: f32, y: f32) -> RigidBodyHandle {
        let falling_box = RigidBodyBuilder::new_dynamic().translation(x, y).build();
        let falling_box = self.bodies.insert(falling_box);
        let box_collider = ColliderBuilder::cuboid(48. * 0.4, 48. * 0.4).build();
        self.colliders
            .insert(box_collider, falling_box, &mut self.bodies);
        falling_box
    }
}
/// Godot node that drives a rapier2d world: spawns box scenes on click
/// and mirrors simulated body transforms back onto Node2D children.
#[derive(NativeClass)]
#[inherit(Node2D)]
pub struct RapierWorld2D {
    /// World gravity, editable from the Godot inspector.
    #[property]
    gravity: Vector2,
    // RefCell: godot-exported callbacks take &self, so interior
    // mutability is required to mutate the simulation state.
    physics: RefCell<PhysicsState>,
    // Each rigid-body handle paired with the scene node that renders it.
    boxes: RefCell<Vec<(RigidBodyHandle, Ref<Node2D>)>>,
}
#[methods]
impl RapierWorld2D {
/// Godot constructor: downward gravity and an empty physics world.
fn new(_owner: &Node2D) -> Self {
    godot_print!("RapierWorld2D new");
    Self {
        gravity: Vector2::new(0., 98.),
        physics: RefCell::new(PhysicsState::new()),
        boxes: RefCell::new(Vec::new()),
    }
}
#[export]
fn | (&self, owner: &Node2D) {
let w = owner.get_viewport_rect().width();
let h = owner.get_viewport_rect().height();
godot_print!("size: {},{}", w, h);
let mut physics = self.physics.borrow_mut();
physics.add_static(0., h, w, 10.);
physics.add_static(0., h / 2., 10., h);
physics.add_static(w, h / 2., 10., h);
}
#[export]
/// Button callback: switch to the Godot-physics comparison scene.
fn _on_button_pressed(&self, owner: &Node2D) {
    godot_print!("button");
    // SAFETY: the scene tree outlives this callback, so the reference
    // is valid for the duration of the call — TODO confirm.
    unsafe { owner.get_tree().unwrap().assume_safe() }
        .change_scene("res://scenes/GodotScene.tscn")
        .unwrap();
}
#[export]
/// Per-frame callback: spawns a box at the mouse position while the
/// "click" action is held, steps the simulation, then syncs the scene
/// nodes to the simulated transforms.
fn _process(&self, owner: &Node2D, _delta: f64) {
    let mouse_press = Input::godot_singleton().is_action_pressed("click");
    let pos = owner.get_global_mouse_position();
    // let label: TRef<Label> = owner.get_typed_node("../LabelFps");
    // label.set_text(format!(
    //     "FPS: {}",
    //     Engine::godot_singleton().get_frames_per_second()
    // ));
    if mouse_press {
        self.spawn(owner, pos.x, pos.y);
        // Box-count label (currently disabled). If re-enabled, read the
        // count here: let count = self.boxes.borrow().len();
        // let label: TRef<Label> = owner.get_typed_node("../Label");
        // label.set_text(format!("boxes: {}", count));
    }
    self.physics.borrow_mut().tick(self.gravity);
    self.update_boxes(owner);
}
/// Copy every tracked rigid body's position and rotation onto the
/// Node2D that visualizes it.
fn update_boxes(&self, _owner: &Node2D) {
    let bodies = &self.physics.borrow().bodies;
    for b in self.boxes.borrow().iter() {
        let handle = b.0;
        let node = b.1;
        // Handles in `boxes` are only created by `spawn`, which inserts
        // into `bodies` first, so the lookup is expected to succeed.
        let body = bodies.get(handle).unwrap();
        let pos = body.position();
        // SAFETY: the nodes were added as children of the owner and are
        // not freed elsewhere while this node lives — TODO confirm.
        let node = unsafe { node.assume_safe() };
        node.set_position(Vector2::new(pos.translation.x, pos.translation.y));
        node.set_rotation(pos.rotation.angle() as f64);
    }
}
/// Create one physics box at (`x`, `y`) plus the scene node that
/// renders it, registering the (handle, node) pair for per-frame sync.
fn spawn(&self, owner: &Node2D, x: f32, y: f32) {
    let mut physics = self.physics.borrow_mut();
    let handle = physics.add_box(x, y);
    let mut boxes = self.boxes.borrow_mut();
    // Current length doubles as a unique suffix for the node name.
    let falling_box_index = boxes.len();
    {
        let box_asset = load_scene("res://scenes/RapierBox.tscn").unwrap();
        let new_node = instance_scene::<Node2D>(&box_asset);
        let key_str = format!("box_{}", falling_box_index);
        new_node.set_name(&key_str);
        // Transfer ownership to the scene tree before recording the pair.
        let shared_node = new_node.into_shared();
        owner.add_child(shared_node, false);
        boxes.push((handle, shared_node));
    }
}
}
/// Load the resource at `path` as a `PackedScene`, returning `None` if
/// loading fails or the resource is not actually a `PackedScene`.
pub fn load_scene(path: &str) -> Option<Ref<PackedScene, ThreadLocal>> {
    let resource = ResourceLoader::godot_singleton().load(path, "PackedScene", false)?;
    // SAFETY: the freshly loaded resource has not been handed to any
    // other thread, so treating it as thread-local is sound.
    unsafe { resource.assume_thread_local() }.cast::<PackedScene>()
}
/// Instance `scene` and cast its root node to `Root`, panicking if
/// instancing or the cast fails (a mismatch is a programmer error in
/// the .tscn asset, not a recoverable condition).
fn instance_scene<Root>(scene: &PackedScene) -> Ref<Root, Unique>
where
    Root: gdnative::GodotObject<RefKind = ManuallyManaged> + SubClass<Node>,
{
    let instance = scene
        .instance(PackedScene::GEN_EDIT_STATE_DISABLED)
        .unwrap();
    // SAFETY: a freshly instanced node has no other owners yet, so
    // taking unique ownership is sound.
    let instance = unsafe { instance.assume_unique() };
    instance.try_cast::<Root>().unwrap()
}
Disabled.js | import React, { Fragment } from 'react'
import PropTypes from 'prop-types'
import Repinned from 'repinned'
import Header from '../components/Header'
import PageContent from '../components/PageContent'
import {
Container,
HeaderWrapper,
ContentWrapper,
Heading,
CornerButtonWrapper,
} from '../components/styles'
class | extends React.Component {
state = {
disabled: false,
}
// Flip the `disabled` flag; the functional setState form ensures rapid
// consecutive toggles always read the latest state.
toggleDisabled = () => {
  this.setState(state => ({
    disabled: !state.disabled,
  }))
}
// Page layout: a Repinned-managed header, the scrollable demo content,
// and a corner button wired to toggle the header's `disabled` prop.
render() {
  const { disabled } = this.state
  return (
    <Fragment>
      <Repinned disabled={disabled} {...this.props}>
        {({ setRef, height, ...restProps }) => (
          // Spacer div keeps page content from jumping when the header
          // leaves the normal document flow.
          <div
            style={{
              height,
            }}
          >
            <Header
              innerRef={setRef}
              data-testid="headerContainer"
              data-state={restProps.state}
              {...restProps}
            >
              <Container>
                <HeaderWrapper>
                  <Heading>repinned</Heading>
                </HeaderWrapper>
              </Container>
            </Header>
          </div>
        )}
      </Repinned>
      <Container>
        <ContentWrapper>
          <PageContent />
        </ContentWrapper>
      </Container>
      <CornerButtonWrapper>
        <button
          data-testid="disabledButton"
          type="button"
          onClick={this.toggleDisabled}
        >
          {`Toggle Disabled ${disabled ? 'Off' : 'On'}`}
        </button>
      </CornerButtonWrapper>
    </Fragment>
  )
}
}
Page.propTypes = {
onUnfix: PropTypes.func,
onPin: PropTypes.func,
onUnpin: PropTypes.func,
}
export default Page
| Page |
test_util.py | # Copyright 2014 Pants project contributors (see CONTRIBUTORS.md).
# Licensed under the Apache License, Version 2.0 (see LICENSE).
import os
import subprocess
from hashlib import sha1
from textwrap import dedent
from pex.common import safe_mkdir, safe_open, temporary_dir, touch
from pex.compatibility import to_bytes
from pex.pex import PEX
from pex.pex_builder import PEXBuilder
from pex.typing import TYPE_CHECKING, cast
from pex.util import CacheHelper, DistributionHelper, iter_pth_paths, named_temporary_file
try:
from unittest import mock
except ImportError:
import mock # type: ignore[no-redef,import]
if TYPE_CHECKING:
from typing import Any, Dict, List
def test_access_zipped_assets():
    # type: (...) -> None
    # access_zipped_assets should extract the pex.third_party package's
    # resources into a real directory on disk.
    pex_third_party_asset_dir = DistributionHelper.access_zipped_assets("pex", "third_party")
    resources = os.listdir(pex_third_party_asset_dir)
    assert (
        len(resources) > 0
    ), "The pex.third_party package should contain at least an __init__.py file."
    resources.remove("__init__.py")
    # Anything left should only be bytecode artifacts of __init__.py.
    for path in resources:
        assert path in (
            "__init__.pyc",
            "__init__.pyo",
            "__pycache__",
        ), "Expected only __init__.py (and its compilations) in the pex.third_party package."
def test_hash():
    # type: () -> None
    """CacheHelper.hash must agree with hashlib's sha1 over file contents."""
    digest_of_nothing = sha1().hexdigest()

    # An empty file hashes like an empty byte string.
    with named_temporary_file() as fp:
        fp.flush()
        assert CacheHelper.hash(fp.name) == digest_of_nothing

    # A multi-block payload hashes to sha1 of the raw bytes.
    with named_temporary_file() as fp:
        payload = b"asdf" * 1024 * sha1().block_size + b"extra padding"
        fp.write(payload)
        fp.flush()
        assert CacheHelper.hash(fp.name) == sha1(payload).hexdigest()

    # A caller-supplied digest object is updated in place.
    with named_temporary_file() as fp:
        seeded_digest = sha1()
        fp.write(b"asdf")
        fp.flush()
        assert CacheHelper.hash(fp.name, digest=seeded_digest) == seeded_digest.hexdigest()
def test_dir_hash():
    # type: () -> None
    # dir_hash must be sensitive to file names/contents but ignore
    # bytecode (*.pyc, *.pyc.<n>) and __pycache__ directories entirely.
    with temporary_dir() as tmp_dir:
        safe_mkdir(os.path.join(tmp_dir, "a", "b"))
        with safe_open(os.path.join(tmp_dir, "c", "d", "e.py"), "w") as fp:
            fp.write("contents1")
        with safe_open(os.path.join(tmp_dir, "f.py"), "w") as fp:
            fp.write("contents2")
        hash1 = CacheHelper.dir_hash(tmp_dir)
        # Renaming a directory changes the hash; renaming back restores it.
        os.rename(os.path.join(tmp_dir, "c"), os.path.join(tmp_dir, "c-renamed"))
        assert hash1 != CacheHelper.dir_hash(tmp_dir)
        os.rename(os.path.join(tmp_dir, "c-renamed"), os.path.join(tmp_dir, "c"))
        assert hash1 == CacheHelper.dir_hash(tmp_dir)
        # Compiled bytecode next to sources is ignored.
        touch(os.path.join(tmp_dir, "c", "d", "e.pyc"))
        assert hash1 == CacheHelper.dir_hash(tmp_dir)
        touch(os.path.join(tmp_dir, "c", "d", "e.pyc.123456789"))
        assert hash1 == CacheHelper.dir_hash(tmp_dir)
        # Everything under __pycache__ is ignored — even plain .py files.
        pycache_dir = os.path.join(tmp_dir, "__pycache__")
        safe_mkdir(pycache_dir)
        touch(os.path.join(pycache_dir, "f.pyc"))
        assert hash1 == CacheHelper.dir_hash(tmp_dir)
        touch(os.path.join(pycache_dir, "f.pyc.123456789"))
        assert hash1 == CacheHelper.dir_hash(tmp_dir)
        touch(os.path.join(pycache_dir, "f.py"))
        assert hash1 == CacheHelper.dir_hash(
            tmp_dir
        ), "All content under __pycache__ directories should be ignored."
try:
import __builtin__ as python_builtins # type: ignore[import]
except ImportError:
import builtins as python_builtins # type: ignore[no-redef]
def assert_access_zipped_assets(distribution_helper_import):
    # type: (str) -> bytes
    """Build and run a PEX whose entry point reads an asset extracted via
    DistributionHelper.access_zipped_assets, asserting the asset's content
    is visible on stdout; returns the subprocess's raw stderr so callers
    can make further assertions on it.
    """
    test_executable = dedent(
        """
        import os
        {distribution_helper_import}
        temp_dir = DistributionHelper.access_zipped_assets('my_package', 'submodule')
        with open(os.path.join(temp_dir, 'mod.py'), 'r') as fp:
            for line in fp:
                print(line)
        """.format(
            distribution_helper_import=distribution_helper_import
        )
    )
    with temporary_dir() as td1, temporary_dir() as td2:
        pb = PEXBuilder(path=td1)
        with open(os.path.join(td1, "exe.py"), "w") as fp:
            fp.write(test_executable)
            pb.set_executable(fp.name)
        submodule = os.path.join(td1, "my_package", "submodule")
        safe_mkdir(submodule)
        mod_path = os.path.join(submodule, "mod.py")
        with open(mod_path, "w") as fp:
            fp.write("accessed")
            pb.add_source(fp.name, "my_package/submodule/mod.py")
        # Package markers: a source path of None creates empty files.
        pb.add_source(None, "my_package/__init__.py")
        pb.add_source(None, "my_package/submodule/__init__.py")
        pex = os.path.join(td2, "app.pex")
        pb.build(pex)
        process = PEX(pex, interpreter=pb.interpreter).run(
            blocking=False, stdout=subprocess.PIPE, stderr=subprocess.PIPE
        )
        stdout, stderr = process.communicate()
        assert process.returncode == 0
        # print() appends a newline to the single-line asset content.
        assert b"accessed\n" == stdout
        return cast(bytes, stderr)
def test_access_zipped_assets_integration():
# type: () -> None
stderr = assert_access_zipped_assets("from pex.util import DistributionHelper")
assert b"" == stderr.strip()
def test_named_temporary_file():
# type: () -> None
with named_temporary_file() as fp:
name = fp.name
fp.write(b"hi")
fp.flush()
assert os.path.exists(name)
with open(name) as new_fp:
assert new_fp.read() == "hi"
assert not os.path.exists(name)
@mock.patch("os.path.exists", autospec=True, spec_set=True)
def | (mock_exists):
# type: (Any) -> None
# Ensure path checking always returns True for dummy paths.
mock_exists.return_value = True
with temporary_dir() as tmpdir:
in_tmp = lambda f: os.path.join(tmpdir, f)
PTH_TEST_MAPPING = {
# A mapping of .pth file content -> expected paths.
"/System/Library/Frameworks/Python.framework/Versions/2.7/Extras/lib/python\n": [
"/System/Library/Frameworks/Python.framework/Versions/2.7/Extras/lib/python"
],
"relative_path\nrelative_path2\n\nrelative_path3": [
in_tmp("relative_path"),
in_tmp("relative_path2"),
in_tmp("relative_path3"),
],
"duplicate_path\nduplicate_path": [in_tmp("duplicate_path")],
"randompath\nimport nosuchmodule\n": [in_tmp("randompath")],
"import sys\nfoo\n/bar/baz": [in_tmp("foo"), "/bar/baz"],
"import nosuchmodule\nfoo": [],
"import nosuchmodule\n": [],
"import bad)syntax\n": [],
} # type: Dict[str, List[str]]
for i, pth_content in enumerate(PTH_TEST_MAPPING):
pth_tmp_path = os.path.abspath(os.path.join(tmpdir, "test%s.pth" % i))
with open(pth_tmp_path, "wb") as f:
f.write(to_bytes(pth_content))
assert sorted(PTH_TEST_MAPPING[pth_content]) == sorted(
list(iter_pth_paths(pth_tmp_path))
)
| test_iter_pth_paths |
log.go | // Copyright (c) 2013-2017 The essentiaone developers
// Copyright (c) 2017 The Decred developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.
package main
import (
"fmt"
"os"
"path/filepath"
"github.com/essentiaone/divid/addrmgr"
"github.com/essentiaone/divid/blockchain"
"github.com/essentiaone/divid/blockchain/indexers"
"github.com/essentiaone/divid/connmgr"
"github.com/essentiaone/divid/database"
"github.com/essentiaone/divid/mempool"
"github.com/essentiaone/divid/mining"
"github.com/essentiaone/divid/mining/cpuminer"
"github.com/essentiaone/divid/netsync"
"github.com/essentiaone/divid/peer"
"github.com/essentiaone/divid/txscript"
"github.com/btcsuite/btclog"
"github.com/jrick/logrotate/rotator"
)
// logWriter implements an io.Writer that outputs to both standard output and
// the write-end pipe of an initialized log rotator.
type logWriter struct{}
func (logWriter) Write(p []byte) (n int, err error) {
os.Stdout.Write(p)
logRotator.Write(p)
return len(p), nil
}
// Loggers per subsystem. A single backend logger is created and all subsytem
// loggers created from it will write to the backend. When adding new
// subsystems, add the subsystem logger variable here and to the
// subsystemLoggers map.
//
// Loggers can not be used before the log rotator has been initialized with a
// log file. This must be performed early during application startup by calling
// initLogRotator.
var (
// backendLog is the logging backend used to create all subsystem loggers.
// The backend must not be used before the log rotator has been initialized,
// or data races and/or nil pointer dereferences will occur.
backendLog = btclog.NewBackend(logWriter{})
// logRotator is one of the logging outputs. It should be closed on
// application shutdown.
logRotator *rotator.Rotator
adxrLog = backendLog.Logger("ADXR")
amgrLog = backendLog.Logger("AMGR")
cmgrLog = backendLog.Logger("CMGR")
bcdbLog = backendLog.Logger("BCDB")
btcdLog = backendLog.Logger("BTCD")
chanLog = backendLog.Logger("CHAN")
discLog = backendLog.Logger("DISC")
indxLog = backendLog.Logger("INDX")
minrLog = backendLog.Logger("MINR")
peerLog = backendLog.Logger("PEER")
rpcsLog = backendLog.Logger("RPCS")
scrpLog = backendLog.Logger("SCRP")
srvrLog = backendLog.Logger("SRVR")
syncLog = backendLog.Logger("SYNC")
txmpLog = backendLog.Logger("TXMP")
)
// Initialize package-global logger variables.
func init() {
addrmgr.UseLogger(amgrLog)
connmgr.UseLogger(cmgrLog)
database.UseLogger(bcdbLog)
blockchain.UseLogger(chanLog)
indexers.UseLogger(indxLog)
mining.UseLogger(minrLog)
cpuminer.UseLogger(minrLog)
peer.UseLogger(peerLog)
txscript.UseLogger(scrpLog)
netsync.UseLogger(syncLog)
mempool.UseLogger(txmpLog)
}
// subsystemLoggers maps each subsystem identifier to its associated logger.
var subsystemLoggers = map[string]btclog.Logger{
"ADXR": adxrLog,
"AMGR": amgrLog,
"CMGR": cmgrLog,
"BCDB": bcdbLog,
"BTCD": btcdLog,
"CHAN": chanLog,
"DISC": discLog,
"INDX": indxLog,
"MINR": minrLog,
"PEER": peerLog,
"RPCS": rpcsLog,
"SCRP": scrpLog,
"SRVR": srvrLog,
"SYNC": syncLog,
"TXMP": txmpLog,
}
// initLogRotator initializes the logging rotater to write logs to logFile and
// create roll files in the same directory. It must be called before the
// package-global log rotater variables are used.
func initLogRotator(logFile string) {
logDir, _ := filepath.Split(logFile)
err := os.MkdirAll(logDir, 0700)
if err != nil {
fmt.Fprintf(os.Stderr, "failed to create log directory: %v\n", err)
os.Exit(1)
}
r, err := rotator.New(logFile, 10*1024, false, 3)
if err != nil {
fmt.Fprintf(os.Stderr, "failed to create file rotator: %v\n", err)
os.Exit(1)
}
logRotator = r
}
// setLogLevel sets the logging level for provided subsystem. Invalid
// subsystems are ignored. Uninitialized subsystems are dynamically created as
// needed.
func setLogLevel(subsystemID string, logLevel string) {
// Ignore invalid subsystems.
logger, ok := subsystemLoggers[subsystemID]
if !ok {
return
}
// Defaults to info if the log level is invalid.
level, _ := btclog.LevelFromString(logLevel)
logger.SetLevel(level)
}
// setLogLevels sets the log level for all subsystem loggers to the passed
// level. It also dynamically creates the subsystem loggers as needed, so it | // create loggers as needed.
for subsystemID := range subsystemLoggers {
setLogLevel(subsystemID, logLevel)
}
}
// directionString is a helper function that returns a string that represents
// the direction of a connection (inbound or outbound).
func directionString(inbound bool) string {
if inbound {
return "inbound"
}
return "outbound"
}
// pickNoun returns the singular or plural form of a noun depending
// on the count n.
func pickNoun(n uint64, singular, plural string) string {
if n == 1 {
return singular
}
return plural
} | // can be used to initialize the logging system.
func setLogLevels(logLevel string) {
// Configure all sub-systems with the new logging level. Dynamically |
mod.rs | pub mod translation;
type Bundle = fluent::bundle::FluentBundle<
fluent::FluentResource,
intl_memoizer::concurrent::IntlLangMemoizer,
>;
type Map = std::collections::HashMap<realm_lang::Language, Option<Bundle>>;
pub type Base = std::sync::Arc<antidote::Mutex<(Bundle, Map)>>;
#[derive(serde::Serialize)]
#[allow(clippy::upper_case_acronyms)]
pub struct HTML {
pub text: String,
}
#[derive(serde::Serialize)]
struct Integer {
value: i64,
localised: String,
}
#[derive(serde::Serialize)]
struct Float {
value: f64,
localised: String,
}
fn new_bundle(lang: &realm_lang::Language, res: String) -> Bundle {
let i = issue(lang, res.as_str(), None);
let mut b = fluent::bundle::FluentBundle::new_concurrent(vec![lang
.to_2_letter_code()
.parse()
.unwrap_or_else(|_| panic!("{}", i))]);
b.add_resource(fluent::FluentResource::try_new(res).unwrap_or_else(|_| panic!("{}", i)))
.unwrap_or_else(|_| panic!("{}", i));
b
}
pub fn new_base(id: &'static str) -> Base {
let default = realm_lang::Language::English;
std::sync::Arc::new(antidote::Mutex::new((
new_bundle(
&default,
read_file(&default, id).unwrap_or_else(|| panic!("cant read english resource: {}", id)),
),
std::collections::HashMap::new(),
)))
}
// fn bundle<'a, 'b>(
// base: &'a Base,
// lang: &realm_lang::Language,
// ) -> (antidote::MutexGuard<'b, (Bundle, crate::Map)>, &'b Bundle)
// where
// 'a: 'b,
// {
// use std::ops::DerefMut;
//
// let mut lock = base.lock();
// let (en, ref mut m) = lock.deref_mut();
// let b = match m.get(lang) {
// Some(Some(v)) => v,
// Some(None) => en,
// None => {
// todo!()
// }
// };
//
// (lock, b)
// }
fn issue(lang: &realm_lang::Language, res: &str, id: Option<&str>) -> String {
format!("issue with {}/{}/{:?}", lang.to_2_letter_code(), res, id)
}
/*pub fn html(base: &Base, lang: &realm_lang::Language, res: &'static str, id: &'static str) -> HTML {
assert!(id.ends_with("-html"));
HTML {
text: message(base, lang, res, id),
}
}
pub fn message(
base: &Base,
lang: &realm_lang::Language,
res: &'static str,
id: &'static str,
) -> String {
lookup(base, lang, res, id, None, None)
}
// message_with_args
pub fn attribute(
base: &Base,
lang: &realm_lang::Language,
res: &'static str,
id: &'static str,
attr: &'static str,
) -> String {
lookup(base, lang, res, id, Some(attr), None)
}*/
// message_with_args
pub fn lookup(
base: &Base,
lang: &realm_lang::Language,
res: &'static str,
id: &'static str,
attribute: Option<&'static str>,
args: Option<&fluent::FluentArgs>,
) -> String {
use std::ops::DerefMut;
let i = issue(lang, res, Some(id));
let mut lock = base.lock();
let (en, ref mut m) = lock.deref_mut();
if m.get(lang).is_none() {
match read_file(lang, res) {
Some(v) => {
m.insert(*lang, Some(new_bundle(lang, v)));
}
None => {
m.insert(*lang, None);
}
}
};
let b: &Bundle = match m.get(lang) {
Some(Some(v)) => v,
Some(None) => en,
None => unreachable!(),
}; |
let msg = b
.get_message(id)
.or_else(|| en.get_message(id))
.unwrap_or_else(|| panic!("{}", i));
let mut errors = vec![];
let pattern = match attribute {
Some(key) => msg
.get_attribute(key)
.unwrap_or_else(|| panic!("{}", i))
.value(),
None => msg.value().unwrap_or_else(|| panic!("{}", i)),
};
let s = b.format_pattern(pattern, args, &mut errors);
if !errors.is_empty() {
panic!("errors found in {}: {:?}", i, errors)
}
s.into()
}
fn read_file(lang: &realm_lang::Language, res: &'static str) -> Option<String> {
let string = match (lang, res) {
(&realm_lang::Language::Hindi, "translation") => {
include_str!("../../i18n/hi/translation.ftl")
}
(_, "translation") => {
include_str!("../../i18n/en/translation.ftl")
}
_ => panic!(),
};
Some(string.to_string())
} | |
main4.py | # -*- coding: utf-8 -*-
"""
Created on Sun Feb 10 16:45:42 2019
@author: ASUS PC
"""
from flask import Flask, render_template, Response
import time
from threading import Lock, Thread
import queue
import socket
from threading import Thread
# emulated camera
from camera3 import Camera
# Raspberry Pi camera module (requires picamera package)
# from camera_pi import Camera
#server_socket = socket.socket()
#server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
#server_socket.bind(('0.0.0.0', 2000))
#server_socket.listen(0)
#connection = server_socket.accept()[0] #.makefile('rb')
#connection_file = connection.makefile('rb')
server_socket = None
connection_file = None
connection = None
frame_queue = queue.Queue(maxsize=5)
stream_entered = False
socket_open = False
lock = Lock()
app = Flask(__name__)
import urllib.request
import json
def send_device_status(cond):
body = {'device_id': 26082007, 'status': cond}
myurl = "http://167.99.215.27:8000/api/updateDeviceStatus"
req = urllib.request.Request(myurl)
req.add_header('Content-Type', 'application/json')
jsondata = json.dumps(body)
jsondataasbytes = jsondata.encode('utf-8')
req.add_header('Content-Length', len(jsondataasbytes))
print (jsondataasbytes)
response = urllib.request.urlopen(req, jsondataasbytes)
def bar(camera, t):
global stream_entered
global lock
global frame_queue
global server_socket
global connection_file
global connection
global socket_open
first_time = True
while True:
if not socket_open:
if first_time:
t.sleep(2)
send_device_status(socket_open)
server_socket = socket.socket()
server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server_socket.bind(('0.0.0.0', 2000))
keep_on = True
while keep_on:
try:
keep_on = False
server_socket.listen(0)
connection = server_socket.accept()[0] # .makefile('rb')
print('I am here')
except Exception as e:
print(e)
keep_on = True
# connection = server_socket.accept()[0] # .makefile('rb')
connection_file = connection.makefile('rb')
socket_open = True
send_device_status(socket_open)
elif stream_entered:
start = t.time()
try:
frame_temp = camera.get_frame(connection_file, time)
except Exception as e:
print("An exception occured")
print(e)
socket_open = False
stream_entered = False
send_device_status(socket_open)
print("Waiting for socket")
server_socket = socket.socket()
server_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
server_socket.bind(('0.0.0.0', 2000))
keep_on = True
while keep_on:
try:
keep_on = False
server_socket.listen(0)
connection = server_socket.accept()[0]
except Exception as e:
print(e)
keep_on = True
#connection = server_socket.accept()[0] # .makefile('rb')
connection_file = connection.makefile('rb')
socket_open = True
send_device_status(socket_open)
print("Socket opened")
if frame_temp is not None:
lock.acquire()
frame_queue.put(frame_temp)
lock.release()
finish = t.time()
@app.route('/')
def index():
"""Video streaming home page."""
print('x')
return render_template('index.html')
def gen():
|
def gen2(frame):
yield (b'--frame\r\n'
b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n')
@app.route('/video_feed')
def video_feed():
print('in')
global stream_entered
global socket_open
global connection
if not stream_entered and not socket_open:
try:
file_name = "root/video_stream/RefreshImage.jpg"
with open(file_name, 'rb') as img_file:
frame_temp = img_file.read()
except Exception as e:
print(e)
file_name = "RefreshImage.jpg"
with open(file_name, 'rb') as img_file:
frame_temp = img_file.read()
return Response(gen2(frame_temp),
mimetype='multipart/x-mixed-replace; boundary=frame')
elif not stream_entered and socket_open:
# Start streaming
connection.sendall(b'w')
print('I am in01')
data = connection.recv(128)
print('I am in1')
if data == b's':
print('I am in')
stream_entered = True
if stream_entered:
print(stream_entered)
"""Video streaming route. Put this in the src attribute of an img tag."""
return Response(gen(),
mimetype='multipart/x-mixed-replace; boundary=frame')
def foo():
app.run('0.0.0.0', port=5000, debug=False, threaded=True,ssl_context=('/etc/letsencrypt/live/vestelagu.site/fullchain.pem','/etc/letsencrypt/live/vestelagu.site/privkey.pem'))
if __name__ == '__main__':
dum = Thread(target= bar, args=(Camera(), time))
dum.start()
foo()
| global frame_queue
global lock
"""Video streaming generator function."""
while True:
frame = frame_queue.get()
if frame is not None:
yield (b'--frame\r\n'
b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n')
else:
try:
file_name = "root/video_stream/RefreshImage.jpg"
with open(file_name, 'rb') as img_file:
frame_temp = img_file.read()
except Exception as e:
print(e)
file_name = "RefreshImage.jpg"
with open(file_name, 'rb') as img_file:
frame_temp = img_file.read()
frame = frame_temp
yield (b'--frame\r\n'
b'Content-Type: image/jpeg\r\n\r\n' + frame + b'\r\n') |
attachment.ts | export class Attachment {
id: number;
type: String;
name: String;
firePower: number;
accuracy: number;
range: number;
handling: number;
rateOfFire: number;
magSize: number;
fireMode: String;
zoomLevel: String;
positives: String;
negatives: String;
weapon: String;
constructor(
id: number,
type: String,
name: String,
firePower: number, | magSize: number,
fireMode: String,
zoomLevel: String,
positives: String,
negatives: String,
weapon: String
) {
this.accuracy = accuracy;
this.positives = positives;
this.negatives = negatives;
this.weapon = weapon;
this.id = id;
this.type = type;
this.fireMode = fireMode;
this.firePower = firePower;
this.magSize = magSize;
this.handling = handling;
this.rateOfFire = rateOfFire;
this.range = range;
this.name = name;
this.zoomLevel = zoomLevel;
}
} | accuracy: number,
range: number,
handling: number,
rateOfFire: number, |
setup.py | # (C) Datadog, Inc. 2018
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
# Always prefer setuptools over distutils
from setuptools import setup
# To use a consistent encoding
from codecs import open
from os import path
HERE = path.abspath(path.dirname(__file__))
# Get the long description from the README file
with open(path.join(HERE, 'README.md'), encoding='utf-8') as f:
long_description = f.read()
# Get version info
ABOUT = {}
with open(path.join(HERE, "datadog_checks", "hdfs_namenode", "__about__.py")) as f:
exec(f.read(), ABOUT)
def | (fpath):
with open(path.join(HERE, fpath), encoding='utf-8') as f:
return f.readlines()
CHECKS_BASE_REQ = 'datadog_checks_base'
setup(
name='datadog-hdfs_namenode',
version=ABOUT['__version__'],
description='The HDFS Namenode check',
long_description=long_description,
long_description_content_type='text/markdown',
keywords='datadog agent hdfs_namenode check',
# The project's main homepage.
url='https://github.com/DataDog/integrations-core',
# Author details
author='Datadog',
author_email='[email protected]',
# License
license='BSD',
# See https://pypi.python.org/pypi?%3Aaction=list_classifiers
classifiers=[
'Development Status :: 5 - Production/Stable',
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Topic :: System :: Monitoring',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python :: 2',
'Programming Language :: Python :: 2.7',
],
# The package we're going to ship
packages=['datadog_checks.hdfs_namenode'],
# Run-time dependencies
install_requires=[CHECKS_BASE_REQ],
tests_require=get_requirements('requirements-dev.txt'),
# Extra files to ship with the wheel package
include_package_data=True,
)
| get_requirements |
directives.js | "use strict";
angular.module("app.directives", ["ui.load"]).directive("uiModule", ["MODULE_CONFIG", "uiLoad", "$compile", function(MODULE_CONFIG, uiLoad, $compile) {
return {
restrict: "A",
compile: function(el, attrs) {
var contents = el.contents().clone();
return function(scope, el, attrs) {
el.contents().remove();
uiLoad.load(MODULE_CONFIG[attrs.uiModule]).then(function() {
$compile(contents)(scope, function(clonedElement, scope) {
el.append(clonedElement)
})
})
}
}
}
}]).directive("uiShift", ["$timeout", function($timeout) {
return {
restrict: "A",
link: function(scope, el, attr) {
var _el = $(el),
_window = $(window),
prev = _el.prev(),
parent, width = _window.width();
!prev.length && (parent = _el.parent());
function sm() {
$timeout(function() {
var method = attr.uiShift;
var target = attr.target;
_el.hasClass("in") || _el[method](target).addClass("in")
})
}
function md() {
parent && parent["prepend"](el);
!parent && _el["insertAfter"](prev);
_el.removeClass("in")
}(width < 768 && sm()) || md();
_window.resize(function() {
if (width !== _window.width()) {
$timeout(function() {
(_window.width() < 768 && sm()) || md();
width = _window.width()
})
}
})
}
}
}]).directive("uiToggleClass", ["$timeout", "$document", function($timeout, $document) {
return {
restrict: "AC",
link: function(scope, el, attr) {
el.on("click", function(e) {
e.preventDefault();
var classes = attr.uiToggleClass.split(","),
targets = (attr.target && attr.target.split(",")) || Array(el),
key = 0;
angular.forEach(classes, function(_class) {
var target = targets[(targets.length && key)];
(_class.indexOf("*") !== -1) && magic(_class, target);
$(target).toggleClass(_class);
key++
});
$(el).toggleClass("active");
function magic(_class, target) {
var patt = new RegExp("\\s" + _class.replace(/\*/g, "[A-Za-z0-9-_]+").split(" ").join("\\s|\\s") + "\\s", "g");
var cn = " " + $(target)[0].className + " ";
while (patt.test(cn)) {
cn = cn.replace(patt, " ")
}
$(target)[0].className = $.trim(cn)
}
})
}
}
}]).directive("uiNav", ["$timeout", function($timeout) {
return {
restrict: "AC",
link: function(scope, el, attr) {
var _window = $(window),
_mb = 768,
wrap = $(".app-aside"),
next, backdrop = ".dropdown-backdrop";
el.on("click", "a", function(e) {
next && next.trigger("mouseleave.nav");
var _this = $(this);
_this.parent().siblings(".active").toggleClass("active");
_this.next().is("ul") && _this.parent().toggleClass("active") && e.preventDefault();
_this.next().is("ul") || ((_window.width() < _mb) && $(".app-aside").removeClass("show off-screen"))
});
el.on("mouseenter", "a", function(e) {
next && next.trigger("mouseleave.nav");
$("> .nav", wrap).remove();
if (!$(".app-aside-fixed.app-aside-folded").length || (_window.width() < _mb)) {
return
}
var _this = $(e.target),
top, w_h = $(window).height(),
offset = 50,
min = 150;
!_this.is("a") && (_this = _this.closest("a"));
if (_this.next().is("ul")) {
next = _this.next()
} else {
return
}
_this.parent().addClass("active");
top = _this.parent().position().top + offset;
next.css("top", top);
if (top + next.height() > w_h) {
next.css("bottom", 0)
}
if (top + min > w_h) {
next.css("bottom", w_h - top - offset).css("top", "auto")
}
next.appendTo(wrap);
next.on("mouseleave.nav", function(e) {
$(backdrop).remove();
next.appendTo(_this.parent());
next.off("mouseleave.nav").css("top", "auto").css("bottom", "auto");
_this.parent().removeClass("active")
});
$(".smart").length && $('<div class="dropdown-backdrop"/>').insertAfter(".app-aside").on("click", function(next) {
next && next.trigger("mouseleave.nav")
})
});
wrap.on("mouseleave", function(e) {
next && next.trigger("mouseleave.nav");
$("> .nav", wrap).remove()
})
}
}
}]).directive("uiScroll", ["$location", "$anchorScroll", function($location, $anchorScroll) {
return {
restrict: "AC",
link: function(scope, el, attr) {
el.on("click", function(e) {
$location.hash(attr.uiScroll);
$anchorScroll()
})
}
}
}]).directive("uiFullscreen", ["uiLoad", function(uiLoad) {
return {
restrict: "AC",
template: '<i class="fa fa-expand fa-fw text"></i><i class="fa fa-compress fa-fw text-active"></i>',
link: function(scope, el, attr) {
if (screenfull.enabled) {
el.removeClass("hide")
}
el.on("click", function() {
var target;
attr.target && (target = $(attr.target)[0]);
el.toggleClass("active");
screenfull.toggle(target)
})
}
}
}]).directive("uiButterbar", ["$rootScope", "$anchorScroll", function($rootScope, $anchorScroll) {
return {
restrict: "AC",
template: '<span class="bar"></span>',
link: function(scope, el, attrs) {
el.addClass("butterbar hide");
scope.$on("$stateChangeStart", function(event) {
$anchorScroll();
el.removeClass("hide").addClass("active")
}); | scope.$on("$stateChangeSuccess", function(event, toState, toParams, fromState) {
event.targetScope.$watch("$viewContentLoaded", function() {
el.addClass("hide").removeClass("active")
})
})
}
}
}]).directive("setNgAnimate", ["$animate", function($animate) {
return {
link: function($scope, $element, $attrs) {
$scope.$watch(function() {
return $scope.$eval($attrs.setNgAnimate, $scope)
}, function(valnew, valold) {
$animate.enabled( !! valnew, $element)
})
}
}
}]).directive("highlight", ["$interpolate", "hljs", "$window", function($interpolate, hljs, $window) {
return {
restrict: "EA",
scope: true,
compile: function(tElem, tAttrs) {
var interpolateFn = $interpolate(tElem.html(), true);
tElem.html("");
return function(scope, elem, attrs) {
scope.$watch(interpolateFn, function(value) {
elem.html(hljs.highlight("sql", value).value)
})
}
}
}
}]).directive("ngConfirmClick", [function() {
return {
priority: 1,
terminal: true,
link: function(scope, element, attr) {
var msg = attr.ngConfirmClick || "Are you sure?";
var clickAction = attr.confirmedClick;
element.bind("click", function(event) {
if (window.confirm(msg)) {
scope.$eval(clickAction)
}
})
}
}
}]).directive("scrollLeftOnClick", function() {
return {
restrict: "A",
link: function(scope, $elm) {
$elm.on("click", function() {
$('[name="unionScroll"]').animate({
scrollLeft: "+=400"
}, 1000)
})
}
}
}).directive("scrollRightOnClick", function() {
return {
restrict: "A",
link: function(scope, $elm) {
$elm.on("click", function() {
$('[name="unionScroll"]').animate({
scrollLeft: "-=400"
}, 1000)
})
}
}
}).directive("scroll", function($window) {
return function(scope, element, attrs) {
angular.element(document.getElementsByName("unionScroll")).bind("scroll", function() {
var scrollLeft = this.scrollLeft;
angular.forEach(document.getElementsByName("unionScroll"), function(ele) {
ele.scrollLeft = scrollLeft
})
})
}
}).directive("slideToggle", function($ocLazyLoad, $timeout) {
return {
restrict: "A",
scope: {
isOpen: "=slideToggle"
},
link: function(scope, element, attr) {
var slideDuration = parseInt(attr.slideToggleDuration, 10) || 200;
if (attr.startShown == "false") {
element.hide()
}
scope.$watch("isOpen", function(newVal, oldVal) {
if (newVal !== oldVal) {
element.stop().slideToggle(slideDuration)
}
if (scope.isOpen) {
if (angular.element("xmp").length > 0) {
$ocLazyLoad.load([{
files: ["https://dev.g.alicdn.com/alimama-engineering-fe/zhizi-fe/0.0.30/js/sqi-cdn/strapdown.min.js"],
cache: false
}])
}
}
})
}
}
}); | |
pulsar-perf-go.go | // Licensed to the Apache Software Foundation (ASF) under one
// or more contributor license agreements. See the NOTICE file
// distributed with this work for additional information
// regarding copyright ownership. The ASF licenses this file
// to you under the Apache License, Version 2.0 (the
// "License"); you may not use this file except in compliance
// with the License. You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing,
// software distributed under the License is distributed on an
// "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
// KIND, either express or implied. See the License for the
// specific language governing permissions and limitations
// under the License.
package main
import (
"context"
"fmt"
"net/http"
_ "net/http/pprof"
"os"
"os/signal"
"github.com/spf13/cobra"
log "github.com/sirupsen/logrus"
"github.com/apache/pulsar-client-go/pulsar"
)
// global flags
var FlagProfile bool
var flagDebug bool
type ClientArgs struct {
ServiceURL string
}
var clientArgs ClientArgs
func NewClient() (pulsar.Client, error) {
clientOpts := pulsar.ClientOptions{
URL: clientArgs.ServiceURL,
}
return pulsar.NewClient(clientOpts)
}
func initLogger(debug bool) {
log.SetFormatter(&log.TextFormatter{
FullTimestamp: true,
TimestampFormat: "15:04:05.000",
})
level := log.InfoLevel
if debug {
level = log.DebugLevel
}
log.SetLevel(level)
}
func main() {
rootCmd := &cobra.Command{
PersistentPreRun: func(cmd *cobra.Command, args []string) {
initLogger(flagDebug)
},
Use: "pulsar-perf-go",
}
flags := rootCmd.PersistentFlags()
flags.BoolVar(&FlagProfile, "profile", false, "enable profiling")
flags.BoolVar(&flagDebug, "debug", false, "enable debug output")
flags.StringVarP(&clientArgs.ServiceURL, "service-url", "u",
"pulsar://localhost:6650", "The Pulsar service URL")
rootCmd.AddCommand(newProducerCommand())
rootCmd.AddCommand(newConsumerCommand())
err := rootCmd.Execute()
if err != nil { | os.Exit(1)
}
}
func stopCh() <-chan struct{} {
stop := make(chan struct{})
signalCh := make(chan os.Signal, 1)
signal.Notify(signalCh, os.Interrupt)
go func() {
<-signalCh
close(stop)
}()
return stop
}
func RunProfiling(stop <-chan struct{}) {
go func() {
if err := serveProfiling("0.0.0.0:6060", stop); err != nil && err != http.ErrServerClosed {
log.WithError(err).Error("Unable to start debug profiling server")
}
}()
}
// use `go tool pprof http://addr/debug/pprof/profile` to get pprof file(cpu info)
// use `go tool pprof http://addr/debug/pprof/heap` to get inuse_space file
func serveProfiling(addr string, stop <-chan struct{}) error {
s := http.Server{
Addr: addr,
Handler: http.DefaultServeMux,
}
go func() {
<-stop
log.Infof("Shutting down pprof server")
s.Shutdown(context.Background())
}()
fmt.Printf("Starting pprof server at: %s\n", addr)
fmt.Printf(" use go tool pprof http://%s/debug/pprof/prof to get pprof file(cpu info)\n", addr)
fmt.Printf(" use go tool pprof http://%s/debug/pprof/heap to get inuse_space file\n", addr)
fmt.Println()
return s.ListenAndServe()
} | fmt.Fprintf(os.Stderr, "executing command error=%+v\n", err) |
post.js | var Q = require('q');
var url = require('url');
var _ = require('lodash');
var qs = require('querystring');
var crypto = require('./crypto');
var http = require('http');
var https = require('https');
var baseLocation = "://tuner.pandora.com/services/json/";
function createRequestor(clientInfo) {
var encryptor = crypto.e(clientInfo.encrypt);
return function request(secure, query, body, skipEncrypt) {
//figure out where to send our request
var sender = secure === true ? https : http;
var location = ["http" + (secure === true ? "s" : "") + baseLocation, qs.stringify(query)].join("?");
location = url.parse(location);
// console.log("requesting %s", location.path);
location.method = "POST";
location.headers = {
"User-Agent": "pianist-js",
"Content-Type": "text/plain"
};
//encode the body
body = JSON.stringify(body);
// console.log("sending (raw) body %s", body);
body = new Buffer(body);
if (skipEncrypt !== true) {
body = encryptor(body).toString('hex');
body = new Buffer(body);
}
// console.log("sending (final) body %s", body);
return Q.promise(function(resolve, reject) {
var req = sender.request(location, function(res) {
var data = [];
res.on('data', function(d) {
data.push(d);
});
res.on('end', function() {
data = Buffer.concat(data);
if (res.statusCode !== 200) {
return reject("Server returned non-200 status", res);
}
data = parseJSON(data);
if (data === null) {
return reject("Unable to parse JSON");
} | case "ok":
if (!data.hasOwnProperty("result")) {
return reject("Response lacks a `result` property", res);
}
return resolve(data.result);
case "fail":
return reject(data.message, data.code, res);
default:
return reject("Invalid response from server", res);
}
});
});
req.on('error', reject);
req.end(body);
});
}
}
function parseJSON(input) {
try {
return JSON.parse(input);
} catch (e) {
return null;
}
}
module.exports = createRequestor; |
switch (data.stat) { |
main.go | package main
import (
"log" | )
func main() {
// Step 1: table generation
log.Println(ctable.New())
} |
"github.com/deeper-x/gocyph/ctable" |
constraints.rs | use core::{borrow::Borrow, marker::PhantomData};
use crate::{
crh::{
bowe_hopwood::{Parameters, CHUNK_SIZE, CRH},
pedersen::Window,
CRHGadget as CRGGadgetTrait,
},
Vec,
};
use ark_ec::{ModelParameters, TEModelParameters};
use ark_ff::Field;
use ark_r1cs_std::{
alloc::AllocVar, groups::curves::twisted_edwards::AffineVar, prelude::*, uint8::UInt8,
};
use ark_relations::r1cs::{Namespace, SynthesisError};
use ark_r1cs_std::bits::boolean::Boolean;
type ConstraintF<P> = <<P as ModelParameters>::BaseField as Field>::BasePrimeField;
#[derive(Derivative)]
#[derivative(Clone(bound = "P: TEModelParameters, W: Window"))]
pub struct ParametersVar<P: TEModelParameters, W: Window> {
params: Parameters<P>,
#[doc(hidden)]
_window: PhantomData<W>,
}
pub struct CRHGadget<P: TEModelParameters, F: FieldVar<P::BaseField, ConstraintF<P>>>
where
for<'a> &'a F: FieldOpsBounds<'a, P::BaseField, F>,
{
#[doc(hidden)]
_params: PhantomData<P>,
#[doc(hidden)]
_base_field: PhantomData<F>,
}
impl<P, F, W> CRGGadgetTrait<CRH<P, W>, ConstraintF<P>> for CRHGadget<P, F>
where
    for<'a> &'a F: FieldOpsBounds<'a, P::BaseField, F>,
    F: FieldVar<P::BaseField, ConstraintF<P>>,
    F: TwoBitLookupGadget<ConstraintF<P>, TableConstant = P::BaseField>
        + ThreeBitCondNegLookupGadget<ConstraintF<P>, TableConstant = P::BaseField>,
    P: TEModelParameters,
    W: Window,
{
    type OutputVar = AffineVar<P, F>;
    type ParametersVar = ParametersVar<P, W>;

    /// Evaluates the Bowe-Hopwood CRH over `input` inside the constraint system:
    /// the bytes are expanded to little-endian bits, zero-padded to a multiple
    /// of `CHUNK_SIZE`, grouped into windows, and folded with the precomputed
    /// generators via 3-bit signed-digit scalar multiplication.
    #[tracing::instrument(target = "r1cs", skip(parameters, input))]
    fn evaluate(
        parameters: &Self::ParametersVar,
        input: &[UInt8<ConstraintF<P>>],
    ) -> Result<Self::OutputVar, SynthesisError> {
        // Pad the input with constant-false bits if it is not a whole number of chunks.
        let mut input_in_bits: Vec<Boolean<_>> = input
            .iter()
            .flat_map(|byte| byte.to_bits_le().unwrap())
            .collect();
        if (input_in_bits.len()) % CHUNK_SIZE != 0 {
            let current_length = input_in_bits.len();
            for _ in 0..(CHUNK_SIZE - current_length % CHUNK_SIZE) {
                input_in_bits.push(Boolean::constant(false));
            }
        }
        assert!(input_in_bits.len() % CHUNK_SIZE == 0);
        // The generator table must match the window configuration exactly.
        assert_eq!(parameters.params.generators.len(), W::NUM_WINDOWS);
        for generators in parameters.params.generators.iter() {
            assert_eq!(generators.len(), W::WINDOW_SIZE);
        }
        // Regroup bits: windows of WINDOW_SIZE chunks, each chunk CHUNK_SIZE bits.
        let input_in_bits = input_in_bits
            .chunks(W::WINDOW_SIZE * CHUNK_SIZE)
            .map(|x| x.chunks(CHUNK_SIZE).collect::<Vec<_>>())
            .collect::<Vec<_>>();
        // FIX: restored `&parameters` — the source contained the mojibake
        // "¶meters" (HTML-entity corruption of "&para…"), which does not compile.
        let result = AffineVar::precomputed_base_3_bit_signed_digit_scalar_mul(
            &parameters.params.generators,
            &input_in_bits,
        )?;
        Ok(result)
    }
}
// Allocation of `ParametersVar` from native Bowe-Hopwood `Parameters`.
// The parameters are circuit constants, so no constraints are generated:
// the constraint-system handle and the allocation mode are both ignored.
impl<P, W> AllocVar<Parameters<P>, ConstraintF<P>> for ParametersVar<P, W>
where
    P: TEModelParameters,
    W: Window,
{
    #[tracing::instrument(target = "r1cs", skip(_cs, f))]
    fn new_variable<T: Borrow<Parameters<P>>>(
        _cs: impl Into<Namespace<ConstraintF<P>>>,
        f: impl FnOnce() -> Result<T, SynthesisError>,
        _mode: AllocationMode,
    ) -> Result<Self, SynthesisError> {
        // Clone the native parameters out of the closure; nothing is witnessed.
        let params = f()?.borrow().clone();
        Ok(ParametersVar {
            params,
            _window: PhantomData,
        })
    }
}
#[cfg(test)]
mod test {
use ark_std::rand::Rng;
use crate::crh::bowe_hopwood;
use crate::crh::pedersen;
use crate::{CRHGadget, CRH};
use ark_ec::ProjectiveCurve;
use ark_ed_on_bls12_381::{constraints::FqVar, EdwardsParameters, Fq as Fr};
use ark_r1cs_std::{alloc::AllocVar, uint8::UInt8, R1CSVar};
use ark_relations::r1cs::{ConstraintSystem, ConstraintSystemRef};
use ark_std::test_rng;
type TestCRH = bowe_hopwood::CRH<EdwardsParameters, Window>;
type TestCRHGadget = bowe_hopwood::constraints::CRHGadget<EdwardsParameters, FqVar>;
#[derive(Clone, PartialEq, Eq, Hash)]
pub(super) struct Window;
impl pedersen::Window for Window {
const WINDOW_SIZE: usize = 63;
const NUM_WINDOWS: usize = 8;
}
fn generate_input<R: Rng>(
cs: ConstraintSystemRef<Fr>,
rng: &mut R,
) -> ([u8; 189], Vec<UInt8<Fr>>) {
let mut input = [1u8; 189];
rng.fill_bytes(&mut input);
let mut input_bytes = vec![];
for byte in input.iter() {
input_bytes.push(UInt8::new_witness(cs.clone(), || Ok(byte)).unwrap());
}
(input, input_bytes)
}
#[test]
fn test_native_equality() |
}
| {
let rng = &mut test_rng();
let cs = ConstraintSystem::<Fr>::new_ref();
let (input, input_var) = generate_input(cs.clone(), rng);
println!("number of constraints for input: {}", cs.num_constraints());
let parameters = TestCRH::setup(rng).unwrap();
let primitive_result = TestCRH::evaluate(¶meters, &input).unwrap();
let parameters_var = <TestCRHGadget as CRHGadget<TestCRH, Fr>>::ParametersVar::new_witness(
ark_relations::ns!(cs, "parameters_var"),
|| Ok(¶meters),
)
.unwrap();
println!(
"number of constraints for input + params: {}",
cs.num_constraints()
);
let result_var = TestCRHGadget::evaluate(¶meters_var, &input_var).unwrap();
println!("number of constraints total: {}", cs.num_constraints());
let primitive_result = primitive_result.into_affine();
assert_eq!(primitive_result, result_var.value().unwrap().into_affine());
assert!(cs.is_satisfied().unwrap());
} |
01. array extension.js | (function () {
Array.prototype.last = function () {
return this[this.length - 1];
}
Array.prototype.skip = function (n) {
return this.slice(n);
};
Array.prototype.take = function (n) {
return this.slice(0, n);
};
Array.prototype.sum = function () { | let sum = 0;
for (let i = 0; i < this.length; i++) {
sum += this[i];
}
return sum;
};
Array.prototype.average = function () {
let sum = 0;
for (let i = 0; i < this.length; i++) {
sum += this[i];
}
return sum / this.length;
};
}()) | |
hyfed_compensator_project.py | """
The main class to obtain the compensation parameters from the clients, aggregate them,
and share the aggregation results with the server
Copyright 2021 Reza NasiriGerdeh. All Rights Reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
from hyfed_compensator.util.hyfed_parameters import Parameter, AuthenticationParameter, SyncParameter, ConnectionParameter, MonitoringParameter
from hyfed_compensator.util.status import OperationStatus
from hyfed_compensator.util.endpoint import EndPoint
from hyfed_compensator.util.utils import aggregate
from hyfed_compensator.util.monitoring import Timer, Counter
import pickle
import numpy as np
import time
import hashlib
import requests
from datetime import datetime
import logging
logger = logging.getLogger(__name__)
class HyFedCompensatorProject:
"""
Provide main functions to communicate with the clients and server,
and to aggregate the compensation parameters from the clients
"""
def __init__(self, project_id_hash, client_count):
|
    def add_client_parameters(self, request):
        """ Append client's authentication, sync, connection, and compensation parameters to the corresponding lists """
        try:
            # new communication round starts for compensator if the parameters from the first client is received
            if len(self.client_compensation_parameters) == 0:
                self.computation_timer.new_round()
                self.network_send_timer.new_round()
            # add traffic size to client -> compensator traffic counter
            traffic_size = int(request.headers['Content-Length'])
            self.client_compensator_traffic.increment(traffic_size)
            logger.debug(f'Project {self.project_id_hash}: {traffic_size} bytes added to client -> compensator traffic.')
            self.computation_timer.start()
            # extract client parameters from the request body
            # NOTE(review): pickle.loads on a network payload can execute arbitrary
            # code if the sender is untrusted — confirm clients are authenticated
            # before their requests reach this point.
            request_body = pickle.loads(request.body)
            authentication_parameters = request_body[Parameter.AUTHENTICATION]
            sync_parameters = request_body[Parameter.SYNCHRONIZATION]
            compensation_parameters = request_body[Parameter.COMPENSATION]
            connection_parameters = request_body[Parameter.CONNECTION]
            data_type_parameters = request_body[Parameter.DATA_TYPE]
            # authentication parameters
            hash_username = authentication_parameters[AuthenticationParameter.HASH_USERNAME]
            hash_token = authentication_parameters[AuthenticationParameter.HASH_TOKEN]
            # sync parameters
            step = sync_parameters[SyncParameter.PROJECT_STEP]
            comm_round = sync_parameters[SyncParameter.COMM_ROUND]
            # connection parameter
            server_url = connection_parameters[ConnectionParameter.SERVER_URL]
            # add the parameters to the lists; entries at the same index across
            # these parallel lists belong to the same client
            self.client_username_hashes.append(hash_username)
            self.client_token_hashes.append(hash_token)
            self.client_steps.append(step)
            self.client_comm_rounds.append(comm_round)
            self.server_urls.append(server_url)
            self.client_compensation_parameters.append(compensation_parameters)
            self.client_data_type_parameters.append(data_type_parameters)
            self.computation_timer.stop()
            logger.debug(f'Project {self.project_id_hash}: Client parameters added!')
        except Exception as add_parameter_exp:
            logger.error(f'Project {self.project_id_hash}: Adding client parameters was failed!')
            logger.error(f'Project {self.project_id_hash}: The exception is: {add_parameter_exp}')
            self.computation_timer.stop()
            self.set_operation_status_failed()
    def aggregate_client_parameters(self):
        """ Aggregate client parameters including the compensation parameters from all clients """
        try:
            self.computation_timer.start()
            logger.debug(f"Project {self.project_id_hash}: Aggregating client parameters ...")
            # make sure all clients are in the same step and communication round
            if not self.is_client_sync_ok():
                logger.error(f'Project {self.project_id_hash}: The step/comm_round of the clients are different!')
                self.computation_timer.stop()
                self.set_operation_status_failed()
                return
            # ensure all clients are coordinated by the same server
            if not self.is_server_url_same():
                logger.error(f'Project {self.project_id_hash}: Server URL is different for the clients!')
                self.computation_timer.stop()
                self.set_operation_status_failed()
                return
            # make sure compensator parameter names are the same across the clients
            if not self.is_client_compensation_parameters_ok():
                logger.error(f'Project {self.project_id_hash}: Compensation parameter names are different across clients!')
                self.computation_timer.stop()
                self.set_operation_status_failed()
                return
            # aggregate the compensation parameters; the first client's dict
            # provides the canonical parameter names (all verified equal above)
            for parameter_name in self.client_compensation_parameters[0].keys():
                compensation_values = self.compensation_parameter_to_list(parameter_name)
                parameter_data_type = self.client_data_type_parameters[0][parameter_name]
                aggregated_compensation_value = aggregate(compensation_values, parameter_data_type)
                # the aggregate is negated; presumably the server adds it to the
                # noisy model aggregate to cancel the clients' noise — confirm
                # against the server-side aggregation logic
                self.aggregated_compensation_parameters[parameter_name] = -aggregated_compensation_value
            self.computation_timer.stop()
        except Exception as aggregate_exp:
            logger.error(f'Project {self.project_id_hash}: Aggregating the compensation parameters was failed!')
            logger.error(f'Project {self.project_id_hash}: The exception is: {aggregate_exp}')
            self.computation_timer.stop()
            self.set_operation_status_failed()
    def send_to_server(self):
        """ Send aggregated authentication, sync, monitoring, and compensation parameters to the server """
        # create and serialize request body
        parameters_serialized = self.prepare_server_parameters()
        # retry with a fixed 30-second back-off between attempts
        # NOTE(review): if all tries fail, the failure is only logged — the
        # operation status is NOT set to failed here; confirm the server detects
        # the missing compensation parameters by other means (e.g. a timeout).
        max_tries = 10
        for _ in range(max_tries):
            try:
                logger.debug(f"Project {self.project_id_hash}: Sending the aggregated parameters to the server ...")
                self.network_send_timer.start()
                response = requests.post(url=f'{self.server_urls[0]}/{EndPoint.MODEL_COMPENSATION}',
                                         data=parameters_serialized,
                                         timeout=self.upload_parameters_timeout)
                if response.status_code == 200:
                    logger.debug(f"Project {self.project_id_hash}: Sending done!")
                    self.network_send_timer.stop()
                    return
                logger.error(f"Project {self.project_id_hash}: Sending failed, got {response.status_code} status code from the server!")
                self.network_send_timer.stop()
                time.sleep(30)
                continue
            except Exception as send_server_exp:
                logger.error(f"Project {self.project_id_hash}: Sending failed!")
                logger.error(f'Project {self.project_id_hash}: The exception is: {send_server_exp}')
                self.network_send_timer.stop()
                time.sleep(30)
    def aggregate_and_send(self):
        """ First aggregate, and then, send aggregated parameters to the server """
        # aggregate client parameters including compensation parameters
        self.aggregate_client_parameters()
        # send the aggregated parameters to the server; this runs even when
        # aggregation failed, since the payload carries the operation status
        self.send_to_server()
        # empty the lists/dictionaries for the next round
        self.client_token_hashes = list()
        self.client_username_hashes = list()
        self.client_steps = list()
        self.client_comm_rounds = list()
        self.client_compensation_parameters = list()
        self.client_data_type_parameters = list()
        self.server_urls = list()
        self.aggregated_compensation_parameters = dict()
    # ########## setter/getter functions
    def set_operation_status_done(self):
        """ If current operation is still in progress (not failed), then set it to Done """
        # FAILED is sticky: DONE never overwrites it
        if self.operation_status == OperationStatus.IN_PROGRESS:
            self.operation_status = OperationStatus.DONE
    def set_operation_status_in_progress(self):
        """ If previous operation is done (not failed), then set current operation status to In Progress """
        if self.operation_status == OperationStatus.DONE:
            self.operation_status = OperationStatus.IN_PROGRESS
    def set_operation_status_failed(self):
        """ Regardless of the current status, set the operation status to Failed """
        logger.error("Operation failed!")
        self.operation_status = OperationStatus.FAILED
    def set_last_updated_date(self):
        """ Refresh the timestamp used for garbage collection of idle projects """
        self.last_updated_date = datetime.now().timestamp()
    def is_operation_failed(self):
        """ Whether the current operation has been marked as failed """
        return self.operation_status == OperationStatus.FAILED
    def get_last_updated_date(self):
        """ Timestamp of the last activity (seconds since the epoch) """
        return self.last_updated_date
    # ########## Helper functions
    def is_client_sync_ok(self):
        """ Ensure the project step and communication round of all clients is the same """
        try:
            logger.debug(f"Project {self.project_id_hash}: checking synchronization status of the clients ...")
            # every client's step/round must equal the first client's
            return (np.all(np.array(self.client_steps) == self.client_steps[0]) and
                    np.all(np.array(self.client_comm_rounds) == self.client_comm_rounds[0]))
        except Exception as sync_exp:
            # e.g. IndexError when no client parameters have been received yet
            logger.error(f'Project {self.project_id_hash}: Checking sync status of the clients was failed')
            logger.error(f'Project {self.project_id_hash}: The exception is: {sync_exp}')
            return False
    def is_server_url_same(self):
        """ Ensure the server URLs from all clients are the same """
        try:
            logger.debug(f"Project {self.project_id_hash}: Checking whether clients are coordinated by the same server ...")
            # compare every reported URL against the first client's
            return np.all(np.array(self.server_urls) == self.server_urls[0])
        except Exception as server_url_exp:
            logger.error(f'Project {self.project_id_hash}: Checking server urls was failed!')
            logger.error(f'Project {self.project_id_hash}: The exception is: {server_url_exp}')
            return False
    def is_client_compensation_parameters_ok(self):
        """ Make sure the names of the compensation parameters are consistent across clients """
        try:
            logger.debug(f"Project {self.project_id_hash}: checking whether compensation parameter names are consistent across all clients ...")
            # compare each client's key set against the first client's
            client1_compensation_parameter_names = self.client_compensation_parameters[0].keys()
            for client_parameters in self.client_compensation_parameters:
                if client_parameters.keys() != client1_compensation_parameter_names:
                    return False
            return True
        except Exception as compensation_param_exp:
            logger.error(f'Project {self.project_id_hash}: Checking compensation parameter names was failed!')
            logger.error(f'Project {self.project_id_hash}: The exception is: {compensation_param_exp}')
            return False
    def is_client_data_type_parameters_ok(self):
        """ Make sure the names of the data type parameters are consistent across clients """
        try:
            logger.debug(f"Project {self.project_id_hash}: checking whether data type parameter names are consistent across all clients ...")
            # compare each client's key set against the first client's
            client1_data_type_parameter_names = self.client_data_type_parameters[0].keys()
            for client_parameters in self.client_data_type_parameters:
                if client_parameters.keys() != client1_data_type_parameter_names:
                    return False
            return True
        except Exception as compensation_param_exp:
            logger.error(f'Project {self.project_id_hash}: Checking data type parameter names was failed!')
            logger.error(f'Project {self.project_id_hash}: The exception is: {compensation_param_exp}')
            return False
    def should_aggregate_and_send(self):
        """ Check whether compensation parameters from all clients received """
        # one username hash is appended per client submission, so equality with
        # client_count means every client has reported in this round
        return len(self.client_username_hashes) == self.client_count
    def compensation_parameter_to_list(self, parameter_name):
        """
        Extract the compensation parameter of the clients specified with parameter_name as a list

        Note: on failure the operation status is set to failed and the
        (possibly partial) list collected so far is still returned.
        """
        compensation_parameter_list = []
        try:
            for compensation_parameter in self.client_compensation_parameters:
                compensation_parameter_list.append(compensation_parameter[parameter_name])
        except Exception as convert_exp:
            logger.error(f'Project {self.project_id_hash}: Converting compensation parameters to list was failed!')
            logger.error(f'Project {self.project_id_hash}: The exception is: {convert_exp}')
            self.set_operation_status_failed()
        return compensation_parameter_list
    def prepare_server_parameters(self):
        """ Prepare the parameters shared with the server """
        try:
            self.computation_timer.start()
            # initialize authentication parameters
            # hashes are computed over the *sorted* client hashes so the digest
            # is independent of the order in which clients contacted us
            authentication_parameters = dict()
            hash_username_hashes = hashlib.sha256(''.join(sorted(self.client_username_hashes)).encode('utf-8')).hexdigest()
            hash_token_hashes = hashlib.sha256(''.join(sorted(self.client_token_hashes)).encode('utf-8')).hexdigest()
            authentication_parameters[AuthenticationParameter.HASH_PROJECT_ID] = self.project_id_hash
            authentication_parameters[AuthenticationParameter.HASH_USERNAME_HASHES] = hash_username_hashes
            authentication_parameters[AuthenticationParameter.HASH_TOKEN_HASHES] = hash_token_hashes
            # initialize synchronization parameters
            # step/round verified identical across clients before this is called
            sync_parameters = dict()
            sync_parameters[SyncParameter.PROJECT_STEP] = self.client_steps[0]
            sync_parameters[SyncParameter.COMM_ROUND] = self.client_comm_rounds[0]
            sync_parameters[SyncParameter.OPERATION_STATUS] = self.operation_status
            # monitoring parameters (timings and traffic counters for this round)
            monitoring_parameters = dict()
            monitoring_parameters[MonitoringParameter.COMPUTATION_TIME] = self.computation_timer.get_total_duration()
            monitoring_parameters[MonitoringParameter.NETWORK_SEND_TIME] = self.network_send_timer.get_total_duration()
            monitoring_parameters[MonitoringParameter.CLIENT_COMPENSATOR_TRAFFIC] = self.client_compensator_traffic.total_count
            # server parameters in json
            server_parameters_json = {Parameter.AUTHENTICATION: authentication_parameters,
                                      Parameter.SYNCHRONIZATION: sync_parameters,
                                      Parameter.MONITORING: monitoring_parameters,
                                      Parameter.COMPENSATION: self.aggregated_compensation_parameters
                                      }
            server_parameters_serialized = pickle.dumps(server_parameters_json)
            self.computation_timer.stop()
            return server_parameters_serialized
        except Exception as prepare_exp:
            logger.error(f'Project {self.project_id_hash}: Preparing server parameters was failed!')
            logger.error(f'Project {self.project_id_hash}: The exception is: {prepare_exp}')
            self.computation_timer.stop()
            self.set_operation_status_failed()
            # NOTE(review): implicitly returns None here; send_to_server would
            # then post a None body — confirm this is the intended behavior
| """ Initialize the compensator project using the hash of the project ID and the number of clients """
# for compensator to know whether it has received compensation parameters from all clients
self.client_count = client_count
# hash of the project ID, which should be the same for all clients
self.project_id_hash = project_id_hash
# authentication parameters from the clients
self.client_token_hashes = list()
self.client_username_hashes = list()
# sync parameters from the clients
self.client_steps = list()
self.client_comm_rounds = list()
# compensation parameters (noise values) from the clients
self.client_compensation_parameters = list()
# data type parameters from clients
self.client_data_type_parameters = list()
# clients tell compensator where to send the aggregated noise values
self.server_urls = list()
# aggregated parameters have the same parameter names as the local model parameters of the clients
self.aggregated_compensation_parameters = dict()
# to tell the server whether the aggregation of noise values have been successful
self.operation_status = OperationStatus.DONE
# monitoring timers
self.computation_timer = Timer(name='Computation')
self.network_send_timer = Timer(name='Network Send')
# counter to track the traffic client -> compensator (in terms of bytes)
self.client_compensator_traffic = Counter("client->compensator")
self.upload_parameters_timeout = 600
# used for garbage collection purposes
self.last_updated_date = datetime.now().timestamp() |
base.py | """Base classes for all estimators."""
# Author: Gael Varoquaux <[email protected]>
# License: BSD 3 clause
import copy
import inspect
import warnings
import numpy as np
from scipy import sparse
from .externals import six
###############################################################################
def clone(estimator, safe=True):
    """Constructs a new estimator with the same parameters.
    Clone does a deep copy of the model in an estimator
    without actually copying attached data. It yields a new estimator
    with the same parameters that has not been fit on any data.
    Parameters
    ----------
    estimator: estimator object, or list, tuple or set of objects
        The estimator or group of estimators to be cloned
    safe: boolean, optional
        If safe is false, clone will fall back to a deepcopy on objects
        that are not estimators.
    """
    estimator_type = type(estimator)
    # XXX: not handling dictionaries
    if estimator_type in (list, tuple, set, frozenset):
        # clone each member, preserving the container type
        return estimator_type([clone(e, safe=safe) for e in estimator])
    elif not hasattr(estimator, 'get_params'):
        if not safe:
            return copy.deepcopy(estimator)
        else:
            raise TypeError("Cannot clone object '%s' (type %s): "
                            "it does not seem to be a scikit-learn estimator "
                            "it does not implement a 'get_params' methods."
                            % (repr(estimator), type(estimator)))
    # rebuild the estimator from its constructor parameters (cloned recursively)
    klass = estimator.__class__
    new_object_params = estimator.get_params(deep=False)
    for name, param in six.iteritems(new_object_params):
        new_object_params[name] = clone(param, safe=False)
    new_object = klass(**new_object_params)
    params_set = new_object.get_params(deep=False)
    # quick sanity check of the parameters of the clone
    for name in new_object_params:
        param1 = new_object_params[name]
        param2 = params_set[name]
        if isinstance(param1, np.ndarray):
            # For most ndarrays, we do not test for complete equality
            if not isinstance(param2, type(param1)):
                equality_test = False
            elif (param1.ndim > 0
                    and param1.shape[0] > 0
                    and isinstance(param2, np.ndarray)
                    and param2.ndim > 0
                    and param2.shape[0] > 0):
                equality_test = (
                    param1.shape == param2.shape
                    and param1.dtype == param2.dtype
                    # We have to use '.flat' for 2D arrays
                    and param1.flat[0] == param2.flat[0]
                    and param1.flat[-1] == param2.flat[-1]
                )
            else:
                equality_test = np.all(param1 == param2)
        elif sparse.issparse(param1):
            # For sparse matrices equality doesn't work
            if not sparse.issparse(param2):
                equality_test = False
            elif param1.size == 0 or param2.size == 0:
                equality_test = (
                    param1.__class__ == param2.__class__
                    and param1.size == 0
                    and param2.size == 0
                )
            else:
                equality_test = (
                    param1.__class__ == param2.__class__
                    and param1.data[0] == param2.data[0]
                    and param1.data[-1] == param2.data[-1]
                    and param1.nnz == param2.nnz
                    and param1.shape == param2.shape
                )
        else:
            # NOTE(review): plain == comparison — a NaN-valued parameter compares
            # unequal to itself and would trigger the RuntimeError below; confirm
            # no estimator uses NaN as a constructor default.
            equality_test = new_object_params[name] == params_set[name]
        if not equality_test:
            raise RuntimeError('Cannot clone object %s, as the constructor '
                               'does not seem to set parameter %s' %
                               (estimator, name))
    return new_object
###############################################################################
def _pprint(params, offset=0, printer=repr):
    """Pretty print the dictionary 'params'
    Parameters
    ----------
    params: dict
        The dictionary to pretty print
    offset: int
        The offset in characters to add at the begin of each line.
    printer:
        The function to convert entries to strings, typically
        the builtin str or repr
    """
    # Do a multi-line justified repr:
    # numpy print options are tightened for compact array reprs and restored
    # at the end of this function
    options = np.get_printoptions()
    np.set_printoptions(precision=5, threshold=64, edgeitems=2)
    params_list = list()
    this_line_length = offset
    line_sep = ',\n' + (1 + offset // 2) * ' '
    for i, (k, v) in enumerate(sorted(six.iteritems(params))):
        if type(v) is float:
            # use str for representing floating point numbers
            # this way we get consistent representation across
            # architectures and versions.
            this_repr = '%s=%s' % (k, str(v))
        else:
            # use repr of the rest
            this_repr = '%s=%s' % (k, printer(v))
        # truncate overly long reprs, keeping head and tail
        if len(this_repr) > 500:
            this_repr = this_repr[:300] + '...' + this_repr[-100:]
        if i > 0:
            # wrap to a new line at ~75 characters or on embedded newlines
            if (this_line_length + len(this_repr) >= 75 or '\n' in this_repr):
                params_list.append(line_sep)
                this_line_length = len(line_sep)
            else:
                params_list.append(', ')
                this_line_length += 2
        params_list.append(this_repr)
        this_line_length += len(this_repr)
    np.set_printoptions(**options)
    lines = ''.join(params_list)
    # Strip trailing space to avoid nightmare in doctests
    lines = '\n'.join(l.rstrip(' ') for l in lines.split('\n'))
    return lines
###############################################################################
class BaseEstimator(object):
"""Base class for all estimators in scikit-learn
Notes
-----
All estimators should specify all the parameters that can be set
at the class level in their ``__init__`` as explicit keyword
arguments (no ``*args`` or ``**kwargs``).
"""
@classmethod
def _get_param_names(cls):
|
    def get_params(self, deep=True):
        """Get parameters for this estimator.
        Parameters
        ----------
        deep: boolean, optional
            If True, will return the parameters for this estimator and
            contained subobjects that are estimators.
        Returns
        -------
        params : mapping of string to any
            Parameter names mapped to their values.
        """
        out = dict()
        for key in self._get_param_names():
            # We need deprecation warnings to always be on in order to
            # catch deprecated param values.
            # This is set in utils/__init__.py but it gets overwritten
            # when running under python3 somehow.
            warnings.simplefilter("always", DeprecationWarning)
            try:
                with warnings.catch_warnings(record=True) as w:
                    value = getattr(self, key, None)
                if len(w) and w[0].category == DeprecationWarning:
                    # if the parameter is deprecated, don't show it
                    continue
            finally:
                # pop the filter pushed by simplefilter above
                # NOTE(review): assumes nothing else modified the filter list in
                # between — confirm single-threaded use.
                warnings.filters.pop(0)
            # XXX: should we rather test if instance of estimator?
            if deep and hasattr(value, 'get_params'):
                # prefix nested parameters with "<name>__"
                deep_items = value.get_params().items()
                out.update((key + '__' + k, val) for k, val in deep_items)
            out[key] = value
        return out
def set_params(self, **params):
"""Set the parameters of this estimator.
The method works on simple estimators as well as on nested objects
(such as pipelines). The former have parameters of the form
``<component>__<parameter>`` so that it's possible to update each
component of a nested object.
Returns
-------
self
"""
if not params:
# Simple optimisation to gain speed (inspect is slow)
return self
valid_params = self.get_params(deep=True)
for key, value in six.iteritems(params):
split = key.split('__', 1)
if len(split) > 1:
# nested objects case
name, sub_name = split
if not name in valid_params:
raise ValueError('Invalid parameter %s for estimator %s' %
(name, self))
sub_object = valid_params[name]
sub_object.set_params(**{sub_name: value})
else:
# simple objects case
if not key in valid_params:
raise ValueError('Invalid parameter %s ' 'for estimator %s'
% (key, self.__class__.__name__))
setattr(self, key, value)
return self
    def __repr__(self):
        # renders as "ClassName(param=value, ...)"; the offset keeps wrapped
        # parameter lines aligned under the opening parenthesis
        class_name = self.__class__.__name__
        return '%s(%s)' % (class_name, _pprint(self.get_params(deep=False),
                                               offset=len(class_name),),)
###############################################################################
class ClassifierMixin(object):
    """Mixin class for all classifiers in scikit-learn."""
    def score(self, X, y, sample_weight=None):
        """Returns the mean accuracy on the given test data and labels.
        Parameters
        ----------
        X : array-like, shape = (n_samples, n_features)
            Test samples.
        y : array-like, shape = (n_samples,)
            True labels for X.
        sample_weight : array-like, shape = [n_samples], optional
            Sample weights.
        Returns
        -------
        score : float
            Mean accuracy of self.predict(X) wrt. y.
        """
        # imported here rather than at module top — presumably to avoid a
        # circular import with the metrics subpackage; confirm before moving
        from .metrics import accuracy_score
        return accuracy_score(y, self.predict(X), sample_weight=sample_weight)
###############################################################################
class RegressorMixin(object):
    """Mixin class for all regression estimators in scikit-learn."""
    def score(self, X, y, sample_weight=None):
        """Returns the coefficient of determination R^2 of the prediction.
        The coefficient R^2 is defined as (1 - u/v), where u is the regression
        sum of squares ((y_true - y_pred) ** 2).sum() and v is the residual
        sum of squares ((y_true - y_true.mean()) ** 2).sum().
        Best possible score is 1.0, lower values are worse.
        Parameters
        ----------
        X : array-like, shape = (n_samples, n_features)
            Test samples.
        y : array-like, shape = (n_samples,)
            True values for X.
        sample_weight : array-like, shape = [n_samples], optional
            Sample weights.
        Returns
        -------
        score : float
            R^2 of self.predict(X) wrt. y.
        """
        # imported here rather than at module top — presumably to avoid a
        # circular import with the metrics subpackage; confirm before moving
        from .metrics import r2_score
        return r2_score(y, self.predict(X), sample_weight=sample_weight)
###############################################################################
class ClusterMixin(object):
    """Mixin class for all cluster estimators in scikit-learn."""
    def fit_predict(self, X, y=None):
        """Performs clustering on X and returns cluster labels.
        Parameters
        ----------
        X : ndarray, shape (n_samples, n_features)
            Input data.
        y : Ignored
            Present only for API consistency; not used.
        Returns
        -------
        y : ndarray, shape (n_samples,)
            cluster labels
        """
        # non-optimized default implementation; override when a better
        # method is possible for a given clustering algorithm
        self.fit(X)
        return self.labels_
class BiclusterMixin(object):
    """Mixin class for all bicluster estimators in scikit-learn"""
    @property
    def biclusters_(self):
        """Convenient way to get row and column indicators together.
        Returns the ``rows_`` and ``columns_`` members.
        """
        return self.rows_, self.columns_
    def get_indices(self, i):
        """Row and column indices of the i'th bicluster.
        Only works if ``rows_`` and ``columns_`` attributes exist.
        Returns
        -------
        row_ind : np.array, dtype=np.intp
            Indices of rows in the dataset that belong to the bicluster.
        col_ind : np.array, dtype=np.intp
            Indices of columns in the dataset that belong to the bicluster.
        """
        # delayed import — presumably avoids a circular dependency; confirm
        from .cluster.bicluster.utils import get_indices
        return get_indices(self.rows_[i], self.columns_[i])
    def get_shape(self, i):
        """Shape of the i'th bicluster.
        Returns
        -------
        shape : (int, int)
            Number of rows and columns (resp.) in the bicluster.
        """
        from .cluster.bicluster.utils import get_shape
        return get_shape(self.rows_[i], self.columns_[i])
    def get_submatrix(self, i, data):
        """Returns the submatrix corresponding to bicluster `i`.
        Works with sparse matrices. Only works if ``rows_`` and
        ``columns_`` attributes exist.
        """
        from .cluster.bicluster.utils import get_submatrix
        return get_submatrix(self.rows_[i], self.columns_[i], data)
###############################################################################
class TransformerMixin(object):
    """Mixin class for all transformers in scikit-learn."""
    def fit_transform(self, X, y=None, **fit_params):
        """Fit to data, then transform it.
        Fits transformer to X and y with optional parameters fit_params
        and returns a transformed version of X.
        Parameters
        ----------
        X : numpy array of shape [n_samples, n_features]
            Training set.
        y : numpy array of shape [n_samples]
            Target values.
        Returns
        -------
        X_new : numpy array of shape [n_samples, n_features_new]
            Transformed array.
        """
        # non-optimized default implementation; override when a better
        # method is possible for a given transformer
        if y is None:
            # fit method of arity 1 (unsupervised transformation)
            return self.fit(X, **fit_params).transform(X)
        else:
            # fit method of arity 2 (supervised transformation)
            return self.fit(X, y, **fit_params).transform(X)
###############################################################################
class MetaEstimatorMixin(object):
"""Mixin class for all meta estimators in scikit-learn."""
# this is just a tag for the moment
###############################################################################
# XXX: Temporary solution to figure out if an estimator is a classifier
def _get_sub_estimator(estimator):
"""Returns the final estimator if there is any."""
if hasattr(estimator, 'estimator'):
# GridSearchCV and other CV-tuned estimators
return _get_sub_estimator(estimator.estimator)
if hasattr(estimator, 'steps'):
# Pipeline
return _get_sub_estimator(estimator.steps[-1][1])
return estimator
def is_classifier(estimator):
    """Returns True if the given estimator is (probably) a classifier."""
    # unwrap meta-estimators first; "probably" because duck-typed classifiers
    # that do not inherit from ClassifierMixin are reported as non-classifiers
    estimator = _get_sub_estimator(estimator)
    return isinstance(estimator, ClassifierMixin)
| """Get parameter names for the estimator"""
# fetch the constructor or the original constructor before
# deprecation wrapping if any
init = getattr(cls.__init__, 'deprecated_original', cls.__init__)
if init is object.__init__:
# No explicit constructor to introspect
return []
# introspect the constructor arguments to find the model parameters
# to represent
args, varargs, kw, default = inspect.getargspec(init)
if varargs is not None:
raise RuntimeError("scikit-learn estimators should always "
"specify their parameters in the signature"
" of their __init__ (no varargs)."
" %s doesn't follow this convention."
% (cls, ))
# Remove 'self'
# XXX: This is going to fail if the init is a staticmethod, but
# who would do this?
args.pop(0)
args.sort()
return args |
/// Returns the sum of `a` and `b`.
///
/// Overflow panics in debug builds and wraps in release builds.
pub fn add(a: i32, b: i32) -> i32 {
    a + b
}
// This is a really bad adding function, its purpose is to fail in this
// example.
/// Intentionally broken "add" (computes `a - b`) used to demonstrate a
/// failing test; do not fix.
#[allow(dead_code)]
fn bad_add(a: i32, b: i32) -> i32 {
    a - b
}
#[cfg(test)]
mod tests {
// Note this useful idiom: importing names from outer (for mod tests) scope.
use super::*;
#[test]
fn test_add() {
assert_eq!(add(1, 2), 3);
}
#[test]
fn test_bad_add() {
// This assert would fire and test will fail.
// Please note, that private functions can be tested too!
assert_eq!(bad_add(2, 1), 1); | }
} | |
getOpenShiftCluster.go | // *** WARNING: this file was generated by the Pulumi SDK Generator. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
package v20200430
import ( | var rv LookupOpenShiftClusterResult
err := ctx.Invoke("azure-nextgen:redhatopenshift/v20200430:getOpenShiftCluster", args, &rv, opts...)
if err != nil {
return nil, err
}
return &rv, nil
}
type LookupOpenShiftClusterArgs struct {
// The name of the resource group. The name is case insensitive.
ResourceGroupName string `pulumi:"resourceGroupName"`
// The name of the OpenShift cluster resource.
ResourceName string `pulumi:"resourceName"`
}
// OpenShiftCluster represents an Azure Red Hat OpenShift cluster.
type LookupOpenShiftClusterResult struct {
// The cluster API server profile.
ApiserverProfile *APIServerProfileResponse `pulumi:"apiserverProfile"`
// The cluster profile.
ClusterProfile *ClusterProfileResponse `pulumi:"clusterProfile"`
// The console profile.
ConsoleProfile *ConsoleProfileResponse `pulumi:"consoleProfile"`
// The cluster ingress profiles.
IngressProfiles []IngressProfileResponse `pulumi:"ingressProfiles"`
// The geo-location where the resource lives
Location string `pulumi:"location"`
// The cluster master profile.
MasterProfile *MasterProfileResponse `pulumi:"masterProfile"`
// The name of the resource
Name string `pulumi:"name"`
// The cluster network profile.
NetworkProfile *NetworkProfileResponse `pulumi:"networkProfile"`
// The cluster provisioning state (immutable).
ProvisioningState *string `pulumi:"provisioningState"`
// The cluster service principal profile.
ServicePrincipalProfile *ServicePrincipalProfileResponse `pulumi:"servicePrincipalProfile"`
// Resource tags.
Tags map[string]string `pulumi:"tags"`
// The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
Type string `pulumi:"type"`
// The cluster worker profiles.
WorkerProfiles []WorkerProfileResponse `pulumi:"workerProfiles"`
} | "github.com/pulumi/pulumi/sdk/v2/go/pulumi"
)
func LookupOpenShiftCluster(ctx *pulumi.Context, args *LookupOpenShiftClusterArgs, opts ...pulumi.InvokeOption) (*LookupOpenShiftClusterResult, error) { |
write-start-failed.ts | import { Message, buildMessage, publishMessage } from 'amqp-extension';
import { MessageQueueSelfToUIRoutingKey } from '../../config/services/rabbitmq';
import { TrainResultEvent, TrainResultStep } from '../../domains/train-result/type';
export async function writeStartFailedEvent(message: Message, error: Error) {
await publishMessage(buildMessage({
options: {
routingKey: MessageQueueSelfToUIRoutingKey.EVENT,
},
type: TrainResultEvent.FAILED,
data: {
...message.data,
error: {
message: error.message,
step: TrainResultStep.START,
},
},
metadata: message.metadata,
}));
return message;
} | ||
client_test.go | /*
* Copyright (C) 2020 Intel Corporation
* SPDX-License-Identifier: BSD-3-Clause
*/
package vs
import (
"crypto/x509"
"fmt"
"io/ioutil"
"net"
"net/http"
"net/url"
"testing"
"github.com/gorilla/mux"
"github.com/intel-secl/intel-secl/v3/pkg/clients/aas"
)
var SampleSamlReportPath = "../../ihub/test/resources/saml_report.xml"
//AASToken token for AAS
var AASToken = "eyJhbGciOiJSUzM4NCIsImtpZCI6ImU5NjI1NzI0NTUwNzMwZGI3N2I2YmEyMjU1OGNjZTEyOTBkNjRkNTciLCJ0eXAiOiJKV1QifQ.eyJyb2xlcyI6W3sic2VydmljZSI6IkFBUyIsIm5hbWUiOiJSb2xlTWFuYWdlciJ9LHsic2VydmljZSI6IkFBUyIsIm5hbWUiOiJVc2VyTWFuYWdlciJ9LHsic2VydmljZSI6IkFBUyIsIm5hbWUiOiJVc2VyUm9sZU1hbmFnZXIifSx7InNlcnZpY2UiOiJUQSIsIm5hbWUiOiJBZG1pbmlzdHJhdG9yIn0seyJzZXJ2aWNlIjoiVlMiLCJuYW1lIjoiQWRtaW5pc3RyYXRvciJ9LHsic2VydmljZSI6IktNUyIsIm5hbWUiOiJLZXlDUlVEIn0seyJzZXJ2aWNlIjoiQUgiLCJuYW1lIjoiQWRtaW5pc3RyYXRvciJ9LHsic2VydmljZSI6IldMUyIsIm5hbWUiOiJBZG1pbmlzdHJhdG9yIn1dLCJwZXJtaXNzaW9ucyI6W3sic2VydmljZSI6IkFIIiwicnVsZXMiOlsiKjoqOioiXX0seyJzZXJ2aWNlIjoiS01TIiwicnVsZXMiOlsiKjoqOioiXX0seyJzZXJ2aWNlIjoiVEEiLCJydWxlcyI6WyIqOio6KiJdfSx7InNlcnZpY2UiOiJWUyIsInJ1bGVzIjpbIio6KjoqIl19LHsic2VydmljZSI6IldMUyIsInJ1bGVzIjpbIio6KjoqIl19XSwiZXhwIjoxNTk0NDgxMjAxLCJpYXQiOjE1OTQ0NzQwMDEsImlzcyI6IkFBUyBKV1QgSXNzdWVyIiwic3ViIjoiZ2xvYmFsX2FkbWluX3VzZXIifQ.euPkZEv0P9UC8ni05hb5wczFa9_C2G4mNAl4nVtBQ0oS-00qK4wC52Eg1UZqAjkVWXafHRcEjjsdQHs1LtjECFmU6zUNOMEtLLIOZwhnD7xlHkC-flpzLMT0W5162nsW4xSp-cF-r_05C7PgFcK9zIfMtn6_MUMcxlSXkX21AJWwfhVfz4ogEY2mqt73Ramd1tvhGbsz7i3XaljnopSTV7djNMeMZ33MPzJYGl5ph_AKBZwhBTA0DV3JAPTE9jXqrhtOG1iR1yM9kHChskzxAaRDm0v3V07ySgkxyv7dAzMW5Ek_NGCulyjP5N_WgSeuTkw26A8kZpSrNRWdbnyOr_EZ4y6wDX9GMARrR4PyTb6hU9x3ejahxs3L_Z7BzbYpO4WF1CvlYl5BoH71PnFPNKMkvbIFv1XcLPwKeLQpohEOr7zEN4EeltjpqBGCgiCFz4vHu5rk2iFCu1JJPDTVR3jJplJRZgCFiwsh42R3oomP-q43k8_PPLIMjaxAADgd"
func mockServer(t *testing.T) (*http.Server, string) {
router := mux.NewRouter()
router.HandleFunc("/aas/token", func(w http.ResponseWriter, router *http.Request) {
w.Header().Set("Content-Type", "application/json")
w.Header().Add("Strict-Transport-Security", "max-age=63072000; includeSubDomains")
w.Write([]byte(AASToken))
}).Methods("POST")
router.HandleFunc("/mtwilson/v2/reports", func(w http.ResponseWriter, router *http.Request) {
w.Header().Set("Content-Type", "application/json")
w.Header().Add("Strict-Transport-Security", "max-age=63072000; includeSubDomains")
samlReport, err := ioutil.ReadFile(SampleSamlReportPath)
if err != nil {
t.Log("vs/client_test:mockServer(): Unable to read file", err)
}
w.Write([]byte(samlReport))
}).Methods("GET")
return serveController(t, router)
}
func serveController(t *testing.T, handler http.Handler) (*http.Server, string) {
//Listener Implementations
listener, err := net.Listen("tcp", ":0")
if err != nil {
t.Log("vs/client_test:ServeController() : Unable to initiate Listener", err)
}
port := listener.Addr().(*net.TCPAddr).Port
err = listener.Close()
if err != nil {
t.Log("vs/client_test:ServeController() : Unable to close Listener", err)
}
portString := fmt.Sprintf(":%d", port)
httpSer := &http.Server{
Addr: portString,
Handler: handler,
}
go httpSer.ListenAndServe()
return httpSer, portString
}
func TestClient_GetCaCerts(t *testing.T) {
server, portString := mockServer(t)
defer func() {
derr := server.Close()
if derr != nil {
log.WithError(derr).Error("Error closing server")
}
}()
aasUrl, _ := url.Parse("http://localhost" + portString + "/aas")
baseURL, _ := url.Parse("http://localhost" + portString + "/mtwilson/v2")
client1 := Client{
AASURL: aasUrl,
BaseURL: baseURL,
Password: "admin@ihub",
UserName: "hubadminpass",
CertArray: []x509.Certificate{},
}
type args struct {
domain string
}
tests := []struct {
name string
c Client
args args
wantErr bool
}{
{
name: "Test 1 Negative Case",
c: client1,
wantErr: true,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
_, err := tt.c.GetCaCerts(tt.args.domain)
if (err != nil) != tt.wantErr {
t.Errorf("Client.GetCaCerts() error = %v, wantErr %v", err, tt.wantErr)
return
}
})
}
}
func | (t *testing.T) {
server, portString := mockServer(t)
defer func() {
derr := server.Close()
if derr != nil {
log.WithError(derr).Error("Error closing server")
}
}()
aasUrl, _ := url.Parse("http://localhost" + portString + "/aas")
baseURL, _ := url.Parse("http://localhost" + portString + "/mtwilson/v2")
client1 := Client{
AASURL: aasUrl,
BaseURL: baseURL,
Password: "admin@ihub",
UserName: "hubadminpass",
CertArray: []x509.Certificate{},
}
fmt.Println(client1)
type args struct {
url string
}
tests := []struct {
name string
c Client
args args
wantErr bool
}{
{
name: "Test 1 Positive Case",
c: client1,
wantErr: false,
args: args{
url: "http://localhost" + portString + "/mtwilson/v2/reports",
},
},
}
for _, tt := range tests {
_ = aas.NewJWTClient("")
t.Run(tt.name, func(t *testing.T) {
_, err := tt.c.GetSamlReports(tt.args.url)
if (err != nil) != tt.wantErr {
t.Errorf("Client.GetSamlReports() error = %v, wantErr %v", err, tt.wantErr)
}
})
}
}
| TestClient_GetSamlReports |
alumin.js | function alimunimJoinery(input) {
let countJoinery = Number(input[0]);
let type = input[1];
let deliveryOrNo = input[2];
let singlePrice = 0;
let totalPrice = 0;
switch (type) {
case "90X130":
singlePrice = 110;
break;
case "100X150":
singlePrice = 140;
break;
case "130X180":
singlePrice = 190;
break;
case "200X300":
singlePrice = 250;
break;
}
totalPrice = countJoinery * singlePrice;
if (type === "90X130" && countJoinery > 60) {
totalPrice *= 0.92;
} else if (type === "90X130" && countJoinery > 30) {
totalPrice *= 0.95; | } else if (type === "130X180" && countJoinery > 50) {
totalPrice *= 0.88;
} else if (type === "130X180" && countJoinery > 20) {
totalPrice *= 0.93;
} else if (type === "200X300" && countJoinery > 50) {
totalPrice *= 0.86;
} else if (type === "200X300" && countJoinery > 25) {
totalPrice *= 0.91;
}
if (deliveryOrNo === "With delivery") {
totalPrice += 60;
}
if (countJoinery > 99) {
totalPrice *= 0.96;
}
if (countJoinery < 10) {
console.log("Invalid order");
} else {
console.log(`${totalPrice.toFixed(2)} BGN`);
}
} | } else if (type === "100X150" && countJoinery > 80) {
totalPrice *= 0.9;
} else if (type === "100X150" && countJoinery > 40) {
totalPrice *= 0.94; |
derive.rs | use crate::{
openapi::case::RenameRule,
parse::{Delimited, KeyValue, Paren},
route::EqStr,
util::ItemImplExt,
};
use pmutil::{q, ToTokensExt};
use proc_macro2::{Ident, TokenStream};
use syn::{
parse2,
punctuated::{Pair, Punctuated},
Attribute, Block, Data, DeriveInput, Expr, Field, FieldValue, Fields, GenericParam, ItemImpl,
Lit, LitStr, Meta, NestedMeta, Stmt, Token, TraitBound, TraitBoundModifier, TypeParamBound,
};
/// Search for `#[serde(rename_all = '')]`
fn get_rename_all(attrs: &[Attribute]) -> RenameRule {
attrs
.iter()
.find_map(|attr| {
//
if !attr.path.is_ident("serde") {
return None;
}
match parse2::<Paren<KeyValue<Ident, LitStr>>>(attr.tokens.clone()).map(|v| v.inner) {
Ok(kv) if kv.key == "rename_all" => Some(kv.value.value().parse().unwrap()),
_ => None,
}
})
.unwrap_or(RenameRule::None)
}
/// Search for `#[serde(rename = '')]`
fn get_rename(attrs: &[Attribute]) -> Option<String> {
attrs.iter().find_map(|attr| {
//
if !attr.path.is_ident("serde") {
return None;
}
// Handle #[serde(rename = "foo")]
let meta = match parse2::<Paren<Meta>>(attr.tokens.clone()) {
Ok(v) => v.inner,
Err(..) => return None,
};
if meta.path().is_ident("rename") {
return match meta {
Meta::NameValue(meta) => match meta.lit {
Lit::Str(s) => Some(s.value()),
_ => None,
},
_ => None,
};
}
None
})
} | let mut de = false;
for attr in attrs {
if attr.path.is_ident("serde") {
match parse2::<Paren<Meta>>(attr.tokens.clone()) {
Ok(Paren {
inner: Meta::Path(pa),
}) => {
if pa.is_ident("skip") {
return (true, true);
} else if pa.is_ident("skip_serializing") {
ser = true
} else if pa.is_ident("skip_deserializing") {
de = true
}
}
Ok(..) | Err(..) => {}
};
}
}
(ser, de)
}
fn field_name(type_attrs: &[Attribute], field: &Field) -> String {
if let Some(s) = get_rename(&field.attrs) {
return s;
}
let rule = get_rename_all(type_attrs);
rule.apply_to_field(&field.ident.as_ref().unwrap().to_string())
}
macro_rules! invalid_schema_usage {
($act:expr) => {
// rust-lang/rust#54140
// panic!("{}", $act.__span().error("Correct usage: #[schema(description =
// \"foo\", example = \"bar\")]"));
panic!(
"Invalid schema usage: {}
Correct usage: #[schema(description = \"foo\", example = \"bar\")]",
$act.dump()
);
};
}
fn extract_example(attrs: &mut Vec<Attribute>) -> Option<TokenStream> {
let mut v = None;
let mut process_nv = |n: syn::MetaNameValue| {
if n.path.is_ident("example") {
assert!(
v.is_none(),
"duplicate #[schema(example = \"foo\")] detected"
);
v = Some(match n.lit {
Lit::Str(s) => s
.value()
.parse::<TokenStream>()
.expect("expected example to be path"),
l => panic!(
"#[schema(example = \"foo\")]: value of example should be a string literal, \
but got {}",
l.dump()
),
});
}
};
for attr in attrs {
if attr.path.is_ident("schema") {
for config in parse2::<Paren<Delimited<Meta>>>(attr.tokens.clone())
.expect("invalid schema config found while extracting example")
.inner
.inner
{
match config {
Meta::NameValue(n) => process_nv(n),
Meta::List(l) => {
for el in l.nested {
match el {
NestedMeta::Meta(Meta::NameValue(n)) => process_nv(n),
_ => invalid_schema_usage!(attr),
}
}
}
_ => invalid_schema_usage!(attr),
}
}
}
}
let v = v?;
match syn::parse2::<Lit>(v.clone()) {
Ok(v) => {
let v = match v {
Lit::Str(v) => q!(Vars { v }, { String(v.into()) }),
Lit::ByteStr(_) => panic!("byte string is not a valid example"),
Lit::Byte(_) => panic!("byte is not a valid example"),
Lit::Char(v) => q!(Vars { v }, { String(v.into()) }),
Lit::Int(v) => q!(Vars { v }, { Number(v.into()) }),
Lit::Float(v) => q!(Vars { v }, { Number(v.into()) }),
Lit::Bool(v) => q!(Vars { v }, { Bool(v) }),
Lit::Verbatim(_) => unimplemented!("Verbatim?"),
};
Some(q!(Vars { v }, (rweb::rt::serde_json::Value::v)).into())
}
Err(..) => Some(v),
}
}
fn extract_doc(attrs: &mut Vec<Attribute>) -> String {
let mut doc = None;
let mut comments = String::new();
let mut process_doc_nv = |nv: syn::MetaNameValue| {
if nv.path.is_ident("description") {
if let Lit::Str(s) = nv.lit {
doc = Some(s.value())
} else {
panic!(
"#[schema(description = \"foo\")]: value of example should be a string \
literal, but got {}",
nv.dump()
)
}
}
};
for attr in attrs {
if attr.path.is_ident("schema") {
for config in parse2::<Paren<Delimited<Meta>>>(attr.tokens.clone())
.expect("invalid schema config found while extracting example")
.inner
.inner
{
match config {
Meta::List(l) => {
for tag in l.nested {
match tag {
NestedMeta::Meta(Meta::NameValue(nv)) => process_doc_nv(nv),
_ => invalid_schema_usage!(attr),
}
}
}
Meta::NameValue(nv) => process_doc_nv(nv),
_ => invalid_schema_usage!(attr),
}
}
} else if attr.path.is_ident("doc") {
if let Ok(v) = parse2::<EqStr>(attr.tokens.clone()) {
if !comments.is_empty() {
comments.push(' ');
}
comments.push_str(&v.value.value());
};
}
}
match doc {
Some(v) => v,
None => comments,
}
}
fn handle_field(type_attrs: &[Attribute], f: &mut Field) -> Stmt {
let name_str = field_name(type_attrs, &*f);
let desc = extract_doc(&mut f.attrs);
let example_v = extract_example(&mut f.attrs);
let (skip_ser, skip_de) = get_skip_mode(&f.attrs);
// We don't require it to be `Entity`
if skip_ser && skip_de {
return q!({ {} }).parse();
}
q!(
Vars {
name_str,
desc,
Type: &f.ty,
example_v: super::quote_option(example_v),
skip_ser,
skip_de,
},
{
fields.insert(rweb::rt::Cow::Borrowed(name_str), {
{
#[allow(unused_mut)]
let mut s = <Type as rweb::openapi::Entity>::describe(comp_d);
if comp_d.get_unpack(&s).nullable != Some(true) {
required_fields.push(rweb::rt::Cow::Borrowed(name_str));
}
if let rweb::openapi::ComponentOrInlineSchema::Inline(s) = &mut s {
let description = desc;
if !description.is_empty() {
s.description = rweb::rt::Cow::Borrowed(description);
}
let example = example_v;
if let Some(example) = example {
s.example = Some(example);
}
if skip_ser {
s.write_only = Some(true);
}
if skip_de {
s.read_only = Some(true);
}
}
s
}
});
}
)
.parse()
}
fn handle_fields(type_attrs: &[Attribute], fields: &mut Fields) -> Block {
// Properties
let mut block: Block = q!({ {} }).parse();
block.stmts.push(
q!({
#[allow(unused_mut)]
let mut fields: rweb::rt::IndexMap<rweb::rt::Cow<'static, str>, _> =
rweb::rt::IndexMap::default();
})
.parse(),
);
block.stmts.push(
q!({
#[allow(unused_mut)]
let mut required_fields: std::vec::Vec<rweb::rt::Cow<'static, str>> =
std::vec::Vec::default();
})
.parse(),
);
for f in fields {
block.stmts.push(handle_field(type_attrs, f));
}
block
.stmts
.push(Stmt::Expr(q!({ (fields, required_fields) }).parse()));
block
}
fn extract_component(attrs: &[Attribute]) -> Option<String> {
let mut component = None;
let mut process_nv = |nv: syn::MetaNameValue| {
if nv.path.is_ident("component") {
if let Lit::Str(s) = nv.lit {
assert!(
component.is_none(),
"duplicate #[schema(component = \"foo\")] detected"
);
component = Some(s.value())
} else {
panic!(
"#[schema]: value of component should be a string literal, but got {}",
nv.dump()
)
}
} else {
panic!("#[schema]: unknown option {}", nv.path.dump())
}
};
for attr in attrs {
if attr.path.is_ident("schema") {
for config in parse2::<Paren<Delimited<Meta>>>(attr.tokens.clone())
.expect("schema config of type is invalid")
.inner
.inner
{
match config {
Meta::List(l) => {
for el in l.nested {
match el {
syn::NestedMeta::Meta(Meta::NameValue(n)) => process_nv(n),
syn::NestedMeta::Meta(unk) => panic!(
"#[schema]: parameters are name-value pair(s), but got {}",
unk.dump()
),
syn::NestedMeta::Lit(unk) => panic!(
"#[schema]: parameters are name-value pair(s), but got {}",
unk.dump()
),
}
}
}
Meta::NameValue(nv) => process_nv(nv),
_ => panic!(
"#[schema]: parameters are name-value pair(s), but got {}",
config.dump()
),
}
}
}
}
component
}
pub fn derive_schema(input: DeriveInput) -> TokenStream {
let DeriveInput {
mut attrs,
mut data,
ident,
generics,
..
} = input;
let desc = extract_doc(&mut attrs);
let component = extract_component(&attrs);
let example = extract_example(&mut attrs);
let mut block: Block = q!({ {} }).parse();
let mut fields: Punctuated<FieldValue, Token![,]> = Default::default();
if let Some(tts) = example {
fields.push(q!(Vars { tts }, ({ example: Some(tts) })).parse());
}
match data {
Data::Struct(ref mut data) => {
match data.fields {
Fields::Named(_) => {
let fields_block = handle_fields(&attrs, &mut data.fields);
block.stmts.push(
q!(Vars { fields_block }, {
let (fields, required_fields) = fields_block;
})
.parse(),
);
fields.push(q!({ properties: fields }).parse());
fields.push(q!({ required: required_fields }).parse());
}
Fields::Unnamed(ref n) if n.unnamed.len() == 1 => {}
_ => {}
}
fields.push(q!({ schema_type: Some(rweb::openapi::Type::Object) }).parse());
}
Data::Enum(ref mut data) => {
if data
.variants
.iter()
.all(|variant| variant.fields.is_empty())
{
// c-like enums
let exprs: Punctuated<Expr, Token![,]> = data
.variants
.iter()
.map(|variant| {
let name = if let Some(v) = get_rename(&variant.attrs) {
v
} else {
let rule = get_rename_all(&attrs);
rule.apply_to_variant(&variant.ident.to_string())
};
Pair::Punctuated(
q!(Vars { name }, { rweb::rt::Cow::Borrowed(name) }).parse(),
Default::default(),
)
})
.collect();
fields.push(q!(Vars { exprs }, { enum_values: vec![exprs] }).parse());
fields.push(q!({ schema_type: Some(rweb::openapi::Type::String) }).parse());
} else {
let exprs: Punctuated<Expr, Token![,]> = data
.variants
.iter_mut()
.filter_map(|v| {
let desc = extract_doc(&mut v.attrs);
match v.fields {
Fields::Named(..) => Some(Pair::Punctuated(
{
let fields_block = handle_fields(&attrs, &mut v.fields);
q!(
Vars { fields_block, desc },
({
let (fields, fields_required) = fields_block;
#[allow(unused_mut)]
let mut s = rweb::openapi::Schema {
properties: fields,
required: fields_required,
..rweb::rt::Default::default()
};
let description = desc;
if !description.is_empty() {
s.description =
rweb::rt::Cow::Borrowed(description);
}
rweb::openapi::ComponentOrInlineSchema::Inline(s)
})
)
.parse()
},
Default::default(),
)),
Fields::Unnamed(ref f) => {
//
assert!(f.unnamed.len() <= 1);
if f.unnamed.is_empty() {
return None;
}
Some(Pair::Punctuated(
q!(
Vars {
Type: &f.unnamed.first().unwrap().ty,
desc
},
({
#[allow(unused_mut)]
let mut s =
<Type as rweb::openapi::Entity>::describe(comp_d);
if let rweb::openapi::ComponentOrInlineSchema::Inline(
s,
) = &mut s
{
let description = desc;
if !description.is_empty() {
s.description =
rweb::rt::Cow::Borrowed(description);
}
}
s
})
)
.parse(),
Default::default(),
))
}
Fields::Unit => None,
}
})
.collect();
fields.push(q!(Vars { exprs }, { one_of: vec![exprs] }).parse());
}
}
Data::Union(_) => unimplemented!("#[derive(Schema)] for union"),
}
block.stmts.push(Stmt::Expr(
if component.is_some() {
q!(Vars { desc, fields }, {
comp_d.describe_component(&Self::type_name(), |comp_d| rweb::openapi::Schema {
fields,
description: rweb::rt::Cow::Borrowed(desc),
..rweb::rt::Default::default()
})
})
} else {
q!(Vars { desc, fields }, {
rweb::openapi::ComponentOrInlineSchema::Inline(rweb::openapi::Schema {
fields,
description: rweb::rt::Cow::Borrowed(desc),
..rweb::rt::Default::default()
})
})
}
.parse(),
));
let typename = component.clone().unwrap_or_else(|| ident.to_string());
let typename: Expr = if generics.params.is_empty() {
q!(Vars { typename }, { rweb::rt::Cow::Borrowed(typename) }).parse()
} else {
let generics_typenames: Punctuated<pmutil::Quote, Token![,]> = generics
.params
.iter()
.flat_map(|g| match g {
syn::GenericParam::Type(t) => Some({
let tpn = &t.ident;
q!(Vars { tpn }, {
{
<tpn as rweb::openapi::Entity>::type_name().to_string()
}
})
}),
syn::GenericParam::Const(con) => Some({
let tpn = &con.ident;
q!(Vars { tpn }, {
{
tpn.to_string()
}
})
}),
_ => None,
})
.map(|q| Pair::Punctuated(q, Default::default()))
.collect();
q!(
Vars {
typename,
generics_typenames
},
{
rweb::rt::Cow::Owned(format!(
"{}-{}-",
typename,
vec![generics_typenames].join("_")
))
}
)
.parse()
};
let mut item = q!(
Vars {
Type: &ident,
typename,
block,
},
{
impl rweb::openapi::Entity for Type {
fn type_name() -> rweb::rt::Cow<'static, str> {
typename
}
fn describe(
comp_d: &mut rweb::openapi::ComponentDescriptor,
) -> rweb::openapi::ComponentOrInlineSchema {
block
}
}
}
)
.parse::<ItemImpl>()
.with_generics(generics);
for param in item.generics.params.iter_mut() {
match param {
GenericParam::Type(ref mut ty) => ty.bounds.push(TypeParamBound::Trait(TraitBound {
paren_token: None,
modifier: TraitBoundModifier::None,
lifetimes: None,
path: q!({ rweb::openapi::Entity }).parse(),
})),
_ => continue,
}
}
item.dump()
} |
fn get_skip_mode(attrs: &[Attribute]) -> (bool, bool) {
let mut ser = false; |
publish_workflow_run.go | package event
import (
"fmt"
"time"
"github.com/fatih/structs"
"github.com/go-gorp/gorp"
"github.com/ovh/cds/engine/api/notification"
"github.com/ovh/cds/sdk"
)
func publishRunWorkflow(payload interface{}, key, workflowName, appName, pipName, envName string, num int64, sub int64, status string, u *sdk.User) {
event := sdk.Event{
Timestamp: time.Now(),
Hostname: hostname,
CDSName: cdsname,
EventType: fmt.Sprintf("%T", payload),
Payload: structs.Map(payload),
ProjectKey: key,
ApplicationName: appName,
PipelineName: pipName,
WorkflowName: workflowName,
EnvironmentName: envName,
WorkflowRunNum: num,
WorkflowRunNumSub: sub,
Status: status,
}
if u != nil {
event.Username = u.Username
event.UserMail = u.Email
}
publishEvent(event)
}
// PublishWorkflowRun publish event on a workflow run
func PublishWorkflowRun(wr sdk.WorkflowRun, projectKey string) |
// PublishWorkflowNodeRun publish event on a workflow node run
func PublishWorkflowNodeRun(db gorp.SqlExecutor, nr sdk.WorkflowNodeRun, w sdk.Workflow, previousWR *sdk.WorkflowNodeRun) {
// get and send all user notifications
for _, event := range notification.GetUserWorkflowEvents(db, w, previousWR, nr) {
Publish(event, nil)
}
e := sdk.EventRunWorkflowNode{
ID: nr.ID,
Number: nr.Number,
SubNumber: nr.SubNumber,
Status: nr.Status,
Start: nr.Start.Unix(),
Manual: nr.Manual,
HookEvent: nr.HookEvent,
Payload: nr.Payload,
SourceNodeRuns: nr.SourceNodeRuns,
Hash: nr.VCSHash,
BranchName: nr.VCSBranch,
NodeID: nr.WorkflowNodeID,
RunID: nr.WorkflowRunID,
StagesSummary: make([]sdk.StageSummary, len(nr.Stages)),
}
for i := range nr.Stages {
e.StagesSummary[i] = nr.Stages[i].ToSummary()
}
var pipName string
node := w.GetNode(nr.WorkflowNodeID)
if node != nil {
pipName = w.Pipelines[node.PipelineID].Name
e.NodeName = node.Name
}
var envName string
var appName string
if node.Context != nil {
if node.Context.Application != nil {
appName = node.Context.Application.Name
e.RepositoryManagerName = node.Context.Application.VCSServer
e.RepositoryFullName = node.Context.Application.RepositoryFullname
}
if node.Context.Environment != nil {
envName = node.Context.Environment.Name
}
}
if sdk.StatusIsTerminated(nr.Status) {
e.Done = nr.Done.Unix()
}
publishRunWorkflow(e, w.ProjectKey, w.Name, appName, pipName, envName, nr.Number, nr.SubNumber, nr.Status, nil)
}
// PublishWorkflowNodeJobRun publish a WorkflowNodeJobRun
func PublishWorkflowNodeJobRun(db gorp.SqlExecutor, pkey, wname string, jr sdk.WorkflowNodeJobRun) {
e := sdk.EventRunWorkflowJob{
ID: jr.ID,
Status: jr.Status,
Start: jr.Start.Unix(),
}
if sdk.StatusIsTerminated(jr.Status) {
e.Done = jr.Done.Unix()
}
publishRunWorkflow(e, pkey, wname, "", "", "", 0, 0, jr.Status, nil)
}
| {
e := sdk.EventRunWorkflow{
ID: wr.ID,
Number: wr.Number,
Status: wr.Status,
Start: wr.Start.Unix(),
LastExecution: wr.LastExecution.Unix(),
LastModified: wr.LastModified.Unix(),
Tags: wr.Tags,
}
publishRunWorkflow(e, projectKey, wr.Workflow.Name, "", "", "", wr.Number, wr.LastSubNumber, wr.Status, nil)
} |
traits1.rs | // traits1.rs
// Time to implement some traits!
//
// Your task is to implement the trait
// `AppendBar' for the type `String'.
//
// The trait AppendBar has only one function,
// which appends "Bar" to any object
// implementing this trait.
trait AppendBar {
fn append_bar(self) -> Self;
}
impl AppendBar for String {
//Add your code here
fn append_bar(mut self) -> Self {
self += "Bar";
self
}
}
fn main() {
let s = String::from("Foo");
let s = s.append_bar();
println!("s: {}", s);
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn is_FooBar() {
assert_eq!(String::from("Foo").append_bar(), String::from("FooBar"));
}
#[test]
fn | () {
assert_eq!(
String::from("").append_bar().append_bar(),
String::from("BarBar")
);
}
}
| is_BarBar |
mco_metric.go | // Copyright (c) 2021 Red Hat, Inc.
// Copyright Contributors to the Open Cluster Management project
package utils
import (
"crypto/tls"
"fmt"
"io/ioutil"
"net/http"
"net/url"
"os"
"strings"
"k8s.io/klog"
)
func ContainManagedClusterMetric(opt TestOptions, query string, matchedLabels []string) (error, bool) {
grafanaConsoleURL := GetGrafanaURL(opt)
path := "/api/datasources/proxy/1/api/v1/query?"
// TODO(morvencao): remove this after accessing metrics from grafana url with bearer token is supported
if os.Getenv("IS_CANARY_ENV") != "true" && os.Getenv("THANOS_QUERY_FRONTEND_URL") != "" {
grafanaConsoleURL = os.Getenv("THANOS_QUERY_FRONTEND_URL")
path = "/api/v1/query?"
}
queryParams := url.PathEscape(fmt.Sprintf("query=%s", query))
klog.V(5).Infof("request url is: %s\n", grafanaConsoleURL+path+queryParams)
req, err := http.NewRequest(
"GET",
grafanaConsoleURL+path+queryParams,
nil)
if err != nil {
return err, false
}
tr := &http.Transport{
TLSClientConfig: &tls.Config{InsecureSkipVerify: true},
}
client := &http.Client{Transport: tr}
token, err := FetchBearerToken(opt)
if err != nil {
return err, false
}
if token != "" {
req.Header.Set("Authorization", "Bearer "+token)
}
req.Host = opt.HubCluster.GrafanaHost
resp, err := client.Do(req)
if err != nil {
return err, false
}
if resp.StatusCode != http.StatusOK {
klog.Errorf("resp.StatusCode: %v\n", resp.StatusCode)
return fmt.Errorf("Failed to access managed cluster metrics via grafana console"), false
}
metricResult, err := ioutil.ReadAll(resp.Body)
klog.V(5).Infof("metricResult: %s\n", metricResult)
if err != nil |
if !strings.Contains(string(metricResult), `"status":"success"`) {
return fmt.Errorf("Failed to find valid status from response"), false
}
if strings.Contains(string(metricResult), `"result":[]`) {
return fmt.Errorf("Failed to find metric name from response"), false
}
contained := true
for _, label := range matchedLabels {
if !strings.Contains(string(metricResult), label) {
contained = false
break
}
}
if !contained {
return fmt.Errorf("Failed to find metric name from response"), false
}
return nil, true
}
| {
return err, false
} |
progress.go | package main
import (
"bytes"
"fmt"
"io"
"os"
)
// Progress responsible for writing the progress of the app
type Progress struct {
out io.Writer
maxBars int
}
// CreateProgress new progress object
func | (verbosity string) (p Progress, err error) {
switch verbosity {
case "v":
return Progress{os.Stdout, 20}, nil
case "quiet":
return Progress{&bytes.Buffer{}, 20}, nil
}
return Progress{&bytes.Buffer{}, 20}, fmt.Errorf("Undefined verbosity '%s'", verbosity)
}
func (p Progress) write(a interface{}) {
fmt.Fprint(p.out, a)
}
func (p Progress) writeln(a interface{}) {
fmt.Fprintln(p.out, a)
}
func (p Progress) bar(a interface{}, total int, portion int) {
p.write("\r")
p.write(a)
p.write(" [")
bars := 0
if portion > 0 {
bars = int(float32(p.maxBars) / (float32(total) / float32(portion)))
}
spaces := p.maxBars - bars - 1
for i := 0; i < bars; i++ {
p.write("=")
}
if bars < p.maxBars {
p.write(">")
}
for i := 0; i < spaces; i++ {
p.write(" ")
}
p.write("] ")
}
| CreateProgress |
main.rs | //! Substrate Node Template CLI library.
#![warn(missing_docs)]
#![warn(unused_extern_crates)]
| mod cli;
pub use sc_cli::{VersionInfo, IntoExit, error};
fn main() -> Result<(), cli::error::Error> {
let version = VersionInfo {
name: "ISS Tracker Node",
commit: env!("VERGEN_SHA_SHORT"),
version: env!("CARGO_PKG_VERSION"),
executable_name: "tracker",
author: "Alekspickle",
description: "ISS Tracker Node",
support_url: "[email protected]",
};
cli::run(std::env::args(), cli::Exit, version)
} | mod chain_spec;
#[macro_use]
mod service; |
jitdump_linux.rs | //! Support for jitdump files which can be used by perf for profiling jitted code.
//! Spec definitions for the output format is as described here:
//! <https://git.kernel.org/pub/scm/linux/kernel/git/torvalds/linux.git/tree/tools/perf/Documentation/jitdump-specification.txt>
//!
//! Usage Example:
//! Record
//! sudo perf record -k 1 -e instructions:u target/debug/wasmtime -g --jitdump test.wasm
//! Combine
//! sudo perf inject -v -j -i perf.data -o perf.jit.data
//! Report
//! sudo perf report -i perf.jit.data -F+period,srcline
//! Note: For descriptive results, the WASM file being executed should contain dwarf debug data
use crate::{CompiledModule, ProfilingAgent};
use anyhow::Result;
use object::{Object, ObjectSection};
use std::fmt::Debug;
use std::fs::{File, OpenOptions};
use std::io::Write;
use std::ptr;
use std::sync::Mutex;
use std::{borrow, mem, process};
use target_lexicon::Architecture;
use wasmtime_environ::EntityRef;
use object::elf;
/// Defines jitdump record types
#[repr(u32)]
pub enum RecordId {
/// Value 0: JIT_CODE_LOAD: record describing a jitted function
JitCodeLoad = 0,
/// Value 1: JIT_CODE_MOVE: record describing an already jitted function which is moved
_JitCodeMove = 1,
/// Value 2: JIT_CODE_DEBUG_INFO: record describing the debug information for a jitted function
JitCodeDebugInfo = 2,
/// Value 3: JIT_CODE_CLOSE: record marking the end of the jit runtime (optional)
_JitCodeClose = 3,
/// Value 4: JIT_CODE_UNWINDING_INFO: record describing a function unwinding information
_JitCodeUnwindingInfo = 4,
}
/// Each record starts with this fixed size record header which describes the record that follows
#[derive(Debug, Default, Clone, Copy)]
#[repr(C)]
pub struct | {
/// uint32_t id: a value identifying the record type (see below)
id: u32,
/// uint32_t total_size: the size in bytes of the record including the header.
record_size: u32,
/// uint64_t timestamp: a timestamp of when the record was created.
timestamp: u64,
}
unsafe impl object::Pod for RecordHeader {}
/// The CodeLoadRecord is used for describing jitted functions
#[derive(Debug, Default, Clone, Copy)]
#[repr(C)]
pub struct CodeLoadRecord {
/// Fixed sized header that describes this record
header: RecordHeader,
/// uint32_t pid: OS process id of the runtime generating the jitted code
pid: u32,
/// uint32_t tid: OS thread identification of the runtime thread generating the jitted code
tid: u32,
/// uint64_t vma: virtual address of jitted code start
virtual_address: u64,
/// uint64_t code_addr: code start address for the jitted code. By default vma = code_addr
address: u64,
/// uint64_t code_size: size in bytes of the generated jitted code
size: u64,
/// uint64_t code_index: unique identifier for the jitted code (see below)
index: u64,
}
unsafe impl object::Pod for CodeLoadRecord {}
/// Describes source line information for a jitted function
///
/// Unlike the other record structs this holds a `String`, so it is NOT
/// `object::Pod`; it is serialized field-by-field (plus a trailing NUL after
/// the filename) in `write_debug_info_entries`.
#[derive(Debug, Default)]
#[repr(C)]
pub struct DebugEntry {
    /// uint64_t code_addr: address of function for which the debug information is generated
    address: u64,
    /// uint32_t line: source file line number (starting at 1)
    line: u32,
    /// uint32_t discrim: column discriminator, 0 is default
    discriminator: u32,
    /// char name[n]: source file name in ASCII, including null termination
    filename: String,
}
/// Describes debug information for a jitted function. An array of debug entries are
/// appended to this record during writing. Note, this record must precede the code
/// load record that describes the same jitted function.
#[derive(Debug, Default, Clone, Copy)]
#[repr(C)]
pub struct DebugInfoRecord {
    /// Fixed sized header that describes this record
    header: RecordHeader,
    /// uint64_t code_addr: address of function for which the debug information is generated
    address: u64,
    /// uint64_t nr_entry: number of debug entries for the function appended to this record
    count: u64,
}
// SAFETY: `DebugInfoRecord` is `#[repr(C)]`, `Copy`, and contains only
// integer fields (16 + 8 + 8 bytes, no padding), so any byte pattern is valid.
unsafe impl object::Pod for DebugInfoRecord {}
/// Fixed-sized header for each jitdump file
#[derive(Debug, Default, Clone, Copy)]
#[repr(C)]
pub struct FileHeader {
    /// uint32_t magic: a magic number tagging the file type. The value is 4-byte long and represents the
    /// string "JiTD" in ASCII form. It is 0x4A695444 or 0x4454694a depending on the endianness. The field can
    /// be used to detect the endianness of the file
    magic: u32,
    /// uint32_t version: a 4-byte value representing the format version. It is currently set to 2
    // NOTE(review): the doc above says version 2 but `write_file_header`
    // below writes `version: 1` — confirm which format revision perf expects.
    version: u32,
    /// uint32_t total_size: size in bytes of file header
    size: u32,
    /// uint32_t elf_mach: ELF architecture encoding (ELF e_machine value as specified in /usr/include/elf.h)
    e_machine: u32,
    /// uint32_t pad1: padding. Reserved for future use
    pad1: u32,
    /// uint32_t pid: JIT runtime process identification (OS specific)
    pid: u32,
    /// uint64_t timestamp: timestamp of when the file was created
    timestamp: u64,
    /// uint64_t flags: a bitmask of flags
    flags: u64,
}
// SAFETY: `FileHeader` is `#[repr(C)]`, `Copy`, and all-integer (six u32s
// followed by two u64s; 24 is 8-aligned so there is no padding), so any byte
// pattern is a valid value.
unsafe impl object::Pod for FileHeader {}
/// Interface for driving the creation of jitdump files
///
/// All profiling callbacks delegate to the inner `State` through the mutex,
/// so this agent can be shared across threads.
pub struct JitDumpAgent {
    // Note that we use a mutex internally to serialize writing out to our
    // `jitdump_file` within this process, since multiple threads may be sharing
    // this jit agent.
    state: Mutex<State>,
}
/// Mutex-protected innards of `JitDumpAgent`.
struct State {
    /// File instance for the jit dump file
    jitdump_file: File,
    /// Address returned by the executable `mmap` of `jitdump_file` performed
    /// in `JitDumpAgent::new`; unmapped again in `Drop`.
    map_addr: usize,
    /// Unique identifier for jitted code
    code_index: u64,
    /// Flag for experimenting with dumping code load record
    /// after each function (true) or after each module. This
    /// flag is currently set to true.
    dump_funcs: bool,
}
impl JitDumpAgent {
    /// Initialize a JitDumpAgent and write out the header
    ///
    /// Creates (truncating) `./jit-<pid>.dump`, maps one page of it
    /// executable so `perf` records the mmap event, and writes the jitdump
    /// `FileHeader`. Returns an error if the file cannot be created, the
    /// mapping fails, or the header write fails.
    pub fn new() -> Result<Self> {
        let filename = format!("./jit-{}.dump", process::id());
        let jitdump_file = OpenOptions::new()
            .read(true)
            .write(true)
            .create(true)
            .truncate(true)
            .open(&filename)?;
        // After we make our `*.dump` file we execute an `mmap` syscall,
        // specifically with executable permissions, to map it into our address
        // space. This is required so `perf inject` will work later. The `perf
        // inject` command will see that an mmap syscall happened, and it'll see
        // the filename we mapped, and that'll trigger it to actually read and
        // parse the file.
        //
        // To match what some perf examples are doing we keep this `mmap` alive
        // until this agent goes away.
        //
        // SAFETY: we map a fresh page-sized, private region backed by the file
        // we just created; the resulting address is only stored and later
        // passed to `munmap` in `State::drop`, never dereferenced.
        let map_addr = unsafe {
            let ptr = rustix::io::mmap(
                ptr::null_mut(),
                rustix::process::page_size(),
                rustix::io::ProtFlags::EXEC | rustix::io::ProtFlags::READ,
                rustix::io::MapFlags::PRIVATE,
                &jitdump_file,
                0,
            )?;
            ptr as usize
        };
        let mut state = State {
            jitdump_file,
            map_addr,
            code_index: 0,
            dump_funcs: true,
        };
        state.write_file_header()?;
        Ok(JitDumpAgent {
            state: Mutex::new(state),
        })
    }
}
impl ProfilingAgent for JitDumpAgent {
    /// Forwards module-load notifications to the serialized `State`.
    fn module_load(&self, module: &CompiledModule, dbg_image: Option<&[u8]>) {
        self.state.lock().unwrap().module_load(module, dbg_image);
    }
    /// Forwards trampoline-load notifications to the serialized `State`.
    fn trampoline_load(&self, file: &object::File<'_>) {
        self.state.lock().unwrap().trampoline_load(file)
    }
}
impl State {
    /// Returns timestamp from a single source
    fn get_time_stamp(&self) -> u64 {
        // We need to use `CLOCK_MONOTONIC` on Linux which is what `Instant`
        // conveniently also uses, but `Instant` doesn't allow us to get access
        // to nanoseconds as an internal detail, so we calculate the nanoseconds
        // ourselves here.
        let ts = rustix::time::clock_gettime(rustix::time::ClockId::Monotonic);
        // TODO: What does it mean for either sec or nsec to be negative?
        (ts.tv_sec * 1_000_000_000 + ts.tv_nsec) as u64
    }

    /// Returns the ELF machine architecture.
    fn get_e_machine(&self) -> u32 {
        match target_lexicon::HOST.architecture {
            Architecture::X86_64 => elf::EM_X86_64 as u32,
            Architecture::X86_32(_) => elf::EM_386 as u32,
            Architecture::Arm(_) => elf::EM_ARM as u32,
            Architecture::Aarch64(_) => elf::EM_AARCH64 as u32,
            Architecture::S390x => elf::EM_S390 as u32,
            _ => unimplemented!("unrecognized architecture"),
        }
    }

    /// Writes the fixed-size jitdump `FileHeader` at the start of the file.
    fn write_file_header(&mut self) -> Result<()> {
        let header = FileHeader {
            timestamp: self.get_time_stamp(),
            e_machine: self.get_e_machine(),
            // "JiTD" in ASCII; perf uses this value to detect endianness.
            magic: 0x4A695444,
            version: 1,
            size: mem::size_of::<FileHeader>() as u32,
            pad1: 0,
            pid: process::id(),
            flags: 0,
        };
        self.jitdump_file.write_all(object::bytes_of(&header))?;
        Ok(())
    }

    /// Writes a `CodeLoadRecord` followed by the null-terminated function
    /// name and the raw code bytes, matching the wire layout that
    /// `cl_record.header.record_size` accounts for.
    fn write_code_load_record(
        &mut self,
        record_name: &str,
        cl_record: CodeLoadRecord,
        code_buffer: &[u8],
    ) -> Result<()> {
        self.jitdump_file.write_all(object::bytes_of(&cl_record))?;
        self.jitdump_file.write_all(record_name.as_bytes())?;
        self.jitdump_file.write_all(b"\0")?;
        self.jitdump_file.write_all(code_buffer)?;
        Ok(())
    }

    /// Write DebugInfoRecord to open jit dump file.
    /// Must be written before the corresponding CodeLoadRecord.
    fn write_debug_info_record(&mut self, dir_record: DebugInfoRecord) -> Result<()> {
        self.jitdump_file.write_all(object::bytes_of(&dir_record))?;
        Ok(())
    }

    /// Write the per-function `DebugEntry` array to the open jit dump file.
    /// Must be written before the corresponding CodeLoadRecord.
    ///
    /// Entries are serialized field-by-field (address, line, discriminator,
    /// then the filename with a trailing NUL) because `DebugEntry` holds a
    /// `String` and therefore cannot be dumped as raw bytes.
    fn write_debug_info_entries(&mut self, die_entries: Vec<DebugEntry>) -> Result<()> {
        for entry in die_entries.iter() {
            self.jitdump_file
                .write_all(object::bytes_of(&entry.address))?;
            self.jitdump_file.write_all(object::bytes_of(&entry.line))?;
            self.jitdump_file
                .write_all(object::bytes_of(&entry.discriminator))?;
            self.jitdump_file.write_all(entry.filename.as_bytes())?;
            self.jitdump_file.write_all(b"\0")?;
        }
        Ok(())
    }

    /// Sent when a method is compiled and loaded into memory by the VM.
    ///
    /// With a debug image available each finished function is dumped with
    /// DWARF-derived names/line info; otherwise a plain code-load record is
    /// emitted per function. Trampolines are always dumped as plain records.
    pub fn module_load(&mut self, module: &CompiledModule, dbg_image: Option<&[u8]>) {
        let pid = process::id();
        // std::thread::ThreadId is not an OS thread id, so the PID is reused
        // for the tid field here.
        let tid = pid;
        for (idx, func) in module.finished_functions() {
            // SAFETY assumption: `func` points at live, module-owned jitted
            // code for the duration of this call — TODO confirm against
            // `CompiledModule::finished_functions`' contract.
            let (addr, len) = unsafe { ((*func).as_ptr() as *const u8, (*func).len()) };
            if let Some(img) = &dbg_image {
                if let Err(err) = self.dump_from_debug_image(img, "wasm", addr, len, pid, tid) {
                    println!(
                        "Jitdump: module_load failed dumping from debug image: {:?}\n",
                        err
                    );
                }
            } else {
                let timestamp = self.get_time_stamp();
                let name = super::debug_name(module.module(), idx);
                self.dump_code_load_record(&name, addr, len, timestamp, pid, tid);
            }
        }
        for (idx, func, len) in module.trampolines() {
            let (addr, len) = (func as usize as *const u8, len);
            let timestamp = self.get_time_stamp();
            let name = format!("wasm::trampoline[{}]", idx.index());
            self.dump_code_load_record(&name, addr, len, timestamp, pid, tid);
        }
    }

    /// Dumps a code-load record for every defined text symbol in `image`.
    /// Bails out silently when the image has no text section or its data is
    /// unreadable.
    fn trampoline_load(&mut self, image: &object::File<'_>) {
        use object::{ObjectSection, ObjectSymbol, SectionKind, SymbolKind};
        let pid = process::id();
        let tid = pid;
        // Symbol addresses are relative to the text section, so resolve its
        // base address first.
        let text_base = match image.sections().find(|s| s.kind() == SectionKind::Text) {
            Some(section) => match section.data() {
                Ok(data) => data.as_ptr() as usize,
                Err(_) => return,
            },
            None => return,
        };
        for sym in image.symbols() {
            if !sym.is_definition() {
                continue;
            }
            if sym.kind() != SymbolKind::Text {
                continue;
            }
            let address = sym.address();
            let size = sym.size();
            // Skip degenerate symbols that would produce useless records.
            if address == 0 || size == 0 {
                continue;
            }
            if let Ok(name) = sym.name() {
                let addr = text_base + address as usize;
                let timestamp = self.get_time_stamp();
                self.dump_code_load_record(
                    &name,
                    addr as *const u8,
                    size as usize,
                    timestamp,
                    pid,
                    tid,
                );
            }
        }
    }

    /// Builds and writes a single `CodeLoadRecord` (plus name and code bytes)
    /// for the region `[addr, addr + len)`, assigning it the next code index.
    fn dump_code_load_record(
        &mut self,
        method_name: &str,
        addr: *const u8,
        len: usize,
        timestamp: u64,
        pid: u32,
        tid: u32,
    ) {
        // +1 for the NUL terminator written after the name.
        let name_len = method_name.len() + 1;
        let size_limit = mem::size_of::<CodeLoadRecord>();
        let rh = RecordHeader {
            id: RecordId::JitCodeLoad as u32,
            // record_size covers the fixed record, the name + NUL, and the
            // code bytes that follow it on disk.
            record_size: size_limit as u32 + name_len as u32 + len as u32,
            timestamp,
        };
        let clr = CodeLoadRecord {
            header: rh,
            pid,
            tid,
            virtual_address: addr as u64,
            address: addr as u64,
            size: len as u64,
            index: self.code_index,
        };
        self.code_index += 1;
        // SAFETY assumption: callers pass `addr`/`len` describing a live,
        // readable code region (module functions, trampolines, or image
        // symbols) — TODO confirm for all call sites.
        unsafe {
            let code_buffer: &[u8] = std::slice::from_raw_parts(addr, len);
            // NOTE(review): this log string looks garbled — it presumably
            // means "write_code_load_record failed".
            if let Err(err) = self.write_code_load_record(method_name, clr, code_buffer) {
                println!("Jitdump: write_code_load_failed_record failed: {:?}\n", err);
            }
        }
    }

    /// Attempts to dump debuginfo data structures, adding method and line level
    /// for the jitted function.
    ///
    /// Parses `dbg_image` as an object file, loads its DWARF sections, and
    /// walks only the first compilation unit (see the early `break` below).
    pub fn dump_from_debug_image(
        &mut self,
        dbg_image: &[u8],
        module_name: &str,
        addr: *const u8,
        len: usize,
        pid: u32,
        tid: u32,
    ) -> Result<()> {
        let file = object::File::parse(dbg_image).unwrap();
        let endian = if file.is_little_endian() {
            gimli::RunTimeEndian::Little
        } else {
            gimli::RunTimeEndian::Big
        };
        // Missing sections are treated as empty rather than an error.
        let load_section = |id: gimli::SectionId| -> Result<borrow::Cow<[u8]>> {
            if let Some(section) = file.section_by_name(id.name()) {
                Ok(section.data()?.into())
            } else {
                Ok((&[] as &[u8]).into())
            }
        };
        let dwarf_cow = gimli::Dwarf::load(&load_section)?;
        let borrow_section: &dyn for<'a> Fn(
            &'a borrow::Cow<[u8]>,
        )
            -> gimli::EndianSlice<'a, gimli::RunTimeEndian> =
            &|section| gimli::EndianSlice::new(&*section, endian);
        let dwarf = dwarf_cow.borrow(&borrow_section);
        let mut iter = dwarf.units();
        while let Some(header) = iter.next()? {
            let unit = match dwarf.unit(header) {
                Ok(unit) => unit,
                Err(_err) => {
                    // A malformed unit aborts the dump without error.
                    return Ok(());
                }
            };
            self.dump_entries(unit, &dwarf, module_name, addr, len, pid, tid)?;
            // TODO: Temp exit to avoid duplicate addresses being covered by only
            // processing the top unit
            break;
        }
        // When per-function dumping is disabled, emit one record covering the
        // whole module region instead.
        if !self.dump_funcs {
            let timestamp = self.get_time_stamp();
            self.dump_code_load_record(module_name, addr, len, timestamp, pid, tid);
        }
        Ok(())
    }

    /// Walks the DIE tree of `unit` and, for each `DW_TAG_subprogram`, emits
    /// debug-info (and, when `dump_funcs` is set, code-load) records.
    ///
    /// The two branches below mirror each other: with `dump_funcs` a full
    /// `CodeLoadRecord` is written per function; without it only line info is
    /// dumped and the single module-level record is written by the caller.
    fn dump_entries<R: Reader>(
        &mut self,
        unit: gimli::Unit<R>,
        dwarf: &gimli::Dwarf<R>,
        module_name: &str,
        addr: *const u8,
        len: usize,
        pid: u32,
        tid: u32,
    ) -> Result<()> {
        let mut depth = 0;
        let mut entries = unit.entries();
        while let Some((delta_depth, entry)) = entries.next_dfs()? {
            if self.dump_funcs {
                let record_header = RecordHeader {
                    id: RecordId::JitCodeLoad as u32,
                    // record_size/timestamp are filled in just before writing.
                    record_size: 0,
                    timestamp: 0,
                };
                let mut clr = CodeLoadRecord {
                    header: record_header,
                    pid,
                    tid,
                    virtual_address: 0,
                    address: 0,
                    size: 0,
                    index: 0,
                };
                let mut clr_name: String = String::from(module_name);
                let mut get_debug_entry = false;
                depth += delta_depth;
                assert!(depth >= 0);
                if entry.tag() == gimli::constants::DW_TAG_subprogram {
                    get_debug_entry = true;
                    let mut attrs = entry.attrs();
                    while let Some(attr) = attrs.next()? {
                        if let Some(n) = attr.name().static_string() {
                            if n == "DW_AT_low_pc" {
                                clr.address = match attr.value() {
                                    gimli::AttributeValue::Addr(address) => address,
                                    _ => 0,
                                };
                                clr.virtual_address = clr.address;
                            } else if n == "DW_AT_high_pc" {
                                // NOTE(review): only the Udata (offset) form is
                                // handled; an Addr-encoded high_pc falls back
                                // to 0 — confirm producers always emit Udata.
                                clr.size = match attr.value() {
                                    gimli::AttributeValue::Udata(data) => data,
                                    _ => 0,
                                };
                            } else if n == "DW_AT_name" {
                                clr_name = match attr.value() {
                                    gimli::AttributeValue::DebugStrRef(offset) => {
                                        if let Ok(s) = dwarf.debug_str.get_str(offset) {
                                            clr_name.push_str("::");
                                            clr_name.push_str(&s.to_string_lossy()?);
                                            clr_name
                                        } else {
                                            clr_name.push_str("::");
                                            clr_name.push_str("?");
                                            clr_name
                                        }
                                    }
                                    _ => {
                                        clr_name.push_str("??");
                                        clr_name
                                    }
                                };
                            }
                        }
                    }
                }
                if get_debug_entry {
                    // TODO: Temp check to make sure only well-formed data is processed.
                    if clr.address == 0 {
                        continue;
                    }
                    // TODO: Temp check to make sure only well-formed data is processed.
                    if clr_name == "?" {
                        continue;
                    }
                    // NOTE(review): `clr.address == 0` can't be true here (it
                    // was checked just above), so this branch effectively
                    // fires only for `clr.size == 0`.
                    if clr.address == 0 || clr.size == 0 {
                        clr.address = addr as u64;
                        clr.virtual_address = addr as u64;
                        clr.size = len as u64;
                    }
                    clr.header.record_size = mem::size_of::<CodeLoadRecord>() as u32
                        + (clr_name.len() + 1) as u32
                        + clr.size as u32;
                    clr.index = self.code_index;
                    self.code_index += 1;
                    // Debug info must precede the code-load record in the file.
                    self.dump_debug_info(&unit, &dwarf, clr.address, clr.size, None)?;
                    clr.header.timestamp = self.get_time_stamp();
                    // SAFETY assumption: the DWARF low_pc/size (or the
                    // caller-provided addr/len fallback) describe live jitted
                    // code — TODO confirm.
                    unsafe {
                        let code_buffer: &[u8] =
                            std::slice::from_raw_parts(clr.address as *const u8, clr.size as usize);
                        let _ = self.write_code_load_record(&clr_name, clr, code_buffer);
                    }
                }
            } else {
                let mut func_name: String = String::from("?");
                let mut func_addr = 0;
                let mut func_size = 0;
                let mut get_debug_entry = false;
                depth += delta_depth;
                assert!(depth >= 0);
                if entry.tag() == gimli::constants::DW_TAG_subprogram {
                    get_debug_entry = true;
                    let mut attrs = entry.attrs();
                    while let Some(attr) = attrs.next()? {
                        if let Some(n) = attr.name().static_string() {
                            if n == "DW_AT_low_pc" {
                                func_addr = match attr.value() {
                                    gimli::AttributeValue::Addr(address) => address,
                                    _ => 0,
                                };
                            } else if n == "DW_AT_high_pc" {
                                func_size = match attr.value() {
                                    gimli::AttributeValue::Udata(data) => data,
                                    _ => 0,
                                };
                            } else if n == "DW_AT_name" {
                                func_name = match attr.value() {
                                    gimli::AttributeValue::DebugStrRef(offset) => {
                                        if let Ok(s) = dwarf.debug_str.get_str(offset) {
                                            func_name.clear();
                                            func_name.push_str(&s.to_string_lossy()?);
                                            func_name
                                        } else {
                                            func_name.push_str("?");
                                            func_name
                                        }
                                    }
                                    _ => {
                                        func_name.push_str("??");
                                        func_name
                                    }
                                };
                            }
                        }
                    }
                }
                if get_debug_entry {
                    // TODO: Temp check to make sure only well-formed data is processed.
                    if func_addr == 0 {
                        continue;
                    }
                    // TODO: Temp check to make sure only well-formed data is processed.
                    if func_name == "?" {
                        continue;
                    }
                    self.dump_debug_info(
                        &unit,
                        &dwarf,
                        func_addr,
                        func_size,
                        Some(func_name.as_str()),
                    )?;
                }
            }
        }
        Ok(())
    }

    /// Emits one `DebugInfoRecord` plus its `DebugEntry` array, covering line
    /// rows whose addresses fall within `[address, address + size]`. Does
    /// nothing if the unit has no line program.
    fn dump_debug_info<R: Reader>(
        &mut self,
        unit: &gimli::Unit<R>,
        dwarf: &gimli::Dwarf<R>,
        address: u64,
        size: u64,
        file_suffix: Option<&str>,
    ) -> Result<()> {
        let timestamp = self.get_time_stamp();
        if let Some(program) = unit.line_program.clone() {
            let mut debug_info_record = DebugInfoRecord {
                header: RecordHeader {
                    id: RecordId::JitCodeDebugInfo as u32,
                    // record_size is computed after the entries are collected.
                    record_size: 0,
                    timestamp,
                },
                address,
                count: 0,
            };
            let mut debug_entries = Vec::new();
            let mut debug_entries_total_filenames_len = 0;
            let mut rows = program.rows();
            while let Some((header, row)) = rows.next_row()? {
                // NOTE(review): assumes file indices start at 1 (DWARF <= 4);
                // a `file_index()` of 0 would underflow here — TODO confirm
                // for DWARF 5 inputs.
                let row_file_index = row.file_index() - 1;
                let myfile = dwarf
                    .attr_string(
                        &unit,
                        header.file_names()[row_file_index as usize].path_name(),
                    )
                    .unwrap();
                let filename = myfile.to_string_lossy()?;
                let line = row.line().map(|nonzero| nonzero.get()).unwrap_or(0);
                let column = match row.column() {
                    gimli::ColumnType::Column(column) => column.get(),
                    gimli::ColumnType::LeftEdge => 0,
                };
                // Only keep rows inside the function's address range.
                if (row.address() < address) || (row.address() > (address + size)) {
                    continue;
                }
                let mut debug_entry = DebugEntry {
                    address: row.address(),
                    line: line as u32,
                    // The jitdump `discrim` field is populated with the source
                    // column here.
                    discriminator: column as u32,
                    filename: filename.to_string(),
                };
                if let Some(suffix) = file_suffix {
                    debug_entry.filename.push_str("::");
                    debug_entry.filename.push_str(suffix);
                }
                // +1 per entry for each filename's NUL terminator.
                debug_entries_total_filenames_len += debug_entry.filename.len() + 1;
                debug_entries.push(debug_entry);
            }
            debug_info_record.count = debug_entries.len() as u64;
            // On-disk entry size = fixed fields only (subtract the in-memory
            // `String`), plus the variable-length filenames.
            let debug_entries_size = (debug_info_record.count
                * (mem::size_of::<DebugEntry>() as u64 - mem::size_of::<String>() as u64))
                + debug_entries_total_filenames_len as u64;
            debug_info_record.header.record_size =
                mem::size_of::<DebugInfoRecord>() as u32 + debug_entries_size as u32;
            // Write failures are intentionally ignored (best-effort profiling).
            let _ = self.write_debug_info_record(debug_info_record);
            let _ = self.write_debug_info_entries(debug_entries);
        }
        Ok(())
    }
}
impl Drop for State {
    /// Tears down the page-sized executable mapping created in
    /// `JitDumpAgent::new`.
    fn drop(&mut self) {
        // SAFETY: `map_addr` was returned by a successful `mmap` of exactly
        // `page_size()` bytes in `JitDumpAgent::new` and is unmapped only
        // here, exactly once.
        unsafe {
            rustix::io::munmap(self.map_addr as *mut _, rustix::process::page_size()).unwrap();
        }
    }
}
/// Shorthand trait alias bounding the gimli reader used throughout this
/// module; blanket-implemented for every `EndianSlice`, which is the only
/// reader `dump_from_debug_image` constructs.
trait Reader: gimli::Reader<Offset = usize> + Send + Sync {}
impl<'input, Endian> Reader for gimli::EndianSlice<'input, Endian> where
    Endian: gimli::Endianity + Send + Sync
{
}
| RecordHeader |
analysis.pb.go | // Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
// protoc-gen-go v1.25.0
// protoc v3.15.3
// source: kythe/proto/analysis.proto
package analysis_go_proto
import (
proto "github.com/golang/protobuf/proto"
protoreflect "google.golang.org/protobuf/reflect/protoreflect"
protoimpl "google.golang.org/protobuf/runtime/protoimpl"
anypb "google.golang.org/protobuf/types/known/anypb"
timestamppb "google.golang.org/protobuf/types/known/timestamppb"
storage_go_proto "kythe.io/kythe/proto/storage_go_proto"
reflect "reflect"
sync "sync"
)
const (
// Verify that this generated code is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(20 - protoimpl.MinVersion)
// Verify that runtime/protoimpl is sufficiently up-to-date.
_ = protoimpl.EnforceVersion(protoimpl.MaxVersion - 20)
)
// This is a compile-time assertion that a sufficiently up-to-date version
// of the legacy proto package is being used.
const _ = proto.ProtoPackageIsVersion4
type AnalysisResult_Status int32
const (
AnalysisResult_COMPLETE AnalysisResult_Status = 0
AnalysisResult_INCOMPLETE AnalysisResult_Status = 1
AnalysisResult_INVALID_REQUEST AnalysisResult_Status = 2
)
// Enum value maps for AnalysisResult_Status.
var (
AnalysisResult_Status_name = map[int32]string{
0: "COMPLETE",
1: "INCOMPLETE",
2: "INVALID_REQUEST",
}
AnalysisResult_Status_value = map[string]int32{
"COMPLETE": 0,
"INCOMPLETE": 1,
"INVALID_REQUEST": 2,
}
)
func (x AnalysisResult_Status) Enum() *AnalysisResult_Status {
p := new(AnalysisResult_Status)
*p = x
return p
}
func (x AnalysisResult_Status) String() string {
return protoimpl.X.EnumStringOf(x.Descriptor(), protoreflect.EnumNumber(x))
}
func (AnalysisResult_Status) Descriptor() protoreflect.EnumDescriptor {
return file_kythe_proto_analysis_proto_enumTypes[0].Descriptor()
}
func (AnalysisResult_Status) Type() protoreflect.EnumType {
return &file_kythe_proto_analysis_proto_enumTypes[0]
}
func (x AnalysisResult_Status) Number() protoreflect.EnumNumber {
return protoreflect.EnumNumber(x)
}
// Deprecated: Use AnalysisResult_Status.Descriptor instead.
func (AnalysisResult_Status) EnumDescriptor() ([]byte, []int) {
return file_kythe_proto_analysis_proto_rawDescGZIP(), []int{2, 0}
}
type AnalysisRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Compilation *CompilationUnit `protobuf:"bytes,1,opt,name=compilation,proto3" json:"compilation,omitempty"`
FileDataService string `protobuf:"bytes,2,opt,name=file_data_service,json=fileDataService,proto3" json:"file_data_service,omitempty"`
Revision string `protobuf:"bytes,3,opt,name=revision,proto3" json:"revision,omitempty"`
BuildId string `protobuf:"bytes,4,opt,name=build_id,json=buildId,proto3" json:"build_id,omitempty"`
}
func (x *AnalysisRequest) Reset() {
*x = AnalysisRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_kythe_proto_analysis_proto_msgTypes[0]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *AnalysisRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*AnalysisRequest) ProtoMessage() {}
func (x *AnalysisRequest) ProtoReflect() protoreflect.Message {
mi := &file_kythe_proto_analysis_proto_msgTypes[0]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use AnalysisRequest.ProtoReflect.Descriptor instead.
func (*AnalysisRequest) Descriptor() ([]byte, []int) {
return file_kythe_proto_analysis_proto_rawDescGZIP(), []int{0}
}
func (x *AnalysisRequest) GetCompilation() *CompilationUnit {
if x != nil {
return x.Compilation
}
return nil
}
func (x *AnalysisRequest) GetFileDataService() string {
if x != nil {
return x.FileDataService
}
return ""
}
func (x *AnalysisRequest) GetRevision() string {
if x != nil {
return x.Revision
}
return ""
}
func (x *AnalysisRequest) GetBuildId() string {
if x != nil {
return x.BuildId
}
return ""
}
type AnalysisOutput struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Value []byte `protobuf:"bytes,1,opt,name=value,proto3" json:"value,omitempty"`
FinalResult *AnalysisResult `protobuf:"bytes,10,opt,name=final_result,json=finalResult,proto3" json:"final_result,omitempty"`
}
func (x *AnalysisOutput) Reset() {
*x = AnalysisOutput{}
if protoimpl.UnsafeEnabled {
mi := &file_kythe_proto_analysis_proto_msgTypes[1]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *AnalysisOutput) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*AnalysisOutput) ProtoMessage() {}
func (x *AnalysisOutput) ProtoReflect() protoreflect.Message {
mi := &file_kythe_proto_analysis_proto_msgTypes[1]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use AnalysisOutput.ProtoReflect.Descriptor instead.
func (*AnalysisOutput) Descriptor() ([]byte, []int) {
return file_kythe_proto_analysis_proto_rawDescGZIP(), []int{1}
}
func (x *AnalysisOutput) GetValue() []byte {
if x != nil {
return x.Value
}
return nil
}
func (x *AnalysisOutput) GetFinalResult() *AnalysisResult {
if x != nil {
return x.FinalResult
}
return nil
}
type AnalysisResult struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Status AnalysisResult_Status `protobuf:"varint,1,opt,name=status,proto3,enum=kythe.proto.AnalysisResult_Status" json:"status,omitempty"`
Summary string `protobuf:"bytes,2,opt,name=summary,proto3" json:"summary,omitempty"`
}
func (x *AnalysisResult) Reset() {
*x = AnalysisResult{}
if protoimpl.UnsafeEnabled {
mi := &file_kythe_proto_analysis_proto_msgTypes[2]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *AnalysisResult) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*AnalysisResult) ProtoMessage() {}
func (x *AnalysisResult) ProtoReflect() protoreflect.Message {
mi := &file_kythe_proto_analysis_proto_msgTypes[2]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use AnalysisResult.ProtoReflect.Descriptor instead.
func (*AnalysisResult) Descriptor() ([]byte, []int) {
return file_kythe_proto_analysis_proto_rawDescGZIP(), []int{2}
}
func (x *AnalysisResult) GetStatus() AnalysisResult_Status {
if x != nil {
return x.Status
}
return AnalysisResult_COMPLETE
}
func (x *AnalysisResult) GetSummary() string {
if x != nil {
return x.Summary
}
return ""
}
type CompilationUnit struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
VName *storage_go_proto.VName `protobuf:"bytes,1,opt,name=v_name,json=vName,proto3" json:"v_name,omitempty"`
RequiredInput []*CompilationUnit_FileInput `protobuf:"bytes,3,rep,name=required_input,json=requiredInput,proto3" json:"required_input,omitempty"`
HasCompileErrors bool `protobuf:"varint,4,opt,name=has_compile_errors,json=hasCompileErrors,proto3" json:"has_compile_errors,omitempty"`
Argument []string `protobuf:"bytes,5,rep,name=argument,proto3" json:"argument,omitempty"`
SourceFile []string `protobuf:"bytes,6,rep,name=source_file,json=sourceFile,proto3" json:"source_file,omitempty"`
OutputKey string `protobuf:"bytes,7,opt,name=output_key,json=outputKey,proto3" json:"output_key,omitempty"`
WorkingDirectory string `protobuf:"bytes,8,opt,name=working_directory,json=workingDirectory,proto3" json:"working_directory,omitempty"`
EntryContext string `protobuf:"bytes,9,opt,name=entry_context,json=entryContext,proto3" json:"entry_context,omitempty"`
Environment []*CompilationUnit_Env `protobuf:"bytes,10,rep,name=environment,proto3" json:"environment,omitempty"`
Details []*anypb.Any `protobuf:"bytes,11,rep,name=details,proto3" json:"details,omitempty"`
}
func (x *CompilationUnit) Reset() {
*x = CompilationUnit{}
if protoimpl.UnsafeEnabled {
mi := &file_kythe_proto_analysis_proto_msgTypes[3]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *CompilationUnit) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*CompilationUnit) ProtoMessage() {}
func (x *CompilationUnit) ProtoReflect() protoreflect.Message {
mi := &file_kythe_proto_analysis_proto_msgTypes[3]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use CompilationUnit.ProtoReflect.Descriptor instead.
func (*CompilationUnit) Descriptor() ([]byte, []int) {
return file_kythe_proto_analysis_proto_rawDescGZIP(), []int{3}
}
func (x *CompilationUnit) GetVName() *storage_go_proto.VName {
if x != nil {
return x.VName
}
return nil
}
func (x *CompilationUnit) GetRequiredInput() []*CompilationUnit_FileInput {
if x != nil {
return x.RequiredInput
}
return nil
}
func (x *CompilationUnit) GetHasCompileErrors() bool {
if x != nil {
return x.HasCompileErrors
}
return false
}
func (x *CompilationUnit) GetArgument() []string {
if x != nil {
return x.Argument
}
return nil
}
func (x *CompilationUnit) GetSourceFile() []string {
if x != nil {
return x.SourceFile
}
return nil
}
func (x *CompilationUnit) GetOutputKey() string {
if x != nil {
return x.OutputKey
}
return ""
}
func (x *CompilationUnit) GetWorkingDirectory() string {
if x != nil {
return x.WorkingDirectory
}
return ""
}
func (x *CompilationUnit) GetEntryContext() string {
if x != nil {
return x.EntryContext
}
return ""
}
func (x *CompilationUnit) GetEnvironment() []*CompilationUnit_Env {
if x != nil {
return x.Environment
}
return nil
}
func (x *CompilationUnit) GetDetails() []*anypb.Any {
if x != nil {
return x.Details
}
return nil
}
type KzipInfo struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Corpora map[string]*KzipInfo_CorpusInfo `protobuf:"bytes,1,rep,name=corpora,proto3" json:"corpora,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
Size int64 `protobuf:"varint,4,opt,name=size,proto3" json:"size,omitempty"`
CriticalKzipErrors []string `protobuf:"bytes,5,rep,name=critical_kzip_errors,json=criticalKzipErrors,proto3" json:"critical_kzip_errors,omitempty"`
}
func (x *KzipInfo) Reset() {
*x = KzipInfo{}
if protoimpl.UnsafeEnabled {
mi := &file_kythe_proto_analysis_proto_msgTypes[4]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *KzipInfo) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*KzipInfo) ProtoMessage() {}
func (x *KzipInfo) ProtoReflect() protoreflect.Message {
mi := &file_kythe_proto_analysis_proto_msgTypes[4]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use KzipInfo.ProtoReflect.Descriptor instead.
func (*KzipInfo) Descriptor() ([]byte, []int) {
return file_kythe_proto_analysis_proto_rawDescGZIP(), []int{4}
}
func (x *KzipInfo) GetCorpora() map[string]*KzipInfo_CorpusInfo {
if x != nil {
return x.Corpora
}
return nil
}
func (x *KzipInfo) GetSize() int64 {
if x != nil {
return x.Size
}
return 0
}
func (x *KzipInfo) GetCriticalKzipErrors() []string {
if x != nil {
return x.CriticalKzipErrors
}
return nil
}
type BuildMetadata struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
CommitTimestamp *timestamppb.Timestamp `protobuf:"bytes,1,opt,name=commit_timestamp,json=commitTimestamp,proto3" json:"commit_timestamp,omitempty"`
}
func (x *BuildMetadata) Reset() {
*x = BuildMetadata{}
if protoimpl.UnsafeEnabled {
mi := &file_kythe_proto_analysis_proto_msgTypes[5]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *BuildMetadata) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*BuildMetadata) ProtoMessage() {}
func (x *BuildMetadata) ProtoReflect() protoreflect.Message {
mi := &file_kythe_proto_analysis_proto_msgTypes[5]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use BuildMetadata.ProtoReflect.Descriptor instead.
func (*BuildMetadata) Descriptor() ([]byte, []int) {
return file_kythe_proto_analysis_proto_rawDescGZIP(), []int{5}
}
func (x *BuildMetadata) GetCommitTimestamp() *timestamppb.Timestamp {
if x != nil {
return x.CommitTimestamp
}
return nil
}
type FilesRequest struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Files []*FileInfo `protobuf:"bytes,1,rep,name=files,proto3" json:"files,omitempty"`
}
func (x *FilesRequest) Reset() {
*x = FilesRequest{}
if protoimpl.UnsafeEnabled {
mi := &file_kythe_proto_analysis_proto_msgTypes[6]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *FilesRequest) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*FilesRequest) ProtoMessage() {}
func (x *FilesRequest) ProtoReflect() protoreflect.Message {
mi := &file_kythe_proto_analysis_proto_msgTypes[6]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use FilesRequest.ProtoReflect.Descriptor instead.
func (*FilesRequest) Descriptor() ([]byte, []int) {
return file_kythe_proto_analysis_proto_rawDescGZIP(), []int{6}
}
func (x *FilesRequest) GetFiles() []*FileInfo {
if x != nil {
return x.Files
}
return nil
}
type FileInfo struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Path string `protobuf:"bytes,1,opt,name=path,proto3" json:"path,omitempty"`
Digest string `protobuf:"bytes,2,opt,name=digest,proto3" json:"digest,omitempty"`
}
func (x *FileInfo) Reset() {
*x = FileInfo{}
if protoimpl.UnsafeEnabled {
mi := &file_kythe_proto_analysis_proto_msgTypes[7]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *FileInfo) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*FileInfo) ProtoMessage() {}
func (x *FileInfo) ProtoReflect() protoreflect.Message {
mi := &file_kythe_proto_analysis_proto_msgTypes[7]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use FileInfo.ProtoReflect.Descriptor instead.
func (*FileInfo) Descriptor() ([]byte, []int) {
return file_kythe_proto_analysis_proto_rawDescGZIP(), []int{7}
}
func (x *FileInfo) GetPath() string {
if x != nil {
return x.Path
}
return ""
}
func (x *FileInfo) GetDigest() string {
if x != nil {
return x.Digest
}
return ""
}
type FileData struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
Content []byte `protobuf:"bytes,1,opt,name=content,proto3" json:"content,omitempty"`
Info *FileInfo `protobuf:"bytes,2,opt,name=info,proto3" json:"info,omitempty"`
Missing bool `protobuf:"varint,3,opt,name=missing,proto3" json:"missing,omitempty"`
}
func (x *FileData) Reset() {
*x = FileData{}
if protoimpl.UnsafeEnabled {
mi := &file_kythe_proto_analysis_proto_msgTypes[8]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *FileData) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*FileData) ProtoMessage() {}
func (x *FileData) ProtoReflect() protoreflect.Message {
mi := &file_kythe_proto_analysis_proto_msgTypes[8]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use FileData.ProtoReflect.Descriptor instead.
func (*FileData) Descriptor() ([]byte, []int) {
return file_kythe_proto_analysis_proto_rawDescGZIP(), []int{8}
}
func (x *FileData) GetContent() []byte {
if x != nil {
return x.Content
}
return nil
}
func (x *FileData) GetInfo() *FileInfo {
if x != nil {
return x.Info
}
return nil
}
func (x *FileData) GetMissing() bool {
if x != nil {
return x.Missing
}
return false
}
// CompilationBundle pairs a CompilationUnit with the data of all its files,
// making the compilation self-contained.
type CompilationBundle struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Unit is the compilation being bundled.
Unit *CompilationUnit `protobuf:"bytes,1,opt,name=unit,proto3" json:"unit,omitempty"`
// Files holds the content for the unit's required inputs.
Files []*FileData `protobuf:"bytes,2,rep,name=files,proto3" json:"files,omitempty"`
}
func (x *CompilationBundle) Reset() {
*x = CompilationBundle{}
if protoimpl.UnsafeEnabled {
mi := &file_kythe_proto_analysis_proto_msgTypes[9]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *CompilationBundle) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*CompilationBundle) ProtoMessage() {}
func (x *CompilationBundle) ProtoReflect() protoreflect.Message {
mi := &file_kythe_proto_analysis_proto_msgTypes[9]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use CompilationBundle.ProtoReflect.Descriptor instead.
func (*CompilationBundle) Descriptor() ([]byte, []int) {
return file_kythe_proto_analysis_proto_rawDescGZIP(), []int{9}
}
func (x *CompilationBundle) GetUnit() *CompilationUnit {
if x != nil {
return x.Unit
}
return nil
}
func (x *CompilationBundle) GetFiles() []*FileData {
if x != nil {
return x.Files
}
return nil
}
// IndexedCompilation is a CompilationUnit together with index metadata
// (e.g. the revisions at which it was indexed — see IndexedCompilation_Index).
type IndexedCompilation struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Unit is the indexed compilation.
Unit *CompilationUnit `protobuf:"bytes,1,opt,name=unit,proto3" json:"unit,omitempty"`
// Index carries per-compilation indexing metadata.
Index *IndexedCompilation_Index `protobuf:"bytes,2,opt,name=index,proto3" json:"index,omitempty"`
}
func (x *IndexedCompilation) Reset() {
*x = IndexedCompilation{}
if protoimpl.UnsafeEnabled {
mi := &file_kythe_proto_analysis_proto_msgTypes[10]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *IndexedCompilation) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*IndexedCompilation) ProtoMessage() {}
func (x *IndexedCompilation) ProtoReflect() protoreflect.Message {
mi := &file_kythe_proto_analysis_proto_msgTypes[10]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use IndexedCompilation.ProtoReflect.Descriptor instead.
func (*IndexedCompilation) Descriptor() ([]byte, []int) {
return file_kythe_proto_analysis_proto_rawDescGZIP(), []int{10}
}
func (x *IndexedCompilation) GetUnit() *CompilationUnit {
if x != nil {
return x.Unit
}
return nil
}
func (x *IndexedCompilation) GetIndex() *IndexedCompilation_Index {
if x != nil {
return x.Index
}
return nil
}
// CompilationUnit_FileInput describes one required input file of a
// CompilationUnit: its Kythe VName, its path/digest, and optional details.
type CompilationUnit_FileInput struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// VName is the Kythe name of the input file.
VName *storage_go_proto.VName `protobuf:"bytes,1,opt,name=v_name,json=vName,proto3" json:"v_name,omitempty"`
// Info identifies the file by path and digest.
Info *FileInfo `protobuf:"bytes,2,opt,name=info,proto3" json:"info,omitempty"`
// Details holds arbitrary per-input extension messages. Field 3 is
// reserved in the schema (note the jump from 2 to 4).
Details []*anypb.Any `protobuf:"bytes,4,rep,name=details,proto3" json:"details,omitempty"`
}
func (x *CompilationUnit_FileInput) Reset() {
*x = CompilationUnit_FileInput{}
if protoimpl.UnsafeEnabled {
mi := &file_kythe_proto_analysis_proto_msgTypes[11]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *CompilationUnit_FileInput) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*CompilationUnit_FileInput) ProtoMessage() {}
func (x *CompilationUnit_FileInput) ProtoReflect() protoreflect.Message {
mi := &file_kythe_proto_analysis_proto_msgTypes[11]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use CompilationUnit_FileInput.ProtoReflect.Descriptor instead.
func (*CompilationUnit_FileInput) Descriptor() ([]byte, []int) {
return file_kythe_proto_analysis_proto_rawDescGZIP(), []int{3, 0}
}
func (x *CompilationUnit_FileInput) GetVName() *storage_go_proto.VName {
if x != nil {
return x.VName
}
return nil
}
func (x *CompilationUnit_FileInput) GetInfo() *FileInfo {
if x != nil {
return x.Info
}
return nil
}
func (x *CompilationUnit_FileInput) GetDetails() []*anypb.Any {
if x != nil {
return x.Details
}
return nil
}
// CompilationUnit_Env is a single environment variable (name/value pair)
// recorded for a compilation.
type CompilationUnit_Env struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Name of the environment variable.
Name string `protobuf:"bytes,1,opt,name=name,proto3" json:"name,omitempty"`
// Value of the environment variable.
Value string `protobuf:"bytes,2,opt,name=value,proto3" json:"value,omitempty"`
}
func (x *CompilationUnit_Env) Reset() {
*x = CompilationUnit_Env{}
if protoimpl.UnsafeEnabled {
mi := &file_kythe_proto_analysis_proto_msgTypes[12]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *CompilationUnit_Env) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*CompilationUnit_Env) ProtoMessage() {}
func (x *CompilationUnit_Env) ProtoReflect() protoreflect.Message {
mi := &file_kythe_proto_analysis_proto_msgTypes[12]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use CompilationUnit_Env.ProtoReflect.Descriptor instead.
func (*CompilationUnit_Env) Descriptor() ([]byte, []int) {
return file_kythe_proto_analysis_proto_rawDescGZIP(), []int{3, 1}
}
func (x *CompilationUnit_Env) GetName() string {
if x != nil {
return x.Name
}
return ""
}
func (x *CompilationUnit_Env) GetValue() string {
if x != nil {
return x.Value
}
return ""
}
// KzipInfo_CorpusInfo summarizes a corpus within a kzip: per-language counts
// of required inputs and of source files. Fields 1-3 are reserved in the
// schema, which is why numbering starts at 4.
type KzipInfo_CorpusInfo struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// LanguageRequiredInputs maps language name -> count of required inputs.
LanguageRequiredInputs map[string]*KzipInfo_CorpusInfo_RequiredInputs `protobuf:"bytes,4,rep,name=language_required_inputs,json=languageRequiredInputs,proto3" json:"language_required_inputs,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
// LanguageSources maps language name -> count of source files.
LanguageSources map[string]*KzipInfo_CorpusInfo_RequiredInputs `protobuf:"bytes,5,rep,name=language_sources,json=languageSources,proto3" json:"language_sources,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"bytes,2,opt,name=value,proto3"`
}
func (x *KzipInfo_CorpusInfo) Reset() {
*x = KzipInfo_CorpusInfo{}
if protoimpl.UnsafeEnabled {
mi := &file_kythe_proto_analysis_proto_msgTypes[13]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *KzipInfo_CorpusInfo) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*KzipInfo_CorpusInfo) ProtoMessage() {}
func (x *KzipInfo_CorpusInfo) ProtoReflect() protoreflect.Message {
mi := &file_kythe_proto_analysis_proto_msgTypes[13]
if protoimpl.UnsafeEnabled && x != nil |
return mi.MessageOf(x)
}
// Deprecated: Use KzipInfo_CorpusInfo.ProtoReflect.Descriptor instead.
func (*KzipInfo_CorpusInfo) Descriptor() ([]byte, []int) {
return file_kythe_proto_analysis_proto_rawDescGZIP(), []int{4, 0}
}
func (x *KzipInfo_CorpusInfo) GetLanguageRequiredInputs() map[string]*KzipInfo_CorpusInfo_RequiredInputs {
if x != nil {
return x.LanguageRequiredInputs
}
return nil
}
func (x *KzipInfo_CorpusInfo) GetLanguageSources() map[string]*KzipInfo_CorpusInfo_RequiredInputs {
if x != nil {
return x.LanguageSources
}
return nil
}
// KzipInfo_CorpusInfo_RequiredInputs is a simple counter message used as the
// value type of the per-language maps in KzipInfo_CorpusInfo.
type KzipInfo_CorpusInfo_RequiredInputs struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Count of items (required inputs or sources, depending on the map).
Count int32 `protobuf:"varint,1,opt,name=count,proto3" json:"count,omitempty"`
}
func (x *KzipInfo_CorpusInfo_RequiredInputs) Reset() {
*x = KzipInfo_CorpusInfo_RequiredInputs{}
if protoimpl.UnsafeEnabled {
mi := &file_kythe_proto_analysis_proto_msgTypes[15]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *KzipInfo_CorpusInfo_RequiredInputs) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*KzipInfo_CorpusInfo_RequiredInputs) ProtoMessage() {}
func (x *KzipInfo_CorpusInfo_RequiredInputs) ProtoReflect() protoreflect.Message {
mi := &file_kythe_proto_analysis_proto_msgTypes[15]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use KzipInfo_CorpusInfo_RequiredInputs.ProtoReflect.Descriptor instead.
func (*KzipInfo_CorpusInfo_RequiredInputs) Descriptor() ([]byte, []int) {
return file_kythe_proto_analysis_proto_rawDescGZIP(), []int{4, 0, 0}
}
func (x *KzipInfo_CorpusInfo_RequiredInputs) GetCount() int32 {
if x != nil {
return x.Count
}
return 0
}
// IndexedCompilation_Index carries indexing metadata for a compilation:
// the revision markers associated with the indexed unit.
type IndexedCompilation_Index struct {
state protoimpl.MessageState
sizeCache protoimpl.SizeCache
unknownFields protoimpl.UnknownFields
// Revisions lists revision identifiers for this indexed compilation.
Revisions []string `protobuf:"bytes,1,rep,name=revisions,proto3" json:"revisions,omitempty"`
}
func (x *IndexedCompilation_Index) Reset() {
*x = IndexedCompilation_Index{}
if protoimpl.UnsafeEnabled {
mi := &file_kythe_proto_analysis_proto_msgTypes[18]
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
ms.StoreMessageInfo(mi)
}
}
func (x *IndexedCompilation_Index) String() string {
return protoimpl.X.MessageStringOf(x)
}
func (*IndexedCompilation_Index) ProtoMessage() {}
func (x *IndexedCompilation_Index) ProtoReflect() protoreflect.Message {
mi := &file_kythe_proto_analysis_proto_msgTypes[18]
if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
}
return mi.MessageOf(x)
}
// Deprecated: Use IndexedCompilation_Index.ProtoReflect.Descriptor instead.
func (*IndexedCompilation_Index) Descriptor() ([]byte, []int) {
return file_kythe_proto_analysis_proto_rawDescGZIP(), []int{10, 0}
}
func (x *IndexedCompilation_Index) GetRevisions() []string {
if x != nil {
return x.Revisions
}
return nil
}
var File_kythe_proto_analysis_proto protoreflect.FileDescriptor
var file_kythe_proto_analysis_proto_rawDesc = []byte{
0x0a, 0x1a, 0x6b, 0x79, 0x74, 0x68, 0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2f, 0x61, 0x6e,
0x61, 0x6c, 0x79, 0x73, 0x69, 0x73, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x12, 0x0b, 0x6b, 0x79,
0x74, 0x68, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x19, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2f, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x61, 0x6e, 0x79, 0x2e, 0x70,
0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x1f, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2f, 0x70, 0x72, 0x6f,
0x74, 0x6f, 0x62, 0x75, 0x66, 0x2f, 0x74, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x2e,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x1a, 0x19, 0x6b, 0x79, 0x74, 0x68, 0x65, 0x2f, 0x70, 0x72, 0x6f,
0x74, 0x6f, 0x2f, 0x73, 0x74, 0x6f, 0x72, 0x61, 0x67, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x22, 0xb4, 0x01, 0x0a, 0x0f, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x73, 0x69, 0x73, 0x52, 0x65, 0x71,
0x75, 0x65, 0x73, 0x74, 0x12, 0x3e, 0x0a, 0x0b, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x61, 0x74,
0x69, 0x6f, 0x6e, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x6b, 0x79, 0x74, 0x68,
0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x61, 0x74,
0x69, 0x6f, 0x6e, 0x55, 0x6e, 0x69, 0x74, 0x52, 0x0b, 0x63, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x61,
0x74, 0x69, 0x6f, 0x6e, 0x12, 0x2a, 0x0a, 0x11, 0x66, 0x69, 0x6c, 0x65, 0x5f, 0x64, 0x61, 0x74,
0x61, 0x5f, 0x73, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52,
0x0f, 0x66, 0x69, 0x6c, 0x65, 0x44, 0x61, 0x74, 0x61, 0x53, 0x65, 0x72, 0x76, 0x69, 0x63, 0x65,
0x12, 0x1a, 0x0a, 0x08, 0x72, 0x65, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x18, 0x03, 0x20, 0x01,
0x28, 0x09, 0x52, 0x08, 0x72, 0x65, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x12, 0x19, 0x0a, 0x08,
0x62, 0x75, 0x69, 0x6c, 0x64, 0x5f, 0x69, 0x64, 0x18, 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x07,
0x62, 0x75, 0x69, 0x6c, 0x64, 0x49, 0x64, 0x22, 0x66, 0x0a, 0x0e, 0x41, 0x6e, 0x61, 0x6c, 0x79,
0x73, 0x69, 0x73, 0x4f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c,
0x75, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x12,
0x3e, 0x0a, 0x0c, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x5f, 0x72, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x18,
0x0a, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1b, 0x2e, 0x6b, 0x79, 0x74, 0x68, 0x65, 0x2e, 0x70, 0x72,
0x6f, 0x74, 0x6f, 0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x73, 0x69, 0x73, 0x52, 0x65, 0x73, 0x75,
0x6c, 0x74, 0x52, 0x0b, 0x66, 0x69, 0x6e, 0x61, 0x6c, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x22,
0xa3, 0x01, 0x0a, 0x0e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x73, 0x69, 0x73, 0x52, 0x65, 0x73, 0x75,
0x6c, 0x74, 0x12, 0x3a, 0x0a, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x18, 0x01, 0x20, 0x01,
0x28, 0x0e, 0x32, 0x22, 0x2e, 0x6b, 0x79, 0x74, 0x68, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x2e, 0x41, 0x6e, 0x61, 0x6c, 0x79, 0x73, 0x69, 0x73, 0x52, 0x65, 0x73, 0x75, 0x6c, 0x74, 0x2e,
0x53, 0x74, 0x61, 0x74, 0x75, 0x73, 0x52, 0x06, 0x73, 0x74, 0x61, 0x74, 0x75, 0x73, 0x12, 0x18,
0x0a, 0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52,
0x07, 0x73, 0x75, 0x6d, 0x6d, 0x61, 0x72, 0x79, 0x22, 0x3b, 0x0a, 0x06, 0x53, 0x74, 0x61, 0x74,
0x75, 0x73, 0x12, 0x0c, 0x0a, 0x08, 0x43, 0x4f, 0x4d, 0x50, 0x4c, 0x45, 0x54, 0x45, 0x10, 0x00,
0x12, 0x0e, 0x0a, 0x0a, 0x49, 0x4e, 0x43, 0x4f, 0x4d, 0x50, 0x4c, 0x45, 0x54, 0x45, 0x10, 0x01,
0x12, 0x13, 0x0a, 0x0f, 0x49, 0x4e, 0x56, 0x41, 0x4c, 0x49, 0x44, 0x5f, 0x52, 0x45, 0x51, 0x55,
0x45, 0x53, 0x54, 0x10, 0x02, 0x22, 0xac, 0x05, 0x0a, 0x0f, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c,
0x61, 0x74, 0x69, 0x6f, 0x6e, 0x55, 0x6e, 0x69, 0x74, 0x12, 0x29, 0x0a, 0x06, 0x76, 0x5f, 0x6e,
0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x12, 0x2e, 0x6b, 0x79, 0x74, 0x68,
0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x56, 0x4e, 0x61, 0x6d, 0x65, 0x52, 0x05, 0x76,
0x4e, 0x61, 0x6d, 0x65, 0x12, 0x4d, 0x0a, 0x0e, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64,
0x5f, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x18, 0x03, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x26, 0x2e, 0x6b,
0x79, 0x74, 0x68, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x69,
0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x55, 0x6e, 0x69, 0x74, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x49,
0x6e, 0x70, 0x75, 0x74, 0x52, 0x0d, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x49, 0x6e,
0x70, 0x75, 0x74, 0x12, 0x2c, 0x0a, 0x12, 0x68, 0x61, 0x73, 0x5f, 0x63, 0x6f, 0x6d, 0x70, 0x69,
0x6c, 0x65, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x18, 0x04, 0x20, 0x01, 0x28, 0x08, 0x52,
0x10, 0x68, 0x61, 0x73, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x65, 0x45, 0x72, 0x72, 0x6f, 0x72,
0x73, 0x12, 0x1a, 0x0a, 0x08, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x05, 0x20,
0x03, 0x28, 0x09, 0x52, 0x08, 0x61, 0x72, 0x67, 0x75, 0x6d, 0x65, 0x6e, 0x74, 0x12, 0x1f, 0x0a,
0x0b, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x5f, 0x66, 0x69, 0x6c, 0x65, 0x18, 0x06, 0x20, 0x03,
0x28, 0x09, 0x52, 0x0a, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x46, 0x69, 0x6c, 0x65, 0x12, 0x1d,
0x0a, 0x0a, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x5f, 0x6b, 0x65, 0x79, 0x18, 0x07, 0x20, 0x01,
0x28, 0x09, 0x52, 0x09, 0x6f, 0x75, 0x74, 0x70, 0x75, 0x74, 0x4b, 0x65, 0x79, 0x12, 0x2b, 0x0a,
0x11, 0x77, 0x6f, 0x72, 0x6b, 0x69, 0x6e, 0x67, 0x5f, 0x64, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f,
0x72, 0x79, 0x18, 0x08, 0x20, 0x01, 0x28, 0x09, 0x52, 0x10, 0x77, 0x6f, 0x72, 0x6b, 0x69, 0x6e,
0x67, 0x44, 0x69, 0x72, 0x65, 0x63, 0x74, 0x6f, 0x72, 0x79, 0x12, 0x23, 0x0a, 0x0d, 0x65, 0x6e,
0x74, 0x72, 0x79, 0x5f, 0x63, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x18, 0x09, 0x20, 0x01, 0x28,
0x09, 0x52, 0x0c, 0x65, 0x6e, 0x74, 0x72, 0x79, 0x43, 0x6f, 0x6e, 0x74, 0x65, 0x78, 0x74, 0x12,
0x42, 0x0a, 0x0b, 0x65, 0x6e, 0x76, 0x69, 0x72, 0x6f, 0x6e, 0x6d, 0x65, 0x6e, 0x74, 0x18, 0x0a,
0x20, 0x03, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x6b, 0x79, 0x74, 0x68, 0x65, 0x2e, 0x70, 0x72, 0x6f,
0x74, 0x6f, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x55, 0x6e,
0x69, 0x74, 0x2e, 0x45, 0x6e, 0x76, 0x52, 0x0b, 0x65, 0x6e, 0x76, 0x69, 0x72, 0x6f, 0x6e, 0x6d,
0x65, 0x6e, 0x74, 0x12, 0x2e, 0x0a, 0x07, 0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x18, 0x0b,
0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72,
0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x41, 0x6e, 0x79, 0x52, 0x07, 0x64, 0x65, 0x74, 0x61,
0x69, 0x6c, 0x73, 0x1a, 0x97, 0x01, 0x0a, 0x09, 0x46, 0x69, 0x6c, 0x65, 0x49, 0x6e, 0x70, 0x75,
0x74, 0x12, 0x29, 0x0a, 0x06, 0x76, 0x5f, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28,
0x0b, 0x32, 0x12, 0x2e, 0x6b, 0x79, 0x74, 0x68, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e,
0x56, 0x4e, 0x61, 0x6d, 0x65, 0x52, 0x05, 0x76, 0x4e, 0x61, 0x6d, 0x65, 0x12, 0x29, 0x0a, 0x04,
0x69, 0x6e, 0x66, 0x6f, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x6b, 0x79, 0x74,
0x68, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x49, 0x6e, 0x66,
0x6f, 0x52, 0x04, 0x69, 0x6e, 0x66, 0x6f, 0x12, 0x2e, 0x0a, 0x07, 0x64, 0x65, 0x74, 0x61, 0x69,
0x6c, 0x73, 0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x14, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c,
0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66, 0x2e, 0x41, 0x6e, 0x79, 0x52, 0x07,
0x64, 0x65, 0x74, 0x61, 0x69, 0x6c, 0x73, 0x4a, 0x04, 0x08, 0x03, 0x10, 0x04, 0x1a, 0x2f, 0x0a,
0x03, 0x45, 0x6e, 0x76, 0x12, 0x12, 0x0a, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x18, 0x01, 0x20, 0x01,
0x28, 0x09, 0x52, 0x04, 0x6e, 0x61, 0x6d, 0x65, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75,
0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x4a, 0x04,
0x08, 0x02, 0x10, 0x03, 0x22, 0x8c, 0x06, 0x0a, 0x08, 0x4b, 0x7a, 0x69, 0x70, 0x49, 0x6e, 0x66,
0x6f, 0x12, 0x3c, 0x0a, 0x07, 0x63, 0x6f, 0x72, 0x70, 0x6f, 0x72, 0x61, 0x18, 0x01, 0x20, 0x03,
0x28, 0x0b, 0x32, 0x22, 0x2e, 0x6b, 0x79, 0x74, 0x68, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f,
0x2e, 0x4b, 0x7a, 0x69, 0x70, 0x49, 0x6e, 0x66, 0x6f, 0x2e, 0x43, 0x6f, 0x72, 0x70, 0x6f, 0x72,
0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x07, 0x63, 0x6f, 0x72, 0x70, 0x6f, 0x72, 0x61, 0x12,
0x12, 0x0a, 0x04, 0x73, 0x69, 0x7a, 0x65, 0x18, 0x04, 0x20, 0x01, 0x28, 0x03, 0x52, 0x04, 0x73,
0x69, 0x7a, 0x65, 0x12, 0x30, 0x0a, 0x14, 0x63, 0x72, 0x69, 0x74, 0x69, 0x63, 0x61, 0x6c, 0x5f,
0x6b, 0x7a, 0x69, 0x70, 0x5f, 0x65, 0x72, 0x72, 0x6f, 0x72, 0x73, 0x18, 0x05, 0x20, 0x03, 0x28,
0x09, 0x52, 0x12, 0x63, 0x72, 0x69, 0x74, 0x69, 0x63, 0x61, 0x6c, 0x4b, 0x7a, 0x69, 0x70, 0x45,
0x72, 0x72, 0x6f, 0x72, 0x73, 0x1a, 0x91, 0x04, 0x0a, 0x0a, 0x43, 0x6f, 0x72, 0x70, 0x75, 0x73,
0x49, 0x6e, 0x66, 0x6f, 0x12, 0x76, 0x0a, 0x18, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65,
0x5f, 0x72, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x5f, 0x69, 0x6e, 0x70, 0x75, 0x74, 0x73,
0x18, 0x04, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x3c, 0x2e, 0x6b, 0x79, 0x74, 0x68, 0x65, 0x2e, 0x70,
0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x4b, 0x7a, 0x69, 0x70, 0x49, 0x6e, 0x66, 0x6f, 0x2e, 0x43, 0x6f,
0x72, 0x70, 0x75, 0x73, 0x49, 0x6e, 0x66, 0x6f, 0x2e, 0x4c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67,
0x65, 0x52, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x45,
0x6e, 0x74, 0x72, 0x79, 0x52, 0x16, 0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x52, 0x65,
0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x12, 0x60, 0x0a, 0x10,
0x6c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x5f, 0x73, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73,
0x18, 0x05, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x35, 0x2e, 0x6b, 0x79, 0x74, 0x68, 0x65, 0x2e, 0x70,
0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x4b, 0x7a, 0x69, 0x70, 0x49, 0x6e, 0x66, 0x6f, 0x2e, 0x43, 0x6f,
0x72, 0x70, 0x75, 0x73, 0x49, 0x6e, 0x66, 0x6f, 0x2e, 0x4c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67,
0x65, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0f, 0x6c,
0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x53, 0x6f, 0x75, 0x72, 0x63, 0x65, 0x73, 0x1a, 0x26,
0x0a, 0x0e, 0x52, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73,
0x12, 0x14, 0x0a, 0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x05, 0x52,
0x05, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x1a, 0x7a, 0x0a, 0x1b, 0x4c, 0x61, 0x6e, 0x67, 0x75, 0x61,
0x67, 0x65, 0x52, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73,
0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01,
0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x45, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65,
0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x6b, 0x79, 0x74, 0x68, 0x65, 0x2e, 0x70,
0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x4b, 0x7a, 0x69, 0x70, 0x49, 0x6e, 0x66, 0x6f, 0x2e, 0x43, 0x6f,
0x72, 0x70, 0x75, 0x73, 0x49, 0x6e, 0x66, 0x6f, 0x2e, 0x52, 0x65, 0x71, 0x75, 0x69, 0x72, 0x65,
0x64, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02,
0x38, 0x01, 0x1a, 0x73, 0x0a, 0x14, 0x4c, 0x61, 0x6e, 0x67, 0x75, 0x61, 0x67, 0x65, 0x53, 0x6f,
0x75, 0x72, 0x63, 0x65, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65,
0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x45, 0x0a, 0x05,
0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x2f, 0x2e, 0x6b, 0x79,
0x74, 0x68, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x4b, 0x7a, 0x69, 0x70, 0x49, 0x6e,
0x66, 0x6f, 0x2e, 0x43, 0x6f, 0x72, 0x70, 0x75, 0x73, 0x49, 0x6e, 0x66, 0x6f, 0x2e, 0x52, 0x65,
0x71, 0x75, 0x69, 0x72, 0x65, 0x64, 0x49, 0x6e, 0x70, 0x75, 0x74, 0x73, 0x52, 0x05, 0x76, 0x61,
0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x4a, 0x04, 0x08, 0x01, 0x10, 0x02, 0x4a, 0x04, 0x08,
0x02, 0x10, 0x03, 0x4a, 0x04, 0x08, 0x03, 0x10, 0x04, 0x1a, 0x5c, 0x0a, 0x0c, 0x43, 0x6f, 0x72,
0x70, 0x6f, 0x72, 0x61, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79,
0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79, 0x12, 0x36, 0x0a, 0x05, 0x76,
0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x20, 0x2e, 0x6b, 0x79, 0x74,
0x68, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x4b, 0x7a, 0x69, 0x70, 0x49, 0x6e, 0x66,
0x6f, 0x2e, 0x43, 0x6f, 0x72, 0x70, 0x75, 0x73, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x05, 0x76, 0x61,
0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x4a, 0x04, 0x08, 0x02, 0x10, 0x03, 0x4a, 0x04, 0x08,
0x03, 0x10, 0x04, 0x22, 0x56, 0x0a, 0x0d, 0x42, 0x75, 0x69, 0x6c, 0x64, 0x4d, 0x65, 0x74, 0x61,
0x64, 0x61, 0x74, 0x61, 0x12, 0x45, 0x0a, 0x10, 0x63, 0x6f, 0x6d, 0x6d, 0x69, 0x74, 0x5f, 0x74,
0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1a,
0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x62, 0x75, 0x66,
0x2e, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x52, 0x0f, 0x63, 0x6f, 0x6d, 0x6d,
0x69, 0x74, 0x54, 0x69, 0x6d, 0x65, 0x73, 0x74, 0x61, 0x6d, 0x70, 0x22, 0x3b, 0x0a, 0x0c, 0x46,
0x69, 0x6c, 0x65, 0x73, 0x52, 0x65, 0x71, 0x75, 0x65, 0x73, 0x74, 0x12, 0x2b, 0x0a, 0x05, 0x66,
0x69, 0x6c, 0x65, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x6b, 0x79, 0x74,
0x68, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x49, 0x6e, 0x66,
0x6f, 0x52, 0x05, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x22, 0x36, 0x0a, 0x08, 0x46, 0x69, 0x6c, 0x65,
0x49, 0x6e, 0x66, 0x6f, 0x12, 0x12, 0x0a, 0x04, 0x70, 0x61, 0x74, 0x68, 0x18, 0x01, 0x20, 0x01,
0x28, 0x09, 0x52, 0x04, 0x70, 0x61, 0x74, 0x68, 0x12, 0x16, 0x0a, 0x06, 0x64, 0x69, 0x67, 0x65,
0x73, 0x74, 0x18, 0x02, 0x20, 0x01, 0x28, 0x09, 0x52, 0x06, 0x64, 0x69, 0x67, 0x65, 0x73, 0x74,
0x22, 0x69, 0x0a, 0x08, 0x46, 0x69, 0x6c, 0x65, 0x44, 0x61, 0x74, 0x61, 0x12, 0x18, 0x0a, 0x07,
0x63, 0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0c, 0x52, 0x07, 0x63,
0x6f, 0x6e, 0x74, 0x65, 0x6e, 0x74, 0x12, 0x29, 0x0a, 0x04, 0x69, 0x6e, 0x66, 0x6f, 0x18, 0x02,
0x20, 0x01, 0x28, 0x0b, 0x32, 0x15, 0x2e, 0x6b, 0x79, 0x74, 0x68, 0x65, 0x2e, 0x70, 0x72, 0x6f,
0x74, 0x6f, 0x2e, 0x46, 0x69, 0x6c, 0x65, 0x49, 0x6e, 0x66, 0x6f, 0x52, 0x04, 0x69, 0x6e, 0x66,
0x6f, 0x12, 0x18, 0x0a, 0x07, 0x6d, 0x69, 0x73, 0x73, 0x69, 0x6e, 0x67, 0x18, 0x03, 0x20, 0x01,
0x28, 0x08, 0x52, 0x07, 0x6d, 0x69, 0x73, 0x73, 0x69, 0x6e, 0x67, 0x22, 0x72, 0x0a, 0x11, 0x43,
0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x42, 0x75, 0x6e, 0x64, 0x6c, 0x65,
0x12, 0x30, 0x0a, 0x04, 0x75, 0x6e, 0x69, 0x74, 0x18, 0x01, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c,
0x2e, 0x6b, 0x79, 0x74, 0x68, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x43, 0x6f, 0x6d,
0x70, 0x69, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x55, 0x6e, 0x69, 0x74, 0x52, 0x04, 0x75, 0x6e,
0x69, 0x74, 0x12, 0x2b, 0x0a, 0x05, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x18, 0x02, 0x20, 0x03, 0x28,
0x0b, 0x32, 0x15, 0x2e, 0x6b, 0x79, 0x74, 0x68, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e,
0x46, 0x69, 0x6c, 0x65, 0x44, 0x61, 0x74, 0x61, 0x52, 0x05, 0x66, 0x69, 0x6c, 0x65, 0x73, 0x22,
0xaa, 0x01, 0x0a, 0x12, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x65, 0x64, 0x43, 0x6f, 0x6d, 0x70, 0x69,
0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x12, 0x30, 0x0a, 0x04, 0x75, 0x6e, 0x69, 0x74, 0x18, 0x01,
0x20, 0x01, 0x28, 0x0b, 0x32, 0x1c, 0x2e, 0x6b, 0x79, 0x74, 0x68, 0x65, 0x2e, 0x70, 0x72, 0x6f,
0x74, 0x6f, 0x2e, 0x43, 0x6f, 0x6d, 0x70, 0x69, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x55, 0x6e,
0x69, 0x74, 0x52, 0x04, 0x75, 0x6e, 0x69, 0x74, 0x12, 0x3b, 0x0a, 0x05, 0x69, 0x6e, 0x64, 0x65,
0x78, 0x18, 0x02, 0x20, 0x01, 0x28, 0x0b, 0x32, 0x25, 0x2e, 0x6b, 0x79, 0x74, 0x68, 0x65, 0x2e,
0x70, 0x72, 0x6f, 0x74, 0x6f, 0x2e, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x65, 0x64, 0x43, 0x6f, 0x6d,
0x70, 0x69, 0x6c, 0x61, 0x74, 0x69, 0x6f, 0x6e, 0x2e, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x52, 0x05,
0x69, 0x6e, 0x64, 0x65, 0x78, 0x1a, 0x25, 0x0a, 0x05, 0x49, 0x6e, 0x64, 0x65, 0x78, 0x12, 0x1c,
0x0a, 0x09, 0x72, 0x65, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x18, 0x01, 0x20, 0x03, 0x28,
0x09, 0x52, 0x09, 0x72, 0x65, 0x76, 0x69, 0x73, 0x69, 0x6f, 0x6e, 0x73, 0x42, 0x37, 0x0a, 0x1f,
0x63, 0x6f, 0x6d, 0x2e, 0x67, 0x6f, 0x6f, 0x67, 0x6c, 0x65, 0x2e, 0x64, 0x65, 0x76, 0x74, 0x6f,
0x6f, 0x6c, 0x73, 0x2e, 0x6b, 0x79, 0x74, 0x68, 0x65, 0x2e, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x5a,
0x11, 0x61, 0x6e, 0x61, 0x6c, 0x79, 0x73, 0x69, 0x73, 0x5f, 0x67, 0x6f, 0x5f, 0x70, 0x72, 0x6f,
0x74, 0x6f, 0xf8, 0x01, 0x01, 0x62, 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
// Lazy-compression state for the raw file descriptor: rawDescData starts as
// the uncompressed bytes and is replaced by its gzipped form exactly once by
// rawDescGZIP (guarded by rawDescOnce).
var (
file_kythe_proto_analysis_proto_rawDescOnce sync.Once
file_kythe_proto_analysis_proto_rawDescData = file_kythe_proto_analysis_proto_rawDesc
)
// file_kythe_proto_analysis_proto_rawDescGZIP returns the gzip-compressed
// file descriptor, compressing it on first use and caching the result
// (after the first call rawDescData holds the compressed bytes).
func file_kythe_proto_analysis_proto_rawDescGZIP() []byte {
file_kythe_proto_analysis_proto_rawDescOnce.Do(func() {
file_kythe_proto_analysis_proto_rawDescData = protoimpl.X.CompressGZIP(file_kythe_proto_analysis_proto_rawDescData)
})
return file_kythe_proto_analysis_proto_rawDescData
}
// Type-info tables for this file: 1 enum (AnalysisResult.Status) and 19
// messages. Indices into msgTypes are the same ones referenced by each
// message's Reset/ProtoReflect methods above.
var file_kythe_proto_analysis_proto_enumTypes = make([]protoimpl.EnumInfo, 1)
var file_kythe_proto_analysis_proto_msgTypes = make([]protoimpl.MessageInfo, 19)
// goTypes lists every Go type used by this file's descriptor, in the global
// index order referenced by depIdxs below (map-entry types appear as nil).
var file_kythe_proto_analysis_proto_goTypes = []interface{}{
(AnalysisResult_Status)(0), // 0: kythe.proto.AnalysisResult.Status
(*AnalysisRequest)(nil), // 1: kythe.proto.AnalysisRequest
(*AnalysisOutput)(nil), // 2: kythe.proto.AnalysisOutput
(*AnalysisResult)(nil), // 3: kythe.proto.AnalysisResult
(*CompilationUnit)(nil), // 4: kythe.proto.CompilationUnit
(*KzipInfo)(nil), // 5: kythe.proto.KzipInfo
(*BuildMetadata)(nil), // 6: kythe.proto.BuildMetadata
(*FilesRequest)(nil), // 7: kythe.proto.FilesRequest
(*FileInfo)(nil), // 8: kythe.proto.FileInfo
(*FileData)(nil), // 9: kythe.proto.FileData
(*CompilationBundle)(nil), // 10: kythe.proto.CompilationBundle
(*IndexedCompilation)(nil), // 11: kythe.proto.IndexedCompilation
(*CompilationUnit_FileInput)(nil), // 12: kythe.proto.CompilationUnit.FileInput
(*CompilationUnit_Env)(nil), // 13: kythe.proto.CompilationUnit.Env
(*KzipInfo_CorpusInfo)(nil), // 14: kythe.proto.KzipInfo.CorpusInfo
nil, // 15: kythe.proto.KzipInfo.CorporaEntry
(*KzipInfo_CorpusInfo_RequiredInputs)(nil), // 16: kythe.proto.KzipInfo.CorpusInfo.RequiredInputs
nil, // 17: kythe.proto.KzipInfo.CorpusInfo.LanguageRequiredInputsEntry
nil, // 18: kythe.proto.KzipInfo.CorpusInfo.LanguageSourcesEntry
(*IndexedCompilation_Index)(nil), // 19: kythe.proto.IndexedCompilation.Index
(*storage_go_proto.VName)(nil), // 20: kythe.proto.VName
(*anypb.Any)(nil), // 21: google.protobuf.Any
(*timestamppb.Timestamp)(nil), // 22: google.protobuf.Timestamp
}
// depIdxs resolves each message field's type to an index in goTypes above;
// the trailing sub-list markers delimit method/extension sections (all empty
// here except field type_names, which span [0:23]).
var file_kythe_proto_analysis_proto_depIdxs = []int32{
4, // 0: kythe.proto.AnalysisRequest.compilation:type_name -> kythe.proto.CompilationUnit
3, // 1: kythe.proto.AnalysisOutput.final_result:type_name -> kythe.proto.AnalysisResult
0, // 2: kythe.proto.AnalysisResult.status:type_name -> kythe.proto.AnalysisResult.Status
20, // 3: kythe.proto.CompilationUnit.v_name:type_name -> kythe.proto.VName
12, // 4: kythe.proto.CompilationUnit.required_input:type_name -> kythe.proto.CompilationUnit.FileInput
13, // 5: kythe.proto.CompilationUnit.environment:type_name -> kythe.proto.CompilationUnit.Env
21, // 6: kythe.proto.CompilationUnit.details:type_name -> google.protobuf.Any
15, // 7: kythe.proto.KzipInfo.corpora:type_name -> kythe.proto.KzipInfo.CorporaEntry
22, // 8: kythe.proto.BuildMetadata.commit_timestamp:type_name -> google.protobuf.Timestamp
8, // 9: kythe.proto.FilesRequest.files:type_name -> kythe.proto.FileInfo
8, // 10: kythe.proto.FileData.info:type_name -> kythe.proto.FileInfo
4, // 11: kythe.proto.CompilationBundle.unit:type_name -> kythe.proto.CompilationUnit
9, // 12: kythe.proto.CompilationBundle.files:type_name -> kythe.proto.FileData
4, // 13: kythe.proto.IndexedCompilation.unit:type_name -> kythe.proto.CompilationUnit
19, // 14: kythe.proto.IndexedCompilation.index:type_name -> kythe.proto.IndexedCompilation.Index
20, // 15: kythe.proto.CompilationUnit.FileInput.v_name:type_name -> kythe.proto.VName
8, // 16: kythe.proto.CompilationUnit.FileInput.info:type_name -> kythe.proto.FileInfo
21, // 17: kythe.proto.CompilationUnit.FileInput.details:type_name -> google.protobuf.Any
17, // 18: kythe.proto.KzipInfo.CorpusInfo.language_required_inputs:type_name -> kythe.proto.KzipInfo.CorpusInfo.LanguageRequiredInputsEntry
18, // 19: kythe.proto.KzipInfo.CorpusInfo.language_sources:type_name -> kythe.proto.KzipInfo.CorpusInfo.LanguageSourcesEntry
14, // 20: kythe.proto.KzipInfo.CorporaEntry.value:type_name -> kythe.proto.KzipInfo.CorpusInfo
16, // 21: kythe.proto.KzipInfo.CorpusInfo.LanguageRequiredInputsEntry.value:type_name -> kythe.proto.KzipInfo.CorpusInfo.RequiredInputs
16, // 22: kythe.proto.KzipInfo.CorpusInfo.LanguageSourcesEntry.value:type_name -> kythe.proto.KzipInfo.CorpusInfo.RequiredInputs
23, // [23:23] is the sub-list for method output_type
23, // [23:23] is the sub-list for method input_type
23, // [23:23] is the sub-list for extension type_name
23, // [23:23] is the sub-list for extension extendee
0, // [0:23] is the sub-list for field type_name
}
func init() { file_kythe_proto_analysis_proto_init() }
// file_kythe_proto_analysis_proto_init builds the runtime type metadata for this
// generated file. It is idempotent: once File_kythe_proto_analysis_proto is set,
// further calls return immediately.
func file_kythe_proto_analysis_proto_init() {
	if File_kythe_proto_analysis_proto != nil {
		// Already initialized (init() runs this at package-load time).
		return
	}
	// Without the unsafe fast path, the protobuf runtime needs exported
	// accessors for each generated struct's internal bookkeeping fields.
	// NOTE(review): message-type indices 14, 16 and 17 get no exporter here;
	// presumably those are the map-entry types — confirm against the goTypes
	// table if this file is regenerated.
	if !protoimpl.UnsafeEnabled {
		file_kythe_proto_analysis_proto_msgTypes[0].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*AnalysisRequest); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_kythe_proto_analysis_proto_msgTypes[1].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*AnalysisOutput); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_kythe_proto_analysis_proto_msgTypes[2].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*AnalysisResult); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_kythe_proto_analysis_proto_msgTypes[3].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*CompilationUnit); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_kythe_proto_analysis_proto_msgTypes[4].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*KzipInfo); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_kythe_proto_analysis_proto_msgTypes[5].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*BuildMetadata); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_kythe_proto_analysis_proto_msgTypes[6].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*FilesRequest); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_kythe_proto_analysis_proto_msgTypes[7].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*FileInfo); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_kythe_proto_analysis_proto_msgTypes[8].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*FileData); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_kythe_proto_analysis_proto_msgTypes[9].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*CompilationBundle); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_kythe_proto_analysis_proto_msgTypes[10].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*IndexedCompilation); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_kythe_proto_analysis_proto_msgTypes[11].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*CompilationUnit_FileInput); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_kythe_proto_analysis_proto_msgTypes[12].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*CompilationUnit_Env); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_kythe_proto_analysis_proto_msgTypes[13].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*KzipInfo_CorpusInfo); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_kythe_proto_analysis_proto_msgTypes[15].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*KzipInfo_CorpusInfo_RequiredInputs); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
		file_kythe_proto_analysis_proto_msgTypes[18].Exporter = func(v interface{}, i int) interface{} {
			switch v := v.(*IndexedCompilation_Index); i {
			case 0:
				return &v.state
			case 1:
				return &v.sizeCache
			case 2:
				return &v.unknownFields
			default:
				return nil
			}
		}
	}
	type x struct{}
	out := protoimpl.TypeBuilder{
		File: protoimpl.DescBuilder{
			GoPackagePath: reflect.TypeOf(x{}).PkgPath(),
			RawDescriptor: file_kythe_proto_analysis_proto_rawDesc,
			NumEnums:      1,
			NumMessages:   19,
			NumExtensions: 0,
			NumServices:   0,
		},
		GoTypes:           file_kythe_proto_analysis_proto_goTypes,
		DependencyIndexes: file_kythe_proto_analysis_proto_depIdxs,
		EnumInfos:         file_kythe_proto_analysis_proto_enumTypes,
		MessageInfos:      file_kythe_proto_analysis_proto_msgTypes,
	}.Build()
	File_kythe_proto_analysis_proto = out.File
	// The builder has consumed these; release them for garbage collection.
	file_kythe_proto_analysis_proto_rawDesc = nil
	file_kythe_proto_analysis_proto_goTypes = nil
	file_kythe_proto_analysis_proto_depIdxs = nil
}
| {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
}
return ms
} |
f64.rs | //! This module provides constants which are specific to the implementation
//! of the `f64` floating point data type.
//!
//! *[See also the `f64` primitive type](../../std/primitive.f64.html).*
//!
//! Mathematically significant numbers are provided in the `consts` sub-module.
#![stable(feature = "rust1", since = "1.0.0")]
#[cfg(not(bootstrap))]
use crate::convert::FloatToInt;
#[cfg(not(test))]
use crate::intrinsics;
use crate::mem;
use crate::num::FpCategory;
/// The radix or base of the internal representation of `f64`.
#[stable(feature = "rust1", since = "1.0.0")]
pub const RADIX: u32 = 2;
/// Number of significant digits in base 2.
#[stable(feature = "rust1", since = "1.0.0")]
pub const MANTISSA_DIGITS: u32 = 53;
/// Approximate number of significant digits in base 10.
#[stable(feature = "rust1", since = "1.0.0")]
pub const DIGITS: u32 = 15;
/// [Machine epsilon] value for `f64`.
///
/// This is the difference between `1.0` and the next larger representable number.
///
/// This is equal to 2<sup>-52</sup>.
///
/// [Machine epsilon]: https://en.wikipedia.org/wiki/Machine_epsilon
#[stable(feature = "rust1", since = "1.0.0")]
pub const EPSILON: f64 = 2.2204460492503131e-16_f64;
/// Smallest finite `f64` value.
#[stable(feature = "rust1", since = "1.0.0")]
pub const MIN: f64 = -1.7976931348623157e+308_f64;
/// Smallest positive normal `f64` value.
#[stable(feature = "rust1", since = "1.0.0")]
pub const MIN_POSITIVE: f64 = 2.2250738585072014e-308_f64;
/// Largest finite `f64` value.
#[stable(feature = "rust1", since = "1.0.0")]
pub const MAX: f64 = 1.7976931348623157e+308_f64;
/// One greater than the minimum possible normal power of 2 exponent.
#[stable(feature = "rust1", since = "1.0.0")]
pub const MIN_EXP: i32 = -1021;
/// Maximum possible power of 2 exponent.
#[stable(feature = "rust1", since = "1.0.0")]
pub const MAX_EXP: i32 = 1024;
/// Minimum possible normal power of 10 exponent.
#[stable(feature = "rust1", since = "1.0.0")]
pub const MIN_10_EXP: i32 = -307;
/// Maximum possible power of 10 exponent.
#[stable(feature = "rust1", since = "1.0.0")]
pub const MAX_10_EXP: i32 = 308;
/// Not a Number (NaN).
///
/// Note that NaN compares unequal to everything, including itself (see
/// [`is_nan`](../../std/primitive.f64.html#method.is_nan)).
#[stable(feature = "rust1", since = "1.0.0")]
pub const NAN: f64 = 0.0_f64 / 0.0_f64;
/// Infinity (∞).
#[stable(feature = "rust1", since = "1.0.0")]
pub const INFINITY: f64 = 1.0_f64 / 0.0_f64;
/// Negative infinity (-∞).
#[stable(feature = "rust1", since = "1.0.0")]
pub const NEG_INFINITY: f64 = -1.0_f64 / 0.0_f64;
/// Basic mathematical constants.
#[stable(feature = "rust1", since = "1.0.0")]
pub mod consts {
    // FIXME: replace with mathematical constants from cmath.
    /// Archimedes' constant (π)
    #[stable(feature = "rust1", since = "1.0.0")]
    pub const PI: f64 = 3.14159265358979323846264338327950288_f64;
    /// The full circle constant (τ)
    ///
    /// Equal to 2π.
    #[unstable(feature = "tau_constant", issue = "66770")]
    pub const TAU: f64 = 6.28318530717958647692528676655900577_f64;
    /// π/2
    #[stable(feature = "rust1", since = "1.0.0")]
    pub const FRAC_PI_2: f64 = 1.57079632679489661923132169163975144_f64;
    /// π/3
    #[stable(feature = "rust1", since = "1.0.0")]
    pub const FRAC_PI_3: f64 = 1.04719755119659774615421446109316763_f64;
    /// π/4
    #[stable(feature = "rust1", since = "1.0.0")]
    pub const FRAC_PI_4: f64 = 0.785398163397448309615660845819875721_f64;
    /// π/6
    #[stable(feature = "rust1", since = "1.0.0")]
    pub const FRAC_PI_6: f64 = 0.52359877559829887307710723054658381_f64;
    /// π/8
    #[stable(feature = "rust1", since = "1.0.0")]
    pub const FRAC_PI_8: f64 = 0.39269908169872415480783042290993786_f64;
    /// 1/π
    #[stable(feature = "rust1", since = "1.0.0")]
    pub const FRAC_1_PI: f64 = 0.318309886183790671537767526745028724_f64;
    /// 2/π
    #[stable(feature = "rust1", since = "1.0.0")]
    pub const FRAC_2_PI: f64 = 0.636619772367581343075535053490057448_f64;
    /// 2/sqrt(π)
    #[stable(feature = "rust1", since = "1.0.0")]
    pub const FRAC_2_SQRT_PI: f64 = 1.12837916709551257389615890312154517_f64;
    /// sqrt(2)
    #[stable(feature = "rust1", since = "1.0.0")]
    pub const SQRT_2: f64 = 1.41421356237309504880168872420969808_f64;
    /// 1/sqrt(2)
    #[stable(feature = "rust1", since = "1.0.0")]
    pub const FRAC_1_SQRT_2: f64 = 0.707106781186547524400844362104849039_f64;
    /// Euler's number (e)
    #[stable(feature = "rust1", since = "1.0.0")]
    pub const E: f64 = 2.71828182845904523536028747135266250_f64;
    /// log<sub>2</sub>(10)
    #[unstable(feature = "extra_log_consts", issue = "50540")]
    pub const LOG2_10: f64 = 3.32192809488736234787031942948939018_f64;
    /// log<sub>2</sub>(e)
    #[stable(feature = "rust1", since = "1.0.0")]
    pub const LOG2_E: f64 = 1.44269504088896340735992468100189214_f64;
    /// log<sub>10</sub>(2)
    #[unstable(feature = "extra_log_consts", issue = "50540")]
    pub const LOG10_2: f64 = 0.301029995663981195213738894724493027_f64;
    /// log<sub>10</sub>(e)
    #[stable(feature = "rust1", since = "1.0.0")]
    pub const LOG10_E: f64 = 0.434294481903251827651128918916605082_f64;
    /// ln(2)
    #[stable(feature = "rust1", since = "1.0.0")]
    pub const LN_2: f64 = 0.693147180559945309417232121458176568_f64;
    /// ln(10)
    #[stable(feature = "rust1", since = "1.0.0")]
    pub const LN_10: f64 = 2.30258509299404568401799145468436421_f64;
}
#[lang = "f64"]
#[cfg(not(test))]
impl f64 {
/// Returns `true` if this value is `NaN`.
///
/// ```
/// use std::f64;
///
/// let nan = f64::NAN;
/// let f = 7.0_f64;
///
/// assert!(nan.is_nan());
/// assert!(!f.is_nan());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_nan(self) -> bool {
self != self
}
// FIXME(#50145): `abs` is publicly unavailable in libcore due to
// concerns about portability, so this implementation is for
// private use internally.
#[inline]
fn abs_private(self) -> f64 {
f64::from_bits(self.to_bits() & 0x7fff_ffff_ffff_ffff)
}
/// Returns `true` if this value is positive infinity or negative infinity, and
/// `false` otherwise.
///
/// ```
/// use std::f64;
///
/// let f = 7.0f64;
/// let inf = f64::INFINITY;
/// let neg_inf = f64::NEG_INFINITY;
/// let nan = f64::NAN;
///
/// assert!(!f.is_infinite());
/// assert!(!nan.is_infinite());
///
/// assert!(inf.is_infinite());
/// assert!(neg_inf.is_infinite());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_infinite(self) -> bool {
self.abs_private() == INFINITY
}
/// Returns `true` if this number is neither infinite nor `NaN`.
///
/// ```
/// use std::f64;
///
/// let f = 7.0f64;
/// let inf: f64 = f64::INFINITY;
/// let neg_inf: f64 = f64::NEG_INFINITY;
/// let nan: f64 = f64::NAN;
///
/// assert!(f.is_finite());
///
/// assert!(!nan.is_finite());
/// assert!(!inf.is_finite());
/// assert!(!neg_inf.is_finite());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_finite(self) -> bool {
// There's no need to handle NaN separately: if self is NaN,
// the comparison is not true, exactly as desired.
self.abs_private() < INFINITY
}
/// Returns `true` if the number is neither zero, infinite,
/// [subnormal][subnormal], or `NaN`.
///
/// ```
/// use std::f64;
///
/// let min = f64::MIN_POSITIVE; // 2.2250738585072014e-308f64
/// let max = f64::MAX;
/// let lower_than_min = 1.0e-308_f64;
/// let zero = 0.0f64;
///
/// assert!(min.is_normal());
/// assert!(max.is_normal());
///
/// assert!(!zero.is_normal());
/// assert!(!f64::NAN.is_normal());
/// assert!(!f64::INFINITY.is_normal());
/// // Values between `0` and `min` are Subnormal.
/// assert!(!lower_than_min.is_normal());
/// ```
/// [subnormal]: https://en.wikipedia.org/wiki/Denormal_number
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_normal(self) -> bool {
self.classify() == FpCategory::Normal
}
/// Returns the floating point category of the number. If only one property
/// is going to be tested, it is generally faster to use the specific
/// predicate instead.
///
/// ```
/// use std::num::FpCategory;
/// use std::f64;
///
/// let num = 12.4_f64;
/// let inf = f64::INFINITY;
///
/// assert_eq!(num.classify(), FpCategory::Normal);
/// assert_eq!(inf.classify(), FpCategory::Infinite);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
pub fn classify(self) -> FpCategory {
const EXP_MASK: u64 = 0x7ff0000000000000;
const MAN_MASK: u64 = 0x000fffffffffffff;
let bits = self.to_bits();
match (bits & MAN_MASK, bits & EXP_MASK) {
(0, 0) => FpCategory::Zero,
(_, 0) => FpCategory::Subnormal,
(0, EXP_MASK) => FpCategory::Infinite, | }
/// Returns `true` if `self` has a positive sign, including `+0.0`, `NaN`s with
/// positive sign bit and positive infinity.
///
/// ```
/// let f = 7.0_f64;
/// let g = -7.0_f64;
///
/// assert!(f.is_sign_positive());
/// assert!(!g.is_sign_positive());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_sign_positive(self) -> bool {
!self.is_sign_negative()
}
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_deprecated(since = "1.0.0", reason = "renamed to is_sign_positive")]
#[inline]
#[doc(hidden)]
pub fn is_positive(self) -> bool {
self.is_sign_positive()
}
/// Returns `true` if `self` has a negative sign, including `-0.0`, `NaN`s with
/// negative sign bit and negative infinity.
///
/// ```
/// let f = 7.0_f64;
/// let g = -7.0_f64;
///
/// assert!(!f.is_sign_negative());
/// assert!(g.is_sign_negative());
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn is_sign_negative(self) -> bool {
self.to_bits() & 0x8000_0000_0000_0000 != 0
}
#[stable(feature = "rust1", since = "1.0.0")]
#[rustc_deprecated(since = "1.0.0", reason = "renamed to is_sign_negative")]
#[inline]
#[doc(hidden)]
pub fn is_negative(self) -> bool {
self.is_sign_negative()
}
/// Takes the reciprocal (inverse) of a number, `1/x`.
///
/// ```
/// let x = 2.0_f64;
/// let abs_difference = (x.recip() - (1.0 / x)).abs();
///
/// assert!(abs_difference < 1e-10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn recip(self) -> f64 {
1.0 / self
}
/// Converts radians to degrees.
///
/// ```
/// use std::f64::consts;
///
/// let angle = consts::PI;
///
/// let abs_difference = (angle.to_degrees() - 180.0).abs();
///
/// assert!(abs_difference < 1e-10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn to_degrees(self) -> f64 {
// The division here is correctly rounded with respect to the true
// value of 180/π. (This differs from f32, where a constant must be
// used to ensure a correctly rounded result.)
self * (180.0f64 / consts::PI)
}
/// Converts degrees to radians.
///
/// ```
/// use std::f64::consts;
///
/// let angle = 180.0_f64;
///
/// let abs_difference = (angle.to_radians() - consts::PI).abs();
///
/// assert!(abs_difference < 1e-10);
/// ```
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn to_radians(self) -> f64 {
let value: f64 = consts::PI;
self * (value / 180.0)
}
/// Returns the maximum of the two numbers.
///
/// ```
/// let x = 1.0_f64;
/// let y = 2.0_f64;
///
/// assert_eq!(x.max(y), y);
/// ```
///
/// If one of the arguments is NaN, then the other argument is returned.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn max(self, other: f64) -> f64 {
intrinsics::maxnumf64(self, other)
}
/// Returns the minimum of the two numbers.
///
/// ```
/// let x = 1.0_f64;
/// let y = 2.0_f64;
///
/// assert_eq!(x.min(y), x);
/// ```
///
/// If one of the arguments is NaN, then the other argument is returned.
#[stable(feature = "rust1", since = "1.0.0")]
#[inline]
pub fn min(self, other: f64) -> f64 {
intrinsics::minnumf64(self, other)
}
/// Rounds toward zero and converts to any primitive integer type,
/// assuming that the value is finite and fits in that type.
///
/// ```
/// #![feature(float_approx_unchecked_to)]
///
/// let value = 4.6_f32;
/// let rounded = unsafe { value.approx_unchecked_to::<u16>() };
/// assert_eq!(rounded, 4);
///
/// let value = -128.9_f32;
/// let rounded = unsafe { value.approx_unchecked_to::<i8>() };
/// assert_eq!(rounded, std::i8::MIN);
/// ```
///
/// # Safety
///
/// The value must:
///
/// * Not be `NaN`
/// * Not be infinite
/// * Be representable in the return type `Int`, after truncating off its fractional part
#[cfg(not(bootstrap))]
#[unstable(feature = "float_approx_unchecked_to", issue = "67058")]
#[inline]
pub unsafe fn approx_unchecked_to<Int>(self) -> Int where Self: FloatToInt<Int> {
FloatToInt::<Int>::approx_unchecked(self)
}
/// Raw transmutation to `u64`.
///
/// This is currently identical to `transmute::<f64, u64>(self)` on all platforms.
///
/// See `from_bits` for some discussion of the portability of this operation
/// (there are almost no issues).
///
/// Note that this function is distinct from `as` casting, which attempts to
/// preserve the *numeric* value, and not the bitwise value.
///
/// # Examples
///
/// ```
/// assert!((1f64).to_bits() != 1f64 as u64); // to_bits() is not casting!
/// assert_eq!((12.5f64).to_bits(), 0x4029000000000000);
///
/// ```
#[stable(feature = "float_bits_conv", since = "1.20.0")]
#[inline]
pub fn to_bits(self) -> u64 {
// SAFETY: `u64` is a plain old datatype so we can always transmute to it
unsafe { mem::transmute(self) }
}
/// Raw transmutation from `u64`.
///
/// This is currently identical to `transmute::<u64, f64>(v)` on all platforms.
/// It turns out this is incredibly portable, for two reasons:
///
/// * Floats and Ints have the same endianness on all supported platforms.
/// * IEEE-754 very precisely specifies the bit layout of floats.
///
/// However there is one caveat: prior to the 2008 version of IEEE-754, how
/// to interpret the NaN signaling bit wasn't actually specified. Most platforms
/// (notably x86 and ARM) picked the interpretation that was ultimately
/// standardized in 2008, but some didn't (notably MIPS). As a result, all
/// signaling NaNs on MIPS are quiet NaNs on x86, and vice-versa.
///
/// Rather than trying to preserve signaling-ness cross-platform, this
/// implementation favours preserving the exact bits. This means that
/// any payloads encoded in NaNs will be preserved even if the result of
/// this method is sent over the network from an x86 machine to a MIPS one.
///
/// If the results of this method are only manipulated by the same
/// architecture that produced them, then there is no portability concern.
///
/// If the input isn't NaN, then there is no portability concern.
///
/// If you don't care about signalingness (very likely), then there is no
/// portability concern.
///
/// Note that this function is distinct from `as` casting, which attempts to
/// preserve the *numeric* value, and not the bitwise value.
///
/// # Examples
///
/// ```
/// let v = f64::from_bits(0x4029000000000000);
/// assert_eq!(v, 12.5);
/// ```
#[stable(feature = "float_bits_conv", since = "1.20.0")]
#[inline]
pub fn from_bits(v: u64) -> Self {
// SAFETY: `u64` is a plain old datatype so we can always transmute from it
// It turns out the safety issues with sNaN were overblown! Hooray!
unsafe { mem::transmute(v) }
}
/// Return the memory representation of this floating point number as a byte array in
/// big-endian (network) byte order.
///
/// # Examples
///
/// ```
/// let bytes = 12.5f64.to_be_bytes();
/// assert_eq!(bytes, [0x40, 0x29, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]);
/// ```
#[stable(feature = "float_to_from_bytes", since = "1.40.0")]
#[inline]
pub fn to_be_bytes(self) -> [u8; 8] {
self.to_bits().to_be_bytes()
}
/// Return the memory representation of this floating point number as a byte array in
/// little-endian byte order.
///
/// # Examples
///
/// ```
/// let bytes = 12.5f64.to_le_bytes();
/// assert_eq!(bytes, [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x29, 0x40]);
/// ```
#[stable(feature = "float_to_from_bytes", since = "1.40.0")]
#[inline]
pub fn to_le_bytes(self) -> [u8; 8] {
self.to_bits().to_le_bytes()
}
/// Return the memory representation of this floating point number as a byte array in
/// native byte order.
///
/// As the target platform's native endianness is used, portable code
/// should use [`to_be_bytes`] or [`to_le_bytes`], as appropriate, instead.
///
/// [`to_be_bytes`]: #method.to_be_bytes
/// [`to_le_bytes`]: #method.to_le_bytes
///
/// # Examples
///
/// ```
/// let bytes = 12.5f64.to_ne_bytes();
/// assert_eq!(
/// bytes,
/// if cfg!(target_endian = "big") {
/// [0x40, 0x29, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
/// } else {
/// [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x29, 0x40]
/// }
/// );
/// ```
#[stable(feature = "float_to_from_bytes", since = "1.40.0")]
#[inline]
pub fn to_ne_bytes(self) -> [u8; 8] {
self.to_bits().to_ne_bytes()
}
/// Create a floating point value from its representation as a byte array in big endian.
///
/// # Examples
///
/// ```
/// let value = f64::from_be_bytes([0x40, 0x29, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]);
/// assert_eq!(value, 12.5);
/// ```
#[stable(feature = "float_to_from_bytes", since = "1.40.0")]
#[inline]
pub fn from_be_bytes(bytes: [u8; 8]) -> Self {
Self::from_bits(u64::from_be_bytes(bytes))
}
/// Create a floating point value from its representation as a byte array in little endian.
///
/// # Examples
///
/// ```
/// let value = f64::from_le_bytes([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x29, 0x40]);
/// assert_eq!(value, 12.5);
/// ```
#[stable(feature = "float_to_from_bytes", since = "1.40.0")]
#[inline]
pub fn from_le_bytes(bytes: [u8; 8]) -> Self {
Self::from_bits(u64::from_le_bytes(bytes))
}
/// Create a floating point value from its representation as a byte array in native endian.
///
/// As the target platform's native endianness is used, portable code
/// likely wants to use [`from_be_bytes`] or [`from_le_bytes`], as
/// appropriate instead.
///
/// [`from_be_bytes`]: #method.from_be_bytes
/// [`from_le_bytes`]: #method.from_le_bytes
///
/// # Examples
///
/// ```
/// let value = f64::from_ne_bytes(if cfg!(target_endian = "big") {
/// [0x40, 0x29, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00]
/// } else {
/// [0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x29, 0x40]
/// });
/// assert_eq!(value, 12.5);
/// ```
#[stable(feature = "float_to_from_bytes", since = "1.40.0")]
#[inline]
pub fn from_ne_bytes(bytes: [u8; 8]) -> Self {
Self::from_bits(u64::from_ne_bytes(bytes))
}
} | (_, EXP_MASK) => FpCategory::Nan,
_ => FpCategory::Normal,
} |
subtract-the-product-and-sum-of-digits-of-an-integer.rs | impl Solution {
pub fn subtract_product_and_sum(mut n: i32) -> i32 {
let mut prod = 1;
let mut sum = 0;
while n > 0 {
prod *= n % 10;
sum += n % 10;
n /= 10; | prod - sum
}
} | } |
UrlManage.py |
import calendar
import datetime
import json
import os
import os.path
import shutil
import tqdm
import traceback
from concurrent.futures import ThreadPoolExecutor
import urllib.error
import urllib.parse
from sqlalchemy import and_
from sqlalchemy import or_
import sqlalchemy.exc
# When run as a script (rather than imported), initialize the project's
# logging configuration before the heavyweight imports below run.
if __name__ == "__main__":
    import logSetup
    logSetup.initLogging()
import common.database as db
import common.Exceptions
import common.management.util
import common.management.file_cleanup
import common.management.WebMirrorManage
import WebMirror.processor.RssProcessor
import flags
import pprint
import config
from config import C_RAW_RESOURCE_DIR
import WebMirror.OutputFilters.rss.FeedDataParser
import WebMirror.OutputFilters.util.feedNameLut
import WebRequest
import WebMirror.Engine
def exposed_fix_lndb_urls():
    '''
    List every WebPages row whose netloc is "lndb.info" and print the result.

    NOTE(review): the previous docstring described scanning qidian feed items
    and building a url-seg -> title table; that matches the commented-out code
    below, not the active query. Confirm whether the dead code should be
    revived or removed.
    '''
    with db.session_context() as sess:
        # Fetch all lndb.info pages; no filtering beyond the netloc match.
        pages = sess.query(db.WebPages) \
            .filter(db.WebPages.netloc == "lndb.info") \
            .all()

        print(pages)

    # feed_url = feed_item.urls[0].feed_url
    # pfunc = feed_item.get_func()
    # missing = []
    # for release in feed_item.releases:
    # 	item = {}
    # 	item['title'] = release.title
    # 	item['guid'] = release.contentid
    # 	item['linkUrl'] = release.contenturl
    # 	item['feedUrl'] = feed_url
    # 	item['srcname'] = "wat"
    # 	item['published'] = "wat"
    # 	ret = pfunc(item)
    # 	if not ret:
    # 		missing.append(release.contenturl)
    # urls = {}
    # for url in missing:
    # 	root, _ = url.rsplit("/", 1)
    # 	urls[root] = url
    # wg = WebRequest.WebGetRobust()
    # lines = []
    # for root, url in urls.items():
    # 	urlfrag = root.split("www")[-1]
    # 	meta = common.management.util.get_page_title(wg, url)
    # 	title = meta['title']
    # 	outstr = "		('www{}/', '{}', 'translated'),".format(urlfrag, title)
    # 	lines.append(outstr)
    # for outstr in lines:
    # 	print(outstr)
def exposed_fix_nu_duplicate_url_segments():
    '''
    So the novelupdate scrape borked somewhere, and is generating duplicate URLs.
    Gah.

    Rows whose outbound_wrapper contains a second "http" (a URL with another URL
    concatenated onto it) are repaired: if a row with the corrected URL already
    exists, the broken row's children are re-parented onto it and the broken row
    is deleted; otherwise the broken row is updated in place.
    '''
    with db.session_context() as sess:
        print("Querying for rows")
        res = sess.query(db.NuReleaseItem.id, db.NuReleaseItem.outbound_wrapper).all()
        print("Found %s nu outbound wrappers" % len(res))

    total = 0
    bad = 0
    urlmap = {}    # corrected url -> surviving row id
    fix_list = []  # (row id, mangled url) pairs needing repair
    print("Parsing URLs")
    for dbid, url in res:
        total += 1
        if url.count("http") > 1:
            bad += 1
            fix_list.append((dbid, url))
        else:
            urlmap[url] = dbid
    print("Found %s links, %s of which are invalid" % (total, bad))

    count = 0
    with db.session_context() as sess:
        for dbid, url in tqdm.tqdm(fix_list, desc="Fixing duplicate NU urls."):
            # Assumes the mangled value is "<https url><http://...junk>", so
            # "http://" occurs exactly once and everything before it is the
            # real URL — TODO confirm against live data.
            actual_url, _ = url.split("http://")
            if actual_url in urlmap:
                # A row with the corrected URL already exists: re-parent the
                # resolved-outbound children, then drop the duplicate row.
                res = sess.query(db.NuResolvedOutbound).filter_by(parent=dbid).update({"parent" : urlmap[actual_url]})
                res = sess.query(db.NuReleaseItem).filter_by(id=dbid).delete()
            else:
                res = sess.query(db.NuReleaseItem).filter_by(id=dbid).update({"outbound_wrapper" : actual_url})
                # BUG FIX: key the map by the *corrected* URL (previously
                # `urlmap[url] = dbid`, keyed by the mangled URL), otherwise
                # later rows reducing to the same URL are never deduplicated.
                urlmap[actual_url] = dbid
            count += 1
            # Commit in batches so a long run doesn't hold one huge transaction.
            if count > 2500:
                count = 0
                sess.commit()

        sess.commit()
def exposed_scan_for_masked_urls():
    '''
    Do a streaming iteration over the rows in the database, and run them through the url filtering mechanism to see
    if any are actually not wanted.
    '''
    engine = WebMirror.Engine.SiteArchiver(None, None, None)
    with db.session_context() as sess:
        print("Querying for rows")
        row_iter = sess.query(db.WebPages.id, db.WebPages.url, db.WebPages.netloc) \
            .order_by(db.WebPages.netloc) \
            .yield_per(1000)

        total = 0
        rejected = []
        for row_id, row_url, row_netloc in tqdm.tqdm(row_iter):
            total += 1
            # Anything the engine's link filter would not fetch is flagged.
            if not engine.external_link_check(row_netloc, row_url):
                rejected.append((row_netloc, row_url, row_id))

        print("Found %s rows. Saving" % total)
        with open("delete_netlocs.json", "w") as fp:
            json.dump(rejected, fp, indent=4)
        print("Saved to output json")
| exposed_fix_nu_duplicate_url_segments |
query.pb.gw.go | // Code generated by protoc-gen-grpc-gateway. DO NOT EDIT.
// source: lbm/evidence/v1beta1/query.proto
/*
Package types is a reverse proxy.
It translates gRPC into RESTful JSON APIs.
*/
package types
import (
"context"
"io"
"net/http"
"github.com/golang/protobuf/descriptor"
"github.com/golang/protobuf/proto"
"github.com/grpc-ecosystem/grpc-gateway/runtime"
"github.com/grpc-ecosystem/grpc-gateway/utilities"
"google.golang.org/grpc"
"google.golang.org/grpc/codes"
"google.golang.org/grpc/grpclog"
"google.golang.org/grpc/status"
)
// Suppress "imported and not used" errors: these blank declarations keep
// every import referenced even when the generated handlers below happen not
// to use all of them.
var _ codes.Code
var _ io.Reader
var _ status.Status
var _ = runtime.String
var _ = utilities.NewDoubleArray
var _ = descriptor.ForMessage
// request_Query_Evidence_0 translates an HTTP request into a
// QueryClient.Evidence gRPC call: the "evidence_hash" path parameter is
// decoded via runtime.Bytes into the request message.
func request_Query_Evidence_0(ctx context.Context, marshaler runtime.Marshaler, client QueryClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
	var protoReq QueryEvidenceRequest
	var metadata runtime.ServerMetadata

	var (
		val string
		ok  bool
		err error
		_   = err
	)

	val, ok = pathParams["evidence_hash"]
	if !ok {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "evidence_hash")
	}

	protoReq.EvidenceHash, err = runtime.Bytes(val)
	if err != nil {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "evidence_hash", err)
	}

	msg, err := client.Evidence(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
	return msg, metadata, err
}

// local_request_Query_Evidence_0 is the in-process variant of
// request_Query_Evidence_0: it invokes the QueryServer directly instead of
// going through a gRPC client connection.
func local_request_Query_Evidence_0(ctx context.Context, marshaler runtime.Marshaler, server QueryServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
	var protoReq QueryEvidenceRequest
	var metadata runtime.ServerMetadata

	var (
		val string
		ok  bool
		err error
		_   = err
	)

	val, ok = pathParams["evidence_hash"]
	if !ok {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "missing parameter %s", "evidence_hash")
	}

	protoReq.EvidenceHash, err = runtime.Bytes(val)
	if err != nil {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "type mismatch, parameter: %s, error: %v", "evidence_hash", err)
	}

	msg, err := server.Evidence(ctx, &protoReq)
	return msg, metadata, err
}

var (
	// filter_Query_AllEvidence_0 marks which fields are bound from path
	// parameters (none here), so PopulateQueryParameters maps every query
	// string key onto the request message.
	filter_Query_AllEvidence_0 = &utilities.DoubleArray{Encoding: map[string]int{}, Base: []int(nil), Check: []int(nil)}
)

// request_Query_AllEvidence_0 translates an HTTP request into a
// QueryClient.AllEvidence gRPC call, populating the request message from the
// URL query string.
func request_Query_AllEvidence_0(ctx context.Context, marshaler runtime.Marshaler, client QueryClient, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
	var protoReq QueryAllEvidenceRequest
	var metadata runtime.ServerMetadata

	if err := req.ParseForm(); err != nil {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
	}
	if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_Query_AllEvidence_0); err != nil {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
	}

	msg, err := client.AllEvidence(ctx, &protoReq, grpc.Header(&metadata.HeaderMD), grpc.Trailer(&metadata.TrailerMD))
	return msg, metadata, err
}

// local_request_Query_AllEvidence_0 is the in-process variant of
// request_Query_AllEvidence_0, calling the QueryServer directly.
func local_request_Query_AllEvidence_0(ctx context.Context, marshaler runtime.Marshaler, server QueryServer, req *http.Request, pathParams map[string]string) (proto.Message, runtime.ServerMetadata, error) {
	var protoReq QueryAllEvidenceRequest
	var metadata runtime.ServerMetadata

	if err := req.ParseForm(); err != nil {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
	}
	if err := runtime.PopulateQueryParameters(&protoReq, req.Form, filter_Query_AllEvidence_0); err != nil {
		return nil, metadata, status.Errorf(codes.InvalidArgument, "%v", err)
	}

	msg, err := server.AllEvidence(ctx, &protoReq)
	return msg, metadata, err
}
// RegisterQueryHandlerServer registers the http handlers for service Query to "mux".
// UnaryRPC :call QueryServer directly.
// StreamingRPC :currently unsupported pending https://github.com/grpc/grpc-go/issues/906.
// Note that using this registration option will cause many gRPC library features (such as grpc.SendHeader, etc) to stop working. Consider using RegisterQueryHandlerFromEndpoint instead.
func RegisterQueryHandlerServer(ctx context.Context, mux *runtime.ServeMux, server QueryServer) error {

	// GET /…/evidence/{evidence_hash} -> QueryServer.Evidence (in-process).
	mux.Handle("GET", pattern_Query_Evidence_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
		ctx, cancel := context.WithCancel(req.Context())
		defer cancel()
		inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
		rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}
		resp, md, err := local_request_Query_Evidence_0(rctx, inboundMarshaler, server, req, pathParams)
		ctx = runtime.NewServerMetadataContext(ctx, md)
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}

		forward_Query_Evidence_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)

	})

	// GET for the AllEvidence listing -> QueryServer.AllEvidence (in-process).
	mux.Handle("GET", pattern_Query_AllEvidence_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
		ctx, cancel := context.WithCancel(req.Context())
		defer cancel()
		inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
		rctx, err := runtime.AnnotateIncomingContext(ctx, mux, req)
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}
		resp, md, err := local_request_Query_AllEvidence_0(rctx, inboundMarshaler, server, req, pathParams)
		ctx = runtime.NewServerMetadataContext(ctx, md)
		if err != nil {
			runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
			return
		}

		forward_Query_AllEvidence_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)

	})

	return nil
}
// RegisterQueryHandlerFromEndpoint is same as RegisterQueryHandler but
// automatically dials to "endpoint" and closes the connection when "ctx" gets done.
func RegisterQueryHandlerFromEndpoint(ctx context.Context, mux *runtime.ServeMux, endpoint string, opts []grpc.DialOption) (err error) {
conn, err := grpc.Dial(endpoint, opts...)
if err != nil {
return err
}
defer func() {
if err != nil {
if cerr := conn.Close(); cerr != nil {
grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr)
}
return
}
go func() {
<-ctx.Done()
if cerr := conn.Close(); cerr != nil {
grpclog.Infof("Failed to close conn to %s: %v", endpoint, cerr)
}
}()
}()
return RegisterQueryHandler(ctx, mux, conn)
}
// RegisterQueryHandler registers the http handlers for service Query to "mux".
// The handlers forward requests to the grpc endpoint over "conn".
func RegisterQueryHandler(ctx context.Context, mux *runtime.ServeMux, conn *grpc.ClientConn) error {
return RegisterQueryHandlerClient(ctx, mux, NewQueryClient(conn))
}
// RegisterQueryHandlerClient registers the http handlers for service Query
// to "mux". The handlers forward requests to the grpc endpoint over the given implementation of "QueryClient".
// Note: the gRPC framework executes interceptors within the gRPC handler. If the passed in "QueryClient"
// doesn't go through the normal gRPC flow (creating a gRPC client etc.) then it will be up to the passed in
// "QueryClient" to call the correct interceptors.
func | (ctx context.Context, mux *runtime.ServeMux, client QueryClient) error {
mux.Handle("GET", pattern_Query_Evidence_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateContext(ctx, mux, req)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_Query_Evidence_0(rctx, inboundMarshaler, client, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_Query_Evidence_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
mux.Handle("GET", pattern_Query_AllEvidence_0, func(w http.ResponseWriter, req *http.Request, pathParams map[string]string) {
ctx, cancel := context.WithCancel(req.Context())
defer cancel()
inboundMarshaler, outboundMarshaler := runtime.MarshalerForRequest(mux, req)
rctx, err := runtime.AnnotateContext(ctx, mux, req)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
resp, md, err := request_Query_AllEvidence_0(rctx, inboundMarshaler, client, req, pathParams)
ctx = runtime.NewServerMetadataContext(ctx, md)
if err != nil {
runtime.HTTPError(ctx, mux, outboundMarshaler, w, req, err)
return
}
forward_Query_AllEvidence_0(ctx, mux, outboundMarshaler, w, req, resp, mux.GetForwardResponseOptions()...)
})
return nil
}
var (
pattern_Query_Evidence_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 1, 1, 0, 4, 1, 5, 3}, []string{"lbm", "evidence", "v1beta1", "evidence_hash"}, "", runtime.AssumeColonVerbOpt(true)))
pattern_Query_AllEvidence_0 = runtime.MustPattern(runtime.NewPattern(1, []int{2, 0, 2, 1, 2, 2, 2, 1}, []string{"lbm", "evidence", "v1beta1"}, "", runtime.AssumeColonVerbOpt(true)))
)
var (
forward_Query_Evidence_0 = runtime.ForwardResponseMessage
forward_Query_AllEvidence_0 = runtime.ForwardResponseMessage
)
| RegisterQueryHandlerClient |
interface.go | package keytransform
import ds "github.com/jbenet/go-datastore"
// KeyMapping is a function that maps one key to annother | // transforming keys
type KeyTransform interface {
ConvertKey(ds.Key) ds.Key
InvertKey(ds.Key) ds.Key
}
// Datastore is a keytransform.Datastore
type Datastore interface {
ds.Shim
KeyTransform
}
// Wrap wraps a given datastore with a KeyTransform function.
// The resulting wrapped datastore will use the transform on all Datastore
// operations.
func Wrap(child ds.Datastore, t KeyTransform) *ktds {
if t == nil {
panic("t (KeyTransform) is nil")
}
if child == nil {
panic("child (ds.Datastore) is nil")
}
return &ktds{child: child, KeyTransform: t}
} | type KeyMapping func(ds.Key) ds.Key
// KeyTransform is an object with a pair of functions for (invertibly) |
mod.rs | use std::fmt;
use std::future::Future;
use std::io;
use std::pin::Pin;
use std::task::{
Context,
Poll::{self, *},
};
use hyper::client::connect::{Connect, Connected, Destination};
use tokio_net::uds::UnixStream;
// TODO: https://github.com/hyperium/hyper/blob/8f4b05ae78567dfc52236bc83d7be7b7fc3eebb0/src/client/connect/http.rs#L19-L20
type ConnectFuture = Pin<Box<dyn Future<Output = io::Result<UnixStream>> + Send>>;
use super::Uri;
/// A type which implements hyper's client connector interface
/// for unix domain sockets
///
/// `UnixConnector` instances expects uri's
/// to be constructued with `hyperlocal::Uri::new()` which produce uris with a `unix://`
/// scheme
///
/// # Examples
///
/// ```rust
/// use hyper::{Body, Client};
/// use hyperlocal::UnixConnector;
///
/// let client = hyper::Client::builder()
/// .build::<_, hyper::Body>(UnixConnector::default());
/// ```
#[derive(Clone, Debug, Default)]
pub struct UnixConnector;
impl Connect for UnixConnector {
type Transport = UnixStream;
type Error = io::Error;
type Future = UnixConnecting;
fn connect(&self, destination: Destination) -> Self::Future {
match Uri::parse_socket_path(destination.scheme(), destination.host()) {
Ok(path) => UnixConnecting::Connecting(Box::pin(UnixStream::connect(path))),
Err(err) => UnixConnecting::Error(Some(err)),
}
}
}
#[doc(hidden)]
pub enum | {
Connecting(ConnectFuture),
Error(Option<io::Error>),
}
impl fmt::Debug for UnixConnecting {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("UnixConnecting")
.finish()
}
}
impl Future for UnixConnecting {
type Output = Result<(UnixStream, Connected), io::Error>;
fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
let this = self.get_mut();
match this {
UnixConnecting::Connecting(ref mut f) => match Pin::new(f).poll(cx) {
Ready(Ok(stream)) => Ready(Ok((stream, Connected::new()))),
Pending => Pending,
Ready(Err(err)) => Ready(Err(err)),
},
UnixConnecting::Error(ref mut e) => {
Poll::Ready(Err(e.take().expect("polled more than once")))
}
}
}
}
| UnixConnecting |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.