Dataset columns (per row):
prompt: large_string, lengths 70 to 991k
completion: large_string, lengths 0 to 1.02k
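Each row below pairs a "prompt" that embeds a source file using the <|file_name|>, <|fim▁begin|>, <|fim▁hole|> and <|fim▁end|> sentinel tokens with a "completion" that fills the hole. The following minimal Python sketch is not part of the dataset; the helper name is illustrative, and it assumes the sentinels appear exactly as printed in the rows. It shows one way a row can be split apart and recombined.

FILE_NAME = "<|file_name|>"
END_FILE_NAME = "<|end_file_name|>"
FIM_BEGIN = "<|fim▁begin|>"   # the separator character is U+2581, as it appears in the rows
FIM_HOLE = "<|fim▁hole|>"
FIM_END = "<|fim▁end|>"

def reassemble(prompt: str, completion: str) -> str:
    """Rebuild the original file text from a FIM prompt and its completion."""
    # Drop the optional file-name header.
    if prompt.startswith(FILE_NAME):
        prompt = prompt.split(END_FILE_NAME, 1)[1]
    # The body between the begin/end markers is prefix + hole marker + suffix.
    body = prompt.split(FIM_BEGIN, 1)[1].rsplit(FIM_END, 1)[0]
    prefix, suffix = body.split(FIM_HOLE, 1)
    # The completion is the text that belongs in the hole.
    return prefix + completion + suffix

Applied to the first row below, this yields the manage.py script reconstructed just after it.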
<|file_name|>manage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import os import sys if __name__ == "__main__":<|fim▁hole|> execute_from_command_line(sys.argv)<|fim▁end|>
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wellspring.settings") from django.core.management import execute_from_command_line
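For illustration, recombining the prefix, completion, and suffix of the first row gives back the Django manage.py script shown below. Line breaks and indentation are inferred from the standard Django layout, since the row stores the file on a single line.

#!/usr/bin/env python
import os
import sys

if __name__ == "__main__":
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "wellspring.settings")

    from django.core.management import execute_from_command_line

    execute_from_command_line(sys.argv)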
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright © 2015, Peter Atashian // Licensed under the MIT License <LICENSE.md> //! FFI bindings to gdi32. #![cfg(all(windows, any(target_arch = "x86", target_arch = "x86_64")))] extern crate winapi; use winapi::*; extern "system" { pub fn AbortDoc(hdc: HDC) -> c_int; pub fn AbortPath(hdc: HDC) -> BOOL; pub fn AddFontMemResourceEx( pbFont: PVOID, cbSize: DWORD, pdv: PVOID, pcFonts: *mut DWORD, ) -> HANDLE; pub fn AddFontResourceA(lpszFilename: LPCSTR) -> c_int; pub fn AddFontResourceExA(lpszFilename: LPCSTR, fl: DWORD, pdv: PVOID) -> c_int; pub fn AddFontResourceExW(lpszFilename: LPCWSTR, fl: DWORD, pdv: PVOID) -> c_int; pub fn AddFontResourceW(lpszFilename: LPCWSTR) -> c_int; pub fn AngleArc( hdc: HDC, X: c_int, Y: c_int, dwRadius: DWORD, eStartAngle: FLOAT, eSweepAngle: FLOAT, ) -> BOOL; pub fn AnimatePalette( hpal: HPALETTE, iStartIndex: UINT, cEntries: UINT, ppe: *const PALETTEENTRY, ) -> BOOL; pub fn Arc( hdc: HDC, nLeftRect: c_int, nTopRect: c_int, nRightRect: c_int, nBottomRect: c_int, nXStartArc: c_int, nYStartArc: c_int, nXEndArc: c_int, nYEndArc: c_int, ) -> BOOL; pub fn ArcTo( hdc: HDC, nLeftRect: c_int, nTopRect: c_int, nRightRect: c_int, nBottomRect: c_int, nXRadial1: c_int, nYRadial1: c_int, nXRadial2: c_int, nYRadial2: c_int, ) -> BOOL; pub fn BeginPath(hdc: HDC) -> BOOL; pub fn BitBlt( hdc: HDC, x: c_int, y: c_int, cx: c_int, cy: c_int, hdcSrc: HDC, x1: c_int, y1: c_int, rop: DWORD, ) -> BOOL; pub fn CancelDC(hdc: HDC) -> BOOL; pub fn CheckColorsInGamut( hDC: HDC, lpRGBTriples: LPVOID, lpBuffer: LPVOID, nCount: UINT, ) -> BOOL; pub fn ChoosePixelFormat(hdc: HDC, ppfd: *const PIXELFORMATDESCRIPTOR) -> c_int; pub fn Chord( hdc: HDC, nLeftRect: c_int, nTopRect: c_int, nRightRect: c_int, nBottomRect: c_int, nXRadial1: c_int, nYRadial1: c_int, nXRadial2: c_int, nYRadial2: c_int, ) -> BOOL; pub fn CloseEnhMetaFile(hdc: HDC) -> HENHMETAFILE; pub fn CloseFigure(hdc: HDC) -> BOOL; pub fn CloseMetaFile(hdc: HDC) -> HMETAFILE; pub fn ColorCorrectPalette( hDC: HDC, hPalette: HPALETTE, dwFirstEntry: DWORD, dwNumOfEntries: DWORD, ) -> BOOL; pub fn ColorMatchToTarget(hDC: HDC, hdcTarget: HDC, uiAction: UINT) -> BOOL; pub fn CombineRgn( hrgnDst: HRGN, hrgnSrc1: HRGN, hrgnSrc2: HRGN, fnCombineMode: c_int, ) -> c_int; pub fn CombineTransform( lpxformResult: LPXFORM, lpxform1: *const XFORM, lpxform2: *const XFORM, ) -> BOOL; pub fn CopyEnhMetaFileA(hemfSrc: HENHMETAFILE, lpszFile: LPCSTR) -> HENHMETAFILE; pub fn CopyEnhMetaFileW(hemfSrc: HENHMETAFILE, lpszFile: LPCWSTR) -> HENHMETAFILE; pub fn CopyMetaFileA(hmfSrc: HMETAFILE, lpszFile: LPCSTR) -> HMETAFILE; pub fn CopyMetaFileW(hmfSrc: HMETAFILE, lpszFile: LPCWSTR) -> HMETAFILE; pub fn CreateBitmap( nWidth: c_int, nHeight: c_int, nPlanes: UINT, nBitCount: UINT, lpBits: *const c_void, ) -> HBITMAP; pub fn CreateBitmapIndirect(pbm: *const BITMAP) -> HBITMAP; pub fn CreateBrushIndirect(lplb: *const LOGBRUSH) -> HBRUSH; pub fn CreateColorSpaceA(lpLogColorSpace: LPLOGCOLORSPACEA) -> HCOLORSPACE; pub fn CreateColorSpaceW(lpLogColorSpace: LPLOGCOLORSPACEW) -> HCOLORSPACE; pub fn CreateCompatibleBitmap(hdc: HDC, cx: c_int, cy: c_int) -> HBITMAP; pub fn CreateCompatibleDC(hdc: HDC) -> HDC; pub fn CreateDCA( lpszDriver: LPCSTR, lpszDevice: LPCSTR, lpszOutput: LPCSTR, lpInitData: *const DEVMODEA, ) -> HDC; pub fn CreateDCW( lpszDriver: LPCWSTR, lpszDevice: LPCWSTR, lpszOutput: LPCWSTR, lpInitData: *const DEVMODEW, ) -> HDC; pub fn CreateDIBPatternBrush(hglbDIBPacked: HGLOBAL, fuColorSpec: UINT) -> 
HBRUSH; pub fn CreateDIBPatternBrushPt(lpPackedDIB: *const VOID, iUsage: UINT) -> HBRUSH; pub fn CreateDIBSection( hdc: HDC, lpbmi: *const BITMAPINFO, usage: UINT, ppvBits: *mut *mut c_void, hSection: HANDLE, offset: DWORD, ) -> HBITMAP; pub fn CreateDIBitmap( hdc: HDC, pbmih: *const BITMAPINFOHEADER, flInit: DWORD, pjBits: *const c_void, pbmi: *const BITMAPINFO, iUsage: UINT, ) -> HBITMAP; pub fn CreateDiscardableBitmap(hdc: HDC, nWidth: c_int, nHeight: c_int) -> HBITMAP; pub fn CreateEllipticRgn( nLeftRect: c_int, nTopRect: c_int, nRightRect: c_int, nBottomRect: c_int, ) -> HRGN; pub fn CreateEllipticRgnIndirect(lprc: *const RECT) -> HRGN; pub fn CreateEnhMetaFileA( hdcRef: HDC, lpFilename: LPCSTR, lpRect: *const RECT, lpDescription: LPCSTR, ) -> HDC; pub fn CreateEnhMetaFileW( hdcRef: HDC, lpFilename: LPCWSTR, lpRect: *const RECT, lpDescription: LPCWSTR, ) -> HDC; pub fn CreateFontA( cHeight: c_int, cWidth: c_int, cEscapement: c_int, cOrientation: c_int, cWeight: c_int, bItalic: DWORD, bUnderline: DWORD, bStrikeOut: DWORD, iCharSet: DWORD, iOutPrecision: DWORD, iClipPrecision: DWORD, iQuality: DWORD, iPitchAndFamily: DWORD, pszFaceName: LPCSTR, ) -> HFONT; pub fn CreateFontIndirectA(lplf: *const LOGFONTA) -> HFONT; pub fn CreateFontIndirectExA(penumlfex: *const ENUMLOGFONTEXDVA) -> HFONT; pub fn CreateFontIndirectExW(penumlfex: *const ENUMLOGFONTEXDVW) -> HFONT; pub fn CreateFontIndirectW(lplf: *const LOGFONTW) -> HFONT; pub fn CreateFontW( cHeight: c_int, cWidth: c_int, cEscapement: c_int, cOrientation: c_int, cWeight: c_int, bItalic: DWORD, bUnderline: DWORD, bStrikeOut: DWORD, iCharSet: DWORD, iOutPrecision: DWORD, iClipPrecision: DWORD, iQuality: DWORD, iPitchAndFamily: DWORD, pszFaceName: LPCWSTR, ) -> HFONT; pub fn CreateHalftonePalette(hdc: HDC) -> HPALETTE; pub fn CreateHatchBrush(fnStyle: c_int, clrref: COLORREF) -> HBRUSH; pub fn CreateICA( lpszDriver: LPCSTR, lpszDevice: LPCSTR, lpszOutput: LPCSTR, lpdvmInit: *const DEVMODEA, ) -> HDC; pub fn CreateICW( lpszDriver: LPCWSTR, lpszDevice: LPCWSTR, lpszOutput: LPCWSTR, lpdvmInit: *const DEVMODEW, ) -> HDC; pub fn CreateMetaFileA(lpszFile: LPCSTR) -> HDC; pub fn CreateMetaFileW(lpszFile: LPCWSTR) -> HDC; pub fn CreatePalette(lplgpl: *const LOGPALETTE) -> HPALETTE; pub fn CreatePatternBrush(hbmp: HBITMAP) -> HBRUSH; pub fn CreatePen(fnPenStyle: c_int, nWidth: c_int, crColor: COLORREF) -> HPEN; pub fn CreatePenIndirect(lplgpn: *const LOGPEN) -> HPEN; pub fn CreatePolyPolygonRgn( lppt: *const POINT, lpPolyCounts: *const INT, nCount: c_int, fnPolyFillMode: c_int, ) -> HRGN; pub fn CreatePolygonRgn(lppt: *const POINT, cPoints: c_int, fnPolyFillMode: c_int) -> HRGN; pub fn CreateRectRgn( nLeftRect: c_int, nTopRect: c_int, nRightRect: c_int, nBottomRect: c_int, ) -> HRGN; // pub fn CreateRectRgnIndirect(); // pub fn CreateRoundRectRgn(); // pub fn CreateScalableFontResourceA(); // pub fn CreateScalableFontResourceW(); pub fn CreateSolidBrush(color: COLORREF) -> HBRUSH; // pub fn D3DKMTAcquireKeyedMutex(); // pub fn D3DKMTAcquireKeyedMutex2(); // pub fn D3DKMTCacheHybridQueryValue(); // pub fn D3DKMTCheckExclusiveOwnership(); // pub fn D3DKMTCheckMonitorPowerState(); // pub fn D3DKMTCheckMultiPlaneOverlaySupport(); // pub fn D3DKMTCheckOcclusion(); // pub fn D3DKMTCheckSharedResourceAccess(); // pub fn D3DKMTCheckVidPnExclusiveOwnership(); // pub fn D3DKMTCloseAdapter(); // pub fn D3DKMTConfigureSharedResource(); // pub fn D3DKMTCreateAllocation(); // pub fn D3DKMTCreateAllocation2(); // pub fn D3DKMTCreateContext(); // pub fn 
D3DKMTCreateDCFromMemory(); // pub fn D3DKMTCreateDevice(); // pub fn D3DKMTCreateKeyedMutex(); // pub fn D3DKMTCreateKeyedMutex2(); // pub fn D3DKMTCreateOutputDupl(); // pub fn D3DKMTCreateOverlay(); // pub fn D3DKMTCreateSynchronizationObject(); // pub fn D3DKMTCreateSynchronizationObject2(); // pub fn D3DKMTDestroyAllocation(); // pub fn D3DKMTDestroyContext(); // pub fn D3DKMTDestroyDCFromMemory(); // pub fn D3DKMTDestroyDevice(); // pub fn D3DKMTDestroyKeyedMutex(); // pub fn D3DKMTDestroyOutputDupl(); // pub fn D3DKMTDestroyOverlay(); // pub fn D3DKMTDestroySynchronizationObject(); // pub fn D3DKMTEnumAdapters(); // pub fn D3DKMTEscape(); // pub fn D3DKMTFlipOverlay(); // pub fn D3DKMTGetCachedHybridQueryValue(); // pub fn D3DKMTGetContextInProcessSchedulingPriority(); // pub fn D3DKMTGetContextSchedulingPriority(); // pub fn D3DKMTGetDeviceState(); // pub fn D3DKMTGetDisplayModeList(); // pub fn D3DKMTGetMultisampleMethodList(); // pub fn D3DKMTGetOverlayState(); // pub fn D3DKMTGetPresentHistory(); // pub fn D3DKMTGetPresentQueueEvent(); // pub fn D3DKMTGetProcessSchedulingPriorityClass(); // pub fn D3DKMTGetRuntimeData(); // pub fn D3DKMTGetScanLine(); // pub fn D3DKMTGetSharedPrimaryHandle(); // pub fn D3DKMTGetSharedResourceAdapterLuid(); // pub fn D3DKMTInvalidateActiveVidPn(); // pub fn D3DKMTLock(); // pub fn D3DKMTNetDispGetNextChunkInfo(); // pub fn D3DKMTNetDispQueryMiracastDisplayDeviceStatus(); // pub fn D3DKMTNetDispQueryMiracastDisplayDeviceSupport(); // pub fn D3DKMTNetDispStartMiracastDisplayDevice2(); // pub fn D3DKMTNetDispStopMiracastDisplayDevice(); // pub fn D3DKMTOfferAllocations(); // pub fn D3DKMTOpenAdapterFromDeviceName(); // pub fn D3DKMTOpenAdapterFromGdiDisplayName(); // pub fn D3DKMTOpenAdapterFromHdc(); // pub fn D3DKMTOpenAdapterFromLuid(); // pub fn D3DKMTOpenKeyedMutex(); // pub fn D3DKMTOpenKeyedMutex2(); // pub fn D3DKMTOpenNtHandleFromName(); // pub fn D3DKMTOpenResource(); // pub fn D3DKMTOpenResource2(); // pub fn D3DKMTOpenResourceFromNtHandle(); // pub fn D3DKMTOpenSyncObjectFromNtHandle(); // pub fn D3DKMTOpenSynchronizationObject(); // pub fn D3DKMTOutputDuplGetFrameInfo(); // pub fn D3DKMTOutputDuplGetMetaData(); // pub fn D3DKMTOutputDuplGetPointerShapeData(); // pub fn D3DKMTOutputDuplPresent(); // pub fn D3DKMTOutputDuplReleaseFrame(); // pub fn D3DKMTPinDirectFlipResources(); // pub fn D3DKMTPollDisplayChildren(); // pub fn D3DKMTPresent(); // pub fn D3DKMTPresentMultiPlaneOverlay(); // pub fn D3DKMTQueryAdapterInfo(); // pub fn D3DKMTQueryAllocationResidency(); // pub fn D3DKMTQueryRemoteVidPnSourceFromGdiDisplayName(); // pub fn D3DKMTQueryResourceInfo(); // pub fn D3DKMTQueryResourceInfoFromNtHandle(); // pub fn D3DKMTQueryStatistics(); // pub fn D3DKMTReclaimAllocations(); // pub fn D3DKMTReleaseKeyedMutex(); // pub fn D3DKMTReleaseKeyedMutex2(); // pub fn D3DKMTReleaseProcessVidPnSourceOwners(); // pub fn D3DKMTRender(); // pub fn D3DKMTSetAllocationPriority(); // pub fn D3DKMTSetContextInProcessSchedulingPriority(); // pub fn D3DKMTSetContextSchedulingPriority(); // pub fn D3DKMTSetDisplayMode(); // pub fn D3DKMTSetDisplayPrivateDriverFormat(); // pub fn D3DKMTSetGammaRamp(); // pub fn D3DKMTSetProcessSchedulingPriorityClass(); // pub fn D3DKMTSetQueuedLimit(); // pub fn D3DKMTSetStereoEnabled(); // pub fn D3DKMTSetVidPnSourceOwner(); // pub fn D3DKMTSetVidPnSourceOwner1(); // pub fn D3DKMTShareObjects(); // pub fn D3DKMTSharedPrimaryLockNotification(); // pub fn D3DKMTSharedPrimaryUnLockNotification(); // pub fn 
D3DKMTSignalSynchronizationObject(); // pub fn D3DKMTSignalSynchronizationObject2(); // pub fn D3DKMTUnlock(); // pub fn D3DKMTUnpinDirectFlipResources(); // pub fn D3DKMTUpdateOverlay(); // pub fn D3DKMTWaitForIdle(); // pub fn D3DKMTWaitForSynchronizationObject(); // pub fn D3DKMTWaitForSynchronizationObject2(); // pub fn D3DKMTWaitForVerticalBlankEvent(); // pub fn D3DKMTWaitForVerticalBlankEvent2(); pub fn DPtoLP(hdc: HDC, lppt: *mut POINT, c: c_int) -> BOOL; // pub fn DeleteColorSpace(); pub fn DeleteDC(hdc: HDC) -> BOOL; // pub fn DeleteEnhMetaFile(); // pub fn DeleteMetaFile(); pub fn DeleteObject(ho: HGDIOBJ) -> BOOL; pub fn DescribePixelFormat( hdc: HDC, iPixelFormat: c_int, nBytes: UINT, ppfd: LPPIXELFORMATDESCRIPTOR, ) -> c_int; // pub fn DeviceCapabilitiesExA(); // pub fn DeviceCapabilitiesExW(); // pub fn DrawEscape(); pub fn Ellipse(hdc: HDC, left: c_int, top: c_int, right: c_int, bottom: c_int) -> BOOL; // pub fn EnableEUDC(); // pub fn EndDoc(); // pub fn EndFormPage(); // pub fn EndPage(); // pub fn EndPath(); // pub fn EnumEnhMetaFile(); // pub fn EnumFontFamiliesA(); // pub fn EnumFontFamiliesExA(); // pub fn EnumFontFamiliesExW(); // pub fn EnumFontFamiliesW(); // pub fn EnumFontsA(); // pub fn EnumFontsW(); // pub fn EnumICMProfilesA(); // pub fn EnumICMProfilesW(); // pub fn EnumMetaFile(); // pub fn EnumObjects(); // pub fn EqualRgn(); // pub fn Escape(); // pub fn EudcLoadLinkW(); // pub fn EudcUnloadLinkW(); // pub fn ExcludeClipRect(); // pub fn ExtCreatePen(); // pub fn ExtCreateRegion(); // pub fn ExtEscape(); // pub fn ExtFloodFill(); pub fn ExtSelectClipRgn(hdc: HDC, hrgn: HRGN, mode: c_int) -> c_int; // pub fn ExtTextOutA(); // pub fn ExtTextOutW(); // pub fn FillPath(); // pub fn FillRgn(); // pub fn FixBrushOrgEx(); // pub fn FlattenPath(); // pub fn FloodFill(); // pub fn FrameRgn(); // pub fn GdiAlphaBlend(); // pub fn GdiArtificialDecrementDriver(); // pub fn GdiComment(); // pub fn GdiDeleteSpoolFileHandle(); // pub fn GdiEndDocEMF(); // pub fn GdiEndPageEMF(); // pub fn GdiFlush(); // pub fn GdiGetBatchLimit(); // pub fn GdiGetDC(); // pub fn GdiGetDevmodeForPage(); // pub fn GdiGetPageCount(); // pub fn GdiGetPageHandle(); // pub fn GdiGetSpoolFileHandle(); // pub fn GdiGradientFill(); // pub fn GdiPlayDCScript(); // pub fn GdiPlayEMF(); // pub fn GdiPlayJournal(); // pub fn GdiPlayPageEMF(); // pub fn GdiPlayPrivatePageEMF(); // pub fn GdiPlayScript(); // pub fn GdiResetDCEMF(); // pub fn GdiSetBatchLimit(); // pub fn GdiStartDocEMF(); // pub fn GdiStartPageEMF(); // pub fn GdiTransparentBlt(); pub fn GetArcDirection(hdc: HDC) -> c_int; // pub fn GetAspectRatioFilterEx(); // pub fn GetBitmapBits(); // pub fn GetBitmapDimensionEx(); // pub fn GetBkColor(); // pub fn GetBkMode(); // pub fn GetBoundsRect(); // pub fn GetBrushOrgEx(); // pub fn GetCharABCWidthsA(); // pub fn GetCharABCWidthsFloatA(); // pub fn GetCharABCWidthsFloatW(); // pub fn GetCharABCWidthsI(); // pub fn GetCharABCWidthsW(); // pub fn GetCharWidth32A(); // pub fn GetCharWidth32W(); // pub fn GetCharWidthA(); // pub fn GetCharWidthFloatA(); // pub fn GetCharWidthFloatW(); // pub fn GetCharWidthI(); // pub fn GetCharWidthW(); // pub fn GetCharacterPlacementA(); // pub fn GetCharacterPlacementW(); // pub fn GetClipBox(); // pub fn GetClipRgn(); // pub fn GetColorAdjustment(); // pub fn GetColorSpace(); // pub fn GetCurrentObject(); // pub fn GetCurrentPositionEx(); // pub fn GetDCBrushColor(); // pub fn GetDCOrgEx(); // pub fn GetDCPenColor(); // pub fn GetDIBColorTable(); pub fn 
GetDIBits( hdc: HDC, hbm: HBITMAP, start: UINT, cLines: UINT, lpvBits: LPVOID, lpbmi: LPBITMAPINFO, usage: UINT ) -> c_int; pub fn GetDeviceCaps(hdc: HDC, nIndex: c_int) -> c_int; // pub fn GetDeviceGammaRamp(); // pub fn GetEnhMetaFileA(); // pub fn GetEnhMetaFileBits(); // pub fn GetEnhMetaFileDescriptionA(); // pub fn GetEnhMetaFileDescriptionW(); // pub fn GetEnhMetaFileHeader(); // pub fn GetEnhMetaFilePaletteEntries(); // pub fn GetEnhMetaFilePixelFormat(); // pub fn GetEnhMetaFileW(); // pub fn GetFontAssocStatus();<|fim▁hole|> // pub fn GetGlyphIndicesA(); // pub fn GetGlyphIndicesW(); // pub fn GetGlyphOutline(); // pub fn GetGlyphOutlineA(); // pub fn GetGlyphOutlineW(); // pub fn GetGraphicsMode(); // pub fn GetICMProfileA(); // pub fn GetICMProfileW(); // pub fn GetKerningPairs(); // pub fn GetKerningPairsA(); // pub fn GetKerningPairsW(); // pub fn GetLayout(); // pub fn GetLogColorSpaceA(); // pub fn GetLogColorSpaceW(); // pub fn GetMapMode(); // pub fn GetMetaFileA(); // pub fn GetMetaFileBitsEx(); // pub fn GetMetaFileW(); // pub fn GetMetaRgn(); // pub fn GetMiterLimit(); // pub fn GetNearestColor(); // pub fn GetNearestPaletteIndex(); // pub fn GetObjectA(); // pub fn GetObjectType(); // pub fn GetObjectW(); // pub fn GetOutlineTextMetricsA(); // pub fn GetOutlineTextMetricsW(); // pub fn GetPaletteEntries(); // pub fn GetPath(); // pub fn GetPixel(); // pub fn GetPixelFormat(); // pub fn GetPolyFillMode(); // pub fn GetROP2(); // pub fn GetRandomRgn(); // pub fn GetRasterizerCaps(); // pub fn GetRegionData(); // pub fn GetRelAbs(); // pub fn GetRgnBox(); pub fn GetStockObject(i: c_int) -> HGDIOBJ; // pub fn GetStretchBltMode(); // pub fn GetSystemPaletteEntries(); // pub fn GetSystemPaletteUse(); // pub fn GetTextAlign(); // pub fn GetTextCharacterExtra(); // pub fn GetTextCharset(); // pub fn GetTextCharsetInfo(); // pub fn GetTextColor(); // pub fn GetTextExtentExPointA(); // pub fn GetTextExtentExPointI(); // pub fn GetTextExtentExPointW(); // pub fn GetTextExtentPoint32A(); // pub fn GetTextExtentPoint32W(); // pub fn GetTextExtentPointA(); // pub fn GetTextExtentPointI(); // pub fn GetTextExtentPointW(); // pub fn GetTextFaceA(); pub fn GetTextFaceW(hdc: HDC, c: c_int, lpName: LPWSTR) -> c_int; // pub fn GetTextMetricsA(); pub fn GetTextMetricsW(hdc: HDC, lptm: *mut TEXTMETRICW) -> BOOL; // pub fn GetViewportExtEx(); // pub fn GetViewportOrgEx(); // pub fn GetWinMetaFileBits(); // pub fn GetWindowExtEx(); // pub fn GetWindowOrgEx(); // pub fn GetWorldTransform(); // pub fn IntersectClipRect(); // pub fn InvertRgn(); // pub fn LPtoDP(); pub fn LineDDA( nXStart: c_int, nYStart: c_int, nXEnd: c_int, nYEnd: c_int, lpLineFunc: LINEDDAPROC, lpData: LPARAM, ) -> BOOL; pub fn LineTo(hdc: HDC, nXEnd: c_int, nYEnd: c_int); // pub fn MaskBlt(); // pub fn ModifyWorldTransform(); pub fn MoveToEx(hdc: HDC, X: c_int, Y: c_int, lpPoint:LPPOINT) -> BOOL; // pub fn OffsetClipRgn(); // pub fn OffsetRgn(); // pub fn OffsetViewportOrgEx(); // pub fn OffsetWindowOrgEx(); // pub fn PaintRgn(); pub fn PatBlt( hdc: HDC, nXLeft: c_int, nYLeft: c_int, nWidth: c_int, nHeight: c_int, dwRop: DWORD, ) -> BOOL; // pub fn PathToRegion(); pub fn Pie( hdc: HDC, nLeftRect: c_int, nTopRect: c_int, nBottomRect: c_int, nXRadial1: c_int, nYRadial1: c_int, nXRadial2: c_int, nYRadial2: c_int, ) -> BOOL; // pub fn PlayEnhMetaFile(); // pub fn PlayEnhMetaFileRecord(); // pub fn PlayMetaFile(); // pub fn PlayMetaFileRecord(); // pub fn PlgBlt(); pub fn PolyBezier(hdc: HDC, lppt: *const POINT, cPoints: DWORD) 
-> BOOL; pub fn PolyBezierTo(hdc: HDC, lppt: *const POINT, cPoints: DWORD) -> BOOL; pub fn PolyDraw(hdc: HDC, lppt: *const POINT, lpbTypes: *const BYTE, cCount: c_int) -> BOOL; pub fn PolyPolygon( hdc: HDC, lpPoints: *const POINT, lpPolyCounts: *const INT, cCount: DWORD, ) -> BOOL; pub fn PolyPolyline( hdc: HDC, lppt: *const POINT, lpdwPolyPoints: *const DWORD, cCount: DWORD, ) -> BOOL; // pub fn PolyTextOutA(); // pub fn PolyTextOutW(); pub fn Polygon(hdc: HDC, lpPoints: *const POINT, nCount: c_int) -> BOOL; pub fn Polyline(hdc: HDC, lppt: *const POINT, cCount: c_int) -> BOOL; pub fn PolylineTo(hdc: HDC, lppt: *const POINT, cCount: DWORD) -> BOOL; // pub fn PtInRegion(); // pub fn PtVisible(); // pub fn RealizePalette(); // pub fn RectInRegion(); // pub fn RectVisible(); pub fn Rectangle(hdc: HDC, left: c_int, top: c_int, right: c_int, bottom: c_int) -> BOOL; // pub fn RemoveFontMemResourceEx(); // pub fn RemoveFontResourceA(); // pub fn RemoveFontResourceExA(); // pub fn RemoveFontResourceExW(); // pub fn RemoveFontResourceW(); // pub fn ResetDCA(); // pub fn ResetDCW(); // pub fn ResizePalette(); pub fn RestoreDC(hdc: HDC, nSavedDC: c_int) -> BOOL; pub fn RoundRect( hdc: HDC, nLeftRect: c_int, nTopRect: c_int, nRightRect: c_int, nBottomRect: c_int, nWidth: c_int, nHeight: c_int, ) -> BOOL; pub fn SaveDC(hdc: HDC) -> c_int; // pub fn ScaleViewportExtEx(); // pub fn ScaleWindowExtEx(); // pub fn SelectBrushLocal(); // pub fn SelectClipPath(); pub fn SelectClipRgn(hdc: HDC, hrgn: HRGN) -> c_int; // pub fn SelectFontLocal(); pub fn SelectObject(hdc: HDC, h: HGDIOBJ) -> HGDIOBJ; // pub fn SelectPalette(); // pub fn SetAbortProc(); pub fn SetArcDirection(hdc: HDC, ArcDirection: c_int) -> c_int; // pub fn SetBitmapBits(); // pub fn SetBitmapDimensionEx(); pub fn SetBkColor(hdc: HDC, color: COLORREF) -> COLORREF; pub fn SetBkMode(hdc: HDC, mode: c_int) -> c_int; // pub fn SetBoundsRect(); // pub fn SetBrushOrgEx(); // pub fn SetColorAdjustment(); // pub fn SetColorSpace(); pub fn SetDCBrushColor(hdc: HDC, color: COLORREF) -> COLORREF; // pub fn SetDCPenColor(); // pub fn SetDIBColorTable(); // pub fn SetDIBits(); // pub fn SetDIBitsToDevice(); // pub fn SetDeviceGammaRamp(); // pub fn SetEnhMetaFileBits(); // pub fn SetFontEnumeration(); // pub fn SetGraphicsMode(); // pub fn SetICMMode(); // pub fn SetICMProfileA(); // pub fn SetICMProfileW(); // pub fn SetLayout(); // pub fn SetMagicColors(); pub fn SetMapMode(hdc: HDC, mode: c_int) -> c_int; // pub fn SetMapperFlags(); // pub fn SetMetaFileBitsEx(); // pub fn SetMetaRgn(); // pub fn SetMiterLimit(); // pub fn SetPaletteEntries(); pub fn SetPixel(hdc: HDC, x: c_int, y: c_int, color: COLORREF) -> COLORREF; pub fn SetPixelFormat( hdc: HDC, iPixelFormat: c_int, ppfd: *const PIXELFORMATDESCRIPTOR, ) -> BOOL; // pub fn SetPixelV(); pub fn SetPolyFillMode(hdc: HDC, iPolyFillMode: c_int); pub fn SetROP2(hdc: HDC, rop2: c_int) -> c_int; pub fn SetRectRgn(hrgn: HRGN, left: c_int, top: c_int, right: c_int, bottom: c_int) -> BOOL; // pub fn SetRelAbs(); // pub fn SetStretchBltMode(); // pub fn SetSystemPaletteUse(); pub fn SetTextAlign(hdc: HDC, align: UINT) -> UINT; // pub fn SetTextCharacterExtra(); pub fn SetTextColor(hdc: HDC, color: COLORREF) -> COLORREF; // pub fn SetTextJustification(); pub fn SetViewportExtEx(hdc: HDC, x: c_int, y: c_int, lpsz: *mut SIZE) -> BOOL; pub fn SetViewportOrgEx(hdc: HDC, x: c_int, y: c_int, lppt: *mut POINT) -> BOOL; // pub fn SetWinMetaFileBits(); pub fn SetWindowExtEx(hdc: HDC, x: c_int, y: c_int, lppt: *mut SIZE) -> 
BOOL; // pub fn SetWindowOrgEx(); // pub fn SetWorldTransform(); // pub fn StartDocA(); // pub fn StartDocW(); // pub fn StartFormPage(); // pub fn StartPage(); // pub fn StretchBlt(); pub fn StretchDIBits( hdc: HDC, XDest: c_int, YDest: c_int, nDestWidth: c_int, nDestHeight: c_int, XSrc: c_int, YSrc: c_int, nSrcWidth: c_int, nSrcHeight: c_int, lpBits: *const VOID, lpBitsInfo: *const BITMAPINFO, iUsage: UINT, dwRop: DWORD, ) -> c_int; // pub fn StrokeAndFillPath(); // pub fn StrokePath(); pub fn SwapBuffers(hdc: HDC) -> BOOL; pub fn TextOutA(hdc: HDC, x: c_int, y: c_int, lpString: LPCSTR, c: c_int) -> BOOL; pub fn TextOutW(hdc: HDC, x: c_int, y: c_int, lpString: LPCWSTR, c: c_int) -> BOOL; // pub fn TranslateCharsetInfo(); // pub fn UnrealizeObject(); // pub fn UpdateColors(); // pub fn UpdateICMRegKeyA(); // pub fn UpdateICMRegKeyW(); // pub fn WidenPath(); // pub fn gdiPlaySpoolStream(); }<|fim▁end|>
// pub fn GetFontData(); // pub fn GetFontLanguageInfo(); // pub fn GetFontResourceInfoW(); // pub fn GetFontUnicodeRanges();
<|file_name|>test_stdio.py<|end_file_name|><|fim▁begin|># Copyright (c) Twisted Matrix Laboratories. # See LICENSE for details. """ Tests for L{twisted.internet.stdio}. @var properEnv: A copy of L{os.environ} which has L{bytes} keys/values on POSIX platforms and native L{str} keys/values on Windows. """ from __future__ import absolute_import, division import os import sys import itertools from twisted.trial import unittest from twisted.python import filepath, log from twisted.python.reflect import requireModule from twisted.python.runtime import platform from twisted.python.compat import xrange, intToBytes, bytesEnviron from twisted.internet import error, defer, protocol, stdio, reactor from twisted.test.test_tcp import ConnectionLostNotifyingProtocol # A short string which is intended to appear here and nowhere else, # particularly not in any random garbage output CPython unavoidable # generates (such as in warning text and so forth). This is searched # for in the output from stdio_test_lastwrite and if it is found at # the end, the functionality works. UNIQUE_LAST_WRITE_STRING = b'xyz123abc Twisted is great!' skipWindowsNopywin32 = None if platform.isWindows(): if requireModule('win32process') is None: skipWindowsNopywin32 = ("On windows, spawnProcess is not available " "in the absence of win32process.") properEnv = dict(os.environ) properEnv["PYTHONPATH"] = os.pathsep.join(sys.path) else: properEnv = bytesEnviron() properEnv[b"PYTHONPATH"] = os.pathsep.join(sys.path).encode( sys.getfilesystemencoding()) class StandardIOTestProcessProtocol(protocol.ProcessProtocol): """ Test helper for collecting output from a child process and notifying something when it exits. @ivar onConnection: A L{defer.Deferred} which will be called back with C{None} when the connection to the child process is established. @ivar onCompletion: A L{defer.Deferred} which will be errbacked with the failure associated with the child process exiting when it exits. @ivar onDataReceived: A L{defer.Deferred} which will be called back with this instance whenever C{childDataReceived} is called, or C{None} to suppress these callbacks. @ivar data: A C{dict} mapping file descriptors to strings containing all bytes received from the child process on each file descriptor. """ onDataReceived = None def __init__(self): self.onConnection = defer.Deferred() self.onCompletion = defer.Deferred() self.data = {} def connectionMade(self): self.onConnection.callback(None) def childDataReceived(self, name, bytes): """ Record all bytes received from the child process in the C{data} dictionary. Fire C{onDataReceived} if it is not C{None}. """ self.data[name] = self.data.get(name, b'') + bytes if self.onDataReceived is not None: d, self.onDataReceived = self.onDataReceived, None d.callback(self) def processEnded(self, reason): self.onCompletion.callback(reason) class StandardInputOutputTests(unittest.TestCase): skip = skipWindowsNopywin32 def _spawnProcess(self, proto, sibling, *args, **kw): """ Launch a child Python process and communicate with it using the given ProcessProtocol. @param proto: A L{ProcessProtocol} instance which will be connected to the child process. @param sibling: The basename of a file containing the Python program to run in the child process. @param *args: strings which will be passed to the child process on the command line as C{argv[2:]}. @param **kw: additional arguments to pass to L{reactor.spawnProcess}. @return: The L{IProcessTransport} provider for the spawned process. 
""" args = [sys.executable, b"-m", b"twisted.test." + sibling, reactor.__class__.__module__] + list(args)<|fim▁hole|> sys.executable, args, env=properEnv, **kw) def _requireFailure(self, d, callback): def cb(result): self.fail("Process terminated with non-Failure: %r" % (result,)) def eb(err): return callback(err) return d.addCallbacks(cb, eb) def test_loseConnection(self): """ Verify that a protocol connected to L{StandardIO} can disconnect itself using C{transport.loseConnection}. """ errorLogFile = self.mktemp() log.msg("Child process logging to " + errorLogFile) p = StandardIOTestProcessProtocol() d = p.onCompletion self._spawnProcess(p, b'stdio_test_loseconn', errorLogFile) def processEnded(reason): # Copy the child's log to ours so it's more visible. with open(errorLogFile, 'r') as f: for line in f: log.msg("Child logged: " + line.rstrip()) self.failIfIn(1, p.data) reason.trap(error.ProcessDone) return self._requireFailure(d, processEnded) def test_readConnectionLost(self): """ When stdin is closed and the protocol connected to it implements L{IHalfCloseableProtocol}, the protocol's C{readConnectionLost} method is called. """ errorLogFile = self.mktemp() log.msg("Child process logging to " + errorLogFile) p = StandardIOTestProcessProtocol() p.onDataReceived = defer.Deferred() def cbBytes(ignored): d = p.onCompletion p.transport.closeStdin() return d p.onDataReceived.addCallback(cbBytes) def processEnded(reason): reason.trap(error.ProcessDone) d = self._requireFailure(p.onDataReceived, processEnded) self._spawnProcess( p, b'stdio_test_halfclose', errorLogFile) return d def test_lastWriteReceived(self): """ Verify that a write made directly to stdout using L{os.write} after StandardIO has finished is reliably received by the process reading that stdout. """ p = StandardIOTestProcessProtocol() # Note: the OS X bug which prompted the addition of this test # is an apparent race condition involving non-blocking PTYs. # Delaying the parent process significantly increases the # likelihood of the race going the wrong way. If you need to # fiddle with this code at all, uncommenting the next line # will likely make your life much easier. It is commented out # because it makes the test quite slow. # p.onConnection.addCallback(lambda ign: __import__('time').sleep(5)) try: self._spawnProcess( p, b'stdio_test_lastwrite', UNIQUE_LAST_WRITE_STRING, usePTY=True) except ValueError as e: # Some platforms don't work with usePTY=True raise unittest.SkipTest(str(e)) def processEnded(reason): """ Asserts that the parent received the bytes written by the child immediately after the child starts. """ self.assertTrue( p.data[1].endswith(UNIQUE_LAST_WRITE_STRING), "Received %r from child, did not find expected bytes." % ( p.data,)) reason.trap(error.ProcessDone) return self._requireFailure(p.onCompletion, processEnded) def test_hostAndPeer(self): """ Verify that the transport of a protocol connected to L{StandardIO} has C{getHost} and C{getPeer} methods. """ p = StandardIOTestProcessProtocol() d = p.onCompletion self._spawnProcess(p, b'stdio_test_hostpeer') def processEnded(reason): host, peer = p.data[1].splitlines() self.assertTrue(host) self.assertTrue(peer) reason.trap(error.ProcessDone) return self._requireFailure(d, processEnded) def test_write(self): """ Verify that the C{write} method of the transport of a protocol connected to L{StandardIO} sends bytes to standard out. 
""" p = StandardIOTestProcessProtocol() d = p.onCompletion self._spawnProcess(p, b'stdio_test_write') def processEnded(reason): self.assertEqual(p.data[1], b'ok!') reason.trap(error.ProcessDone) return self._requireFailure(d, processEnded) def test_writeSequence(self): """ Verify that the C{writeSequence} method of the transport of a protocol connected to L{StandardIO} sends bytes to standard out. """ p = StandardIOTestProcessProtocol() d = p.onCompletion self._spawnProcess(p, b'stdio_test_writeseq') def processEnded(reason): self.assertEqual(p.data[1], b'ok!') reason.trap(error.ProcessDone) return self._requireFailure(d, processEnded) def _junkPath(self): junkPath = self.mktemp() with open(junkPath, 'wb') as junkFile: for i in xrange(1024): junkFile.write(intToBytes(i) + b'\n') return junkPath def test_producer(self): """ Verify that the transport of a protocol connected to L{StandardIO} is a working L{IProducer} provider. """ p = StandardIOTestProcessProtocol() d = p.onCompletion written = [] toWrite = list(range(100)) def connectionMade(ign): if toWrite: written.append(intToBytes(toWrite.pop()) + b"\n") proc.write(written[-1]) reactor.callLater(0.01, connectionMade, None) proc = self._spawnProcess(p, b'stdio_test_producer') p.onConnection.addCallback(connectionMade) def processEnded(reason): self.assertEqual(p.data[1], b''.join(written)) self.assertFalse( toWrite, "Connection lost with %d writes left to go." % (len(toWrite),)) reason.trap(error.ProcessDone) return self._requireFailure(d, processEnded) def test_consumer(self): """ Verify that the transport of a protocol connected to L{StandardIO} is a working L{IConsumer} provider. """ p = StandardIOTestProcessProtocol() d = p.onCompletion junkPath = self._junkPath() self._spawnProcess(p, b'stdio_test_consumer', junkPath) def processEnded(reason): with open(junkPath, 'rb') as f: self.assertEqual(p.data[1], f.read()) reason.trap(error.ProcessDone) return self._requireFailure(d, processEnded) def test_normalFileStandardOut(self): """ If L{StandardIO} is created with a file descriptor which refers to a normal file (ie, a file from the filesystem), L{StandardIO.write} writes bytes to that file. In particular, it does not immediately consider the file closed or call its protocol's C{connectionLost} method. """ onConnLost = defer.Deferred() proto = ConnectionLostNotifyingProtocol(onConnLost) path = filepath.FilePath(self.mktemp()) self.normal = normal = path.open('wb') self.addCleanup(normal.close) kwargs = dict(stdout=normal.fileno()) if not platform.isWindows(): # Make a fake stdin so that StandardIO doesn't mess with the *real* # stdin. r, w = os.pipe() self.addCleanup(os.close, r) self.addCleanup(os.close, w) kwargs['stdin'] = r connection = stdio.StandardIO(proto, **kwargs) # The reactor needs to spin a bit before it might have incorrectly # decided stdout is closed. Use this counter to keep track of how # much we've let it spin. If it closes before we expected, this # counter will have a value that's too small and we'll know. howMany = 5 count = itertools.count() def spin(): for value in count: if value == howMany: connection.loseConnection() return connection.write(intToBytes(value)) break reactor.callLater(0, spin) reactor.callLater(0, spin) # Once the connection is lost, make sure the counter is at the # appropriate value. 
def cbLost(reason): self.assertEqual(next(count), howMany + 1) self.assertEqual( path.getContent(), b''.join(map(intToBytes, range(howMany)))) onConnLost.addCallback(cbLost) return onConnLost if platform.isWindows(): test_normalFileStandardOut.skip = ( "StandardIO does not accept stdout as an argument to Windows. " "Testing redirection to a file is therefore harder.")<|fim▁end|>
return reactor.spawnProcess( proto,
<|file_name|>rename_hist_items.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python2.7 from bioblend.galaxy import GalaxyInstance import requests import datetime import argparse requests.packages.urllib3.disable_warnings() def parse_args(): args = argparse.ArgumentParser(description="Rename history items using a tabular file." +"\n" + "Example usage: python rename_hist_items.py -url misssissippi.snv.jussieu.fr \ -key $your_api_key -hid $your_history_id -table $your_tabular_file \n \<|fim▁hole|> args.add_argument("-key", "--api_key", required=True, help="api key for galaxy instance" ) args.add_argument("-hid", "--history_id", required=True, help="History id of hitory containing files to be renamed") args.add_argument("-table", "--rename_table", required=True, type=file, help="tab-seperated file with first column current filename,\ and second column the desired name") return args.parse_args() def return_datetime(string_representation): """ returns current time, to find last modified history. Currently ununsed, may be used in the future. """ date, time = string_representation.split('T') return datetime.datetime.strptime(date + ' ' + time, "%Y-%m-%d %H:%M:%S.%f") def get_rename_list(rename_table): return [(line.split('\t')[0],line.split('\t')[1].strip()) for line in rename_table] def get_instance(url, api_key): return GalaxyInstance(url, api_key) def get_name_id_d(gi, hid): return {dataset[u'name']:dataset[u'id'] for dataset in gi.histories.show_history(hid, contents=True)} def update_names(gi, hid, rename_list, name_id_d ): for old_name, new_name in rename_list: dataset_id = name_id_d[old_name] gi.histories.update_dataset(history_id=hid, dataset_id=dataset_id, name=new_name) def main(): args = parse_args() hid = args.history_id rename_list = get_rename_list(args.rename_table) gi = get_instance(args.galaxy_url, args.api_key) name_id_d = get_name_id_d(gi, hid) rval = update_names(gi, hid, rename_list, name_id_d) if __name__ == "__main__": main()<|fim▁end|>
See test-data/sample_table.tab for an example file.") args.add_argument("-url", "--galaxy_url", required=True, help="url of galaxy instance")
<|file_name|>types.rs<|end_file_name|><|fim▁begin|>use std::collections::{HashMap, HashSet}; use std::cell::RefCell; #[derive(Deserialize, Debug, Serialize, PartialEq)] pub struct ClaimDefinition { #[serde(rename = "ref")] pub schema_seq_no: i32, #[serde(rename = "origin")] pub issuer_did: String, pub signature_type: String, pub data: ClaimDefinitionData } #[derive(Deserialize, Debug, Serialize, PartialEq)] pub struct ClaimDefinitionData { #[serde(rename = "primary")] pub public_key: PublicKey, #[serde(rename = "revocation")] pub public_key_revocation: Option<String>, } #[derive(Debug, Deserialize, PartialEq, Serialize, Eq)] pub struct PublicKey { pub n: String, pub s: String, pub rms: String, pub r: HashMap<String, String>, pub rctxt: String, pub z: String } #[derive(Deserialize, Eq, PartialEq, Debug)] #[serde(rename_all = "camelCase")] pub struct Response { pub op: String, pub reason: String, pub req_id: u64, pub identifier: String } #[derive(Deserialize, Eq, PartialEq, Debug)] pub struct Reply<T> { pub op: String, pub result: T, } #[derive(Deserialize, Eq, PartialEq, Debug)] #[serde(rename_all = "camelCase")] pub struct GetNymReplyResult { pub identifier: String, pub req_id: u64, #[serde(rename = "type")] pub _type: String, pub data: Option<String>, pub dest: String } #[derive(Deserialize, Eq, PartialEq, Debug)] #[serde(rename_all = "camelCase")] pub struct GetNymResultData { pub identifier: String, pub dest: String, pub role: Option<String>,<|fim▁hole|> #[derive(Deserialize, Eq, PartialEq, Debug)] #[serde(rename_all = "camelCase")] pub struct GetAttribReplyResult { pub identifier: String, pub req_id: u64, #[serde(rename = "type")] pub _type: String, pub data: Option<String>, pub dest: String, pub raw: String, pub seq_no: Option<i32> } #[derive(Deserialize, Serialize, Eq, PartialEq, Debug)] #[serde(rename_all = "camelCase")] pub struct GetSchemaReplyResult { pub identifier: String, pub req_id: u64, pub seq_no: Option<i32>, //For tests/ In normal case seq_no exists #[serde(rename = "type")] pub _type: String, pub data: Option<GetSchemaResultData>, pub dest: Option<String> } #[derive(Deserialize, Serialize, Debug, PartialEq, Eq, Clone)] pub struct GetSchemaResultData { pub keys: HashSet<String>, pub name: String, pub origin: String, pub version: String } #[derive(Deserialize, PartialEq, Debug)] pub struct GetClaimDefReplyResult { pub identifier: String, #[serde(rename = "reqId")] pub req_id: u64, #[serde(rename = "seqNo")] pub seq_no: i32, #[serde(rename = "type")] pub _type: String, pub data: ClaimDefinitionData, pub origin: String, pub signature_type: String, #[serde(rename = "ref")] pub _ref: i32 } #[derive(Debug, Serialize, Deserialize)] pub struct GetTxnResult { pub identifier: String, #[serde(rename = "reqId")] pub req_id: u64, #[serde(rename = "seqNo")] pub seq_no: Option<i32>, #[serde(rename = "type")] pub _type: String, pub data: String } #[derive(Debug, Serialize, Deserialize)] pub struct SchemaResult { pub identifier: String, #[serde(rename = "reqId")] pub req_id: u64, #[serde(rename = "seqNo")] pub seq_no: i32, #[serde(rename = "type")] pub _type: String, pub data: Option<String> } #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] pub struct Schema { #[serde(rename = "seqNo")] pub seq_no: i32, pub data: SchemaData } #[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)] pub struct SchemaData { pub name: String, pub version: String, pub keys: HashSet<String> } #[derive(Debug, Deserialize, Serialize, Clone, PartialEq, Eq)] pub struct ClaimOffer { pub 
issuer_did: String, pub schema_seq_no: i32 } #[derive(Debug, Deserialize, Serialize)] pub struct ProofClaimsJson { pub attrs: HashMap<String, Vec<ClaimInfo>>, pub predicates: HashMap<String, Vec<ClaimInfo>> } #[derive(Debug, Deserialize, Serialize)] pub struct ProofRequestJson { pub nonce: String, pub name: String, pub version: String, pub requested_attrs: HashMap<String, AttributeInfo>, pub requested_predicates: HashMap<String, Predicate> } #[derive(Clone, Debug, PartialEq, Eq, Hash, Deserialize, Serialize)] pub struct Predicate { pub attr_name: String, pub p_type: String, pub value: i32, pub schema_seq_no: Option<i32>, pub issuer_did: Option<String> } #[derive(Clone, Debug, Deserialize, Serialize)] pub struct AttributeInfo { pub name: String, pub schema_seq_no: Option<i32>, pub issuer_did: Option<String> } #[derive(Debug, Serialize, Deserialize, Clone, Eq, PartialEq, Hash)] pub struct ClaimInfo { pub claim_uuid: String, pub issuer_did: String, pub revoc_reg_seq_no: Option<i32>, pub schema_seq_no: i32 } #[derive(Debug, Deserialize, Serialize)] pub struct ClaimRequestJson { pub blinded_ms: ClaimRequest, pub issuer_did: String, pub schema_seq_no: i32 } #[derive(Debug, Serialize, Deserialize)] pub struct ClaimRequest { pub prover_did: String, pub u: String, pub ur: Option<String> } #[derive(Debug, Deserialize, Serialize)] pub struct ClaimJson { pub claim: HashMap<String, Vec<String>>, pub revoc_reg_seq_no: Option<i32>, pub schema_seq_no: Option<i32>, pub signature: ClaimSignature, pub issuer_did: Option<String> } #[derive(Debug, Deserialize, Serialize)] pub struct ClaimSignature { pub primary_claim: PrimaryClaim, pub non_revocation_claim: Option<RefCell<String>> } #[derive(Debug, Deserialize, Serialize)] pub struct PrimaryClaim { pub m2: String, pub a: String, pub e: String, pub v: String } #[derive(Debug, Serialize, Deserialize)] pub struct ProofJson { pub proofs: HashMap<String, ClaimProof>, pub aggregated_proof: AggregatedProof, pub requested_proof: RequestedProofJson } #[derive(Debug, Serialize, Deserialize)] pub struct Proof { pub primary_proof: PrimaryProof, pub non_revoc_proof: Option<NonRevocProof> } #[derive(Clone, Debug, Serialize, Deserialize)] pub struct NonRevocProof { pub x_list: NonRevocProofXList, pub c_list: NonRevocProofCList } #[derive(Clone, Debug, Deserialize, Serialize)] pub struct NonRevocProofCList { pub e: String, pub d: String, pub a: String, pub g: String, pub w: String, pub s: String, pub u: String } #[derive(Clone, Debug, Deserialize, Serialize)] pub struct NonRevocProofXList { pub rho: String, pub r: String, pub r_prime: String, pub r_prime_prime: String, pub r_prime_prime_prime: String, pub o: String, pub o_prime: String, pub m: String, pub m_prime: String, pub t: String, pub t_prime: String, pub m2: String, pub s: String, pub c: String } #[derive(Debug, Serialize, Deserialize)] pub struct PrimaryProof { pub eq_proof: PrimaryEqualProof, pub ge_proofs: Vec<PrimaryPredicateGEProof> } #[derive(Debug, Serialize, Deserialize)] pub struct PrimaryPredicateGEProof { pub u: HashMap<String, String>, pub r: HashMap<String, String>, pub mj: String, pub alpha: String, pub t: HashMap<String, String>, pub predicate: Predicate } #[derive(Debug, Serialize, Deserialize)] pub struct PrimaryEqualProof { pub revealed_attrs: HashMap<String, String>, pub a_prime: String, pub e: String, pub v: String, pub m: HashMap<String, String>, pub m1: String, pub m2: String } #[derive(Debug, Serialize, Deserialize)] pub struct ClaimProof { pub proof: Proof, pub revoc_reg_seq_no: Option<i32>, pub 
schema_seq_no: i32, pub issuer_did: String } #[derive(Debug, Serialize, Deserialize)] pub struct AggregatedProof { pub c_hash: String, pub c_list: Vec<Vec<u8>> } #[derive(Debug, Serialize, Deserialize)] pub struct RequestedProofJson { pub revealed_attrs: HashMap<String, (String, String, String)>, pub unrevealed_attrs: HashMap<String, String>, pub self_attested_attrs: HashMap<String, String>, pub predicates: HashMap<String, String> }<|fim▁end|>
pub verkey: Option<String> }
<|file_name|>0001_initial.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from south.utils import datetime_utils as datetime from south.db import db from south.v2 import SchemaMigration from django.db import models class Migration(SchemaMigration): def forwards(self, orm): # Adding model 'LaunchWindow' db.create_table(u'launch_window_launchwindow', ( (u'id', self.gf('django.db.models.fields.AutoField')(primary_key=True)), ('name', self.gf('django.db.models.fields.CharField')(max_length=255)), ('description', self.gf('django.db.models.fields.TextField')()), ('cron_format', self.gf('django.db.models.fields.CharField')(max_length=255, null=True, blank=True)), )) db.send_create_signal(u'launch_window', ['LaunchWindow']) def backwards(self, orm): # Deleting model 'LaunchWindow'<|fim▁hole|> u'launch_window.launchwindow': { 'Meta': {'object_name': 'LaunchWindow'}, 'cron_format': ('django.db.models.fields.CharField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}), 'description': ('django.db.models.fields.TextField', [], {}), u'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), 'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}) } } complete_apps = ['launch_window']<|fim▁end|>
db.delete_table(u'launch_window_launchwindow') models = {
<|file_name|>main.go<|end_file_name|><|fim▁begin|>package main // list all of the multipels of m below max and returns them on the ret channel func listMultiples(m, max int, ret chan int) { if m == 0 { return } count := 0 tmp := 0 for { tmp = count * m if tmp >= max { break } count++ ret <- tmp } close(ret) } <|fim▁hole|>// range over each channel until they have been closed. and return the sum of all values returned func collect(c []chan int) int { r := make(chan int) intSet := make(map[int]struct{}) // fan in the recieved values for i := range c { go func(f chan int) { tmp := 0 for tmp = range f { r <- tmp } r <- -1 }(c[i]) } i := 0 tmp := 0 // sum values recieved on r until the done signal (-1) has been recieved by all for i < len(c) { tmp = <-r if tmp == -1 { i++ } else { intSet[tmp] = struct{}{} } } // add up all of the keys in the set sum := 0 for k, _ := range intSet { sum += k } return sum } func main() { r := []chan int{ make(chan int), make(chan int), } go listMultiples(3, 1000, r[0]) go listMultiples(5, 1000, r[1]) print(collect(r)) }<|fim▁end|>
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from setuptools import setup, find_packages from dist_job_mgr.version import VERSION <|fim▁hole|> name='dist_job_mgr', version=VERSION, author='genForma Corp', author_email='[email protected]', url='', packages=find_packages(), include_package_data=True, zip_safe=False, entry_points = { 'console_scripts': [ 'djmctl = dist_job_mgr.djmctl:main', 'djm-worker = dist_job_mgr.worker_main:main' ]}, install_requires=['lockfile>=0.9',], # 'python-daemon'], license='Apache V2.0', description='Distributed Job Manager', long_description="description" )<|fim▁end|>
setup(
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>import learn import inject import project from build_surrogate import build_surrogate <|fim▁hole|><|fim▁end|>
from ActiveSubspace import ActiveSubspace
<|file_name|>search_notify.py<|end_file_name|><|fim▁begin|># # Copyright (c) 2008--2015 Red Hat, Inc. # # This software is licensed to you under the GNU General Public License, # version 2 (GPLv2). There is NO WARRANTY for this software, express or # implied, including the implied warranties of MERCHANTABILITY or FITNESS # FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2 # along with this software; if not, see # http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt. # # Red Hat trademarks are not licensed under GPLv2. No permission is # granted to use or replicate Red Hat trademarks that are incorporated # in this software or its documentation. # # Sends notification to search-server that it should update server index # <|fim▁hole|> class SearchNotify: def __init__(self, host="127.0.0.1", port="2828"): self.addr = "http://%s:%s" % (host, port) def notify(self, indexName="server"): try: client = xmlrpclib.ServerProxy(self.addr) result = client.admin.updateIndex(indexName) except Exception, e: log_error("Failed to notify search service located at %s to update %s indexes" % (self.addr, indexName), e) return False return result if __name__ == "__main__": search = SearchNotify() result = search.notify() print "search.notify() = %s" % (result)<|fim▁end|>
import xmlrpclib from spacewalk.common.rhnLog import log_error
<|file_name|>transitions-spec.js<|end_file_name|><|fim▁begin|>import { getTransitionNames, getAvailableTransitionNames, getTransitionStylesName } from 'frontend/transitions'; describe('getTransitionNames', () => { it('returns array of names', () => { const result = getTransitionNames(); expect(result).toContain('scroll'); expect(result).toContain('fade'); }); }); describe('getAvailableTransitions', () => { it('offers fade transitions if section and previous section both have full height', () => { const section = {fullHeight: true}; const previousSection = {fullHeight: true}; const result = getAvailableTransitionNames(section, previousSection); expect(result).toContain('fade'); expect(result).toContain('fadeBg'); }); it('does not offer fade transitions if section does not have full height', () => { const section = {}; const previousSection = {fullHeight: true}; const result = getAvailableTransitionNames(section, previousSection); expect(result).toContain('scroll'); expect(result).not.toContain('fade'); expect(result).not.toContain('fadeBg'); }); it('does not offer fade transitions if previous section does not have full height', () => { const section = {fullHeight: true}; const previousSection = {}; const result = getAvailableTransitionNames(section, previousSection); expect(result).toContain('scroll'); expect(result).not.toContain('fade'); expect(result).not.toContain('fadeBg'); }); }); describe('getTransitionStylesName', () => { it('uses fadeIn if both section and previous section have fullHeight', () => { const previousSection = {fullHeight: true, transition: 'scroll'}; const section = {fullHeight: true, transition: 'fade'}; const nextSection = {transition: 'scroll'}; const result = getTransitionStylesName(section, previousSection, nextSection); expect(result).toBe('fadeInScrollOut'); }); it('falls back to scrollIn if previous section does not have fullHeight', () => { const previousSection = {transition: 'scroll'}; const section = {fullHeight: true, transition: 'fade'}; const nextSection = {fullHeight: true, transition: 'scroll'}; const result = getTransitionStylesName(section, previousSection, nextSection); expect(result).toBe('scrollInScrollOut'); }); it('falls back to scrollIn if section does not have fullHeight', () => { const previousSection = {fullHeight: true, transition: 'scroll'}; const section = {transition: 'fade'}; const nextSection = {fullHeight: true, transition: 'scroll'}; const result = getTransitionStylesName(section, previousSection, nextSection); expect(result).toBe('scrollInScrollOut'); }); it('falls back to scrollIn if previous is missing', () => { const section = {transition: 'fade'}; const nextSection = {fullHeight: true, transition: 'scroll'}; const result = getTransitionStylesName(section, null, nextSection); expect(result).toBe('scrollInScrollOut'); }); it('uses fadeOut if both section and next section have fullHeight', () => { const previousSection = {transition: 'scroll'}; const section = {fullHeight: true, transition: 'reveal'}; const nextSection = {fullHeight: true, transition: 'fade'}; const result = getTransitionStylesName(section, previousSection, nextSection); expect(result).toBe('revealFadeOut');<|fim▁hole|> it('falls back to scrollOut if next section does not have fullHeight', () => { const previousSection = {transition: 'scroll'}; const section = {fullHeight: true, transition: 'reveal'}; const nextSection = {transition: 'fade'}; const result = getTransitionStylesName(section, previousSection, nextSection); expect(result).toBe('revealScrollOut'); }); 
it('falls back to scrollOut if section does not have fullHeight', () => { const previousSection = {transition: 'scroll'}; const section = {transition: 'reveal'}; const nextSection = {fullHeight: true, transition: 'fade'}; const result = getTransitionStylesName(section, previousSection, nextSection); expect(result).toBe('revealScrollOut'); }); it('falls back to scrollOut if next section is missing', () => { const previousSection = {transition: 'scroll'}; const section = {transition: 'reveal'}; const result = getTransitionStylesName(section, previousSection, null); expect(result).toBe('revealScrollOut'); }); });<|fim▁end|>
});
<|file_name|>02ccb3e6a553_.py<|end_file_name|><|fim▁begin|>"""empty message Revision ID: 02ccb3e6a553 Revises: None Create Date: 2016-05-17 22:15:03.881575 """ # revision identifiers, used by Alembic. revision = '02ccb3e6a553' down_revision = None from alembic import op import sqlalchemy as sa def upgrade(): ### commands auto generated by Alembic - please adjust! ### op.create_table('roles', sa.Column('id', sa.Integer(), nullable=False), sa.Column('name', sa.String(length=64), nullable=True), sa.Column('default', sa.Boolean(), nullable=True), sa.Column('permissions', sa.Integer(), nullable=True), sa.PrimaryKeyConstraint('id'), sa.UniqueConstraint('name') ) op.create_index(op.f('ix_roles_default'), 'roles', ['default'], unique=False) op.create_table('users', sa.Column('id', sa.Integer(), nullable=False), sa.Column('email', sa.String(length=64), nullable=True), sa.Column('username', sa.String(length=64), nullable=True), sa.Column('role_id', sa.Integer(), nullable=True), sa.Column('password_hash', sa.String(length=128), nullable=True), sa.Column('confirmed', sa.Boolean(), nullable=True), sa.Column('name', sa.String(length=64), nullable=True), sa.Column('location', sa.String(length=64), nullable=True), sa.Column('about_me', sa.Text(), nullable=True), sa.Column('member_since', sa.DateTime(), nullable=True), sa.Column('last_seen', sa.DateTime(), nullable=True), sa.ForeignKeyConstraint(['role_id'], ['roles.id'], ), sa.PrimaryKeyConstraint('id') ) op.create_index(op.f('ix_users_email'), 'users', ['email'], unique=True) op.create_index(op.f('ix_users_username'), 'users', ['username'], unique=True) ### end Alembic commands ###<|fim▁hole|> def downgrade(): ### commands auto generated by Alembic - please adjust! ### op.drop_index(op.f('ix_users_username'), table_name='users') op.drop_index(op.f('ix_users_email'), table_name='users') op.drop_table('users') op.drop_index(op.f('ix_roles_default'), table_name='roles') op.drop_table('roles') ### end Alembic commands ###<|fim▁end|>
<|file_name|>flatspice.py<|end_file_name|><|fim▁begin|># flatspice.py #----------------------------------------------------------- # Python script which writes a SPICE-format netlist. # Replaces the code formerly in "netlist.c" (deprecated). # Python scripting is now the preferred method for handling # netlist output formats. #----------------------------------------------------------- # Select the device string corresponding to the given prefix. # Return the body of the string, or an empty string if the prefix doesn't match. def select(sstr, prefix): ltext = '' if sstr.startswith(prefix): ltext += sstr[len(prefix) + 1:] return ltext # Generate an ASCII string from an xcircuit string (list) def textprint(slist, params): ltext = '' is_symbol = 0 is_iso = 0<|fim▁hole|> if x == 'Return': ltext += '\n' elif x == 'Underline': ltext += '_' elif x == 'Overline': ltext += '!' else: # is a dictionary; will have only one key if f == 'Font': lfont = x[x.keys()[0]] if lfont.startswith('Symbol'): is_symbol = 1 else: is_symbol = 0 if lfont.endswith('ISO'): is_iso = 1 else: is_iso = 0 elif f == 'Parameter': ltext += textprint(params[x[x.keys()[0]]], []) else: # text: SPICE translates "mu" to "u" for y in x[x.keys()[0]]: if is_symbol: if y == 'f': ltext += 'phi' elif y == 'm': ltext += 'u' else: ltext += y else: if ord(y) == 181: ltext += 'u' elif ord(y) > 127: ltext += '/' + str(ord(y)) else: ltext += y return ltext # Flatten the netlist and write to the output def recurseflat(outfile, ckt, clist): try: v = ckt['calls'] # calls to subcircuits except KeyError: # A bottom-level circuit element pass else: for y in v: for z in clist: if z['name'] == y['name']: # copy the object and substitute net names into subcircuit ports lobj = z lobj['ports'] = y['ports'] recurseflat(outfile, lobj, clist) break; try: w = ckt['devices'] except KeyError: pass else: for y in w: for u in y: lstr = select(textprint(u, []), 'spice') if lstr <> '': outfile.write('device: ' + lstr + '\n') # Top of the flattened-circuit writing routine def writespiceflat(): p=netlist() g=p['globals'] c=p['circuit'] l=len(c) top=c[l-1] topname=top['name'] topname += '.spc' try: outfile=open(topname, 'w') except IOError: return # print header line outfile.write('*SPICE flattened circuit "' + topname + '"') outfile.write(' from XCircuit v' + str(xc_version)) outfile.write(' (Python script "flatspice.py")\n') # print global variables for x in g: # 'globals' is a list of strings outfile.write('.GLOBAL ' + textprint(x, []) + '\n') outfile.write('\n') recurseflat(outfile, top, c) outfile.write('.end\n') outfile.close() # Key binding and menu button for the spice netlist output # bind('Alt_F', 'writespiceflat') newbutton('Netlist', 'Write Flattened Spice', 'writespiceflat')<|fim▁end|>
for x in slist: try: f = x.keys()[0] except AttributeError: # must be a string
<|file_name|>service.go<|end_file_name|><|fim▁begin|>package httpd import ( "crypto/tls" "expvar" "fmt" "log" "net" "net/http" "net/url" "strings" "sync" "time" "github.com/influxdata/kapacitor/services/logging" ) type Service struct { ln net.Listener addr string https bool cert string err chan error externalURL string server *http.Server mu sync.Mutex wg sync.WaitGroup new chan net.Conn active chan net.Conn idle chan net.Conn closed chan net.Conn stop chan chan struct{} shutdownTimeout time.Duration Handler *Handler logger *log.Logger } func NewService(c Config, hostname string, l *log.Logger, li logging.Interface) *Service { statMap := &expvar.Map{} statMap.Init() port, _ := c.Port() u := url.URL{ Host: fmt.Sprintf("%s:%d", hostname, port), Scheme: "http", } if c.HttpsEnabled { u.Scheme = "https" } s := &Service{ addr: c.BindAddress, https: c.HttpsEnabled, cert: c.HttpsCertificate, externalURL: u.String(), err: make(chan error, 1), shutdownTimeout: time.Duration(c.ShutdownTimeout), Handler: NewHandler( c.AuthEnabled, c.LogEnabled, c.WriteTracing, c.GZIP, statMap, l, li, c.SharedSecret, ), logger: l, } s.Handler.logger = s.logger return s } // Open starts the service func (s *Service) Open() error { s.mu.Lock() defer s.mu.Unlock() s.logger.Println("I! Starting HTTP service") s.logger.Println("I! Authentication enabled:", s.Handler.requireAuthentication) // Open listener. if s.https { cert, err := tls.LoadX509KeyPair(s.cert, s.cert) if err != nil { return err } listener, err := tls.Listen("tcp", s.addr, &tls.Config{ Certificates: []tls.Certificate{cert}, }) if err != nil { return err } s.logger.Println("I! Listening on HTTPS:", listener.Addr().String()) s.ln = listener } else { listener, err := net.Listen("tcp", s.addr) if err != nil { return err } s.logger.Println("I! Listening on HTTP:", listener.Addr().String()) s.ln = listener } // Define server s.server = &http.Server{ Handler: s.Handler, ConnState: s.connStateHandler, } s.new = make(chan net.Conn) s.active = make(chan net.Conn) s.idle = make(chan net.Conn) s.closed = make(chan net.Conn) s.stop = make(chan chan struct{}) // Begin listening for requests in a separate goroutine. go s.manage() s.wg.Add(1) go s.serve() return nil } // Close closes the underlying listener. func (s *Service) Close() error { defer s.logger.Println("I! Closed HTTP service") s.mu.Lock() defer s.mu.Unlock() // If server is not set we were never started if s.server == nil { return nil } // First turn off KeepAlives so that new connections will not become idle s.server.SetKeepAlivesEnabled(false) // Signal to manage loop we are stopping stopping := make(chan struct{}) s.stop <- stopping // Next close the listener so no new connections can be made err := s.ln.Close() if err != nil { return err } <-stopping s.wg.Wait() return nil } func (s *Service) Err() <-chan error { return s.err } func (s *Service) connStateHandler(c net.Conn, state http.ConnState) { switch state { case http.StateNew: s.new <- c case http.StateActive: s.active <- c case http.StateIdle: s.idle <- c case http.StateHijacked, http.StateClosed: s.closed <- c } } // Watch connection state and handle stop request. 
func (s *Service) manage() { defer func() { close(s.new) close(s.active) close(s.idle) close(s.closed) }() var stopDone chan struct{} conns := map[net.Conn]http.ConnState{} var timeout <-chan time.Time for { select { case c := <-s.new: conns[c] = http.StateNew case c := <-s.active: conns[c] = http.StateActive case c := <-s.idle: conns[c] = http.StateIdle // if we're already stopping, close it if stopDone != nil { c.Close() } case c := <-s.closed: delete(conns, c) // if we're waiting to stop and are all empty, we just closed the last // connection and we're done. if stopDone != nil && len(conns) == 0 { close(stopDone) return } case stopDone = <-s.stop: // if we're already all empty, we're already done if len(conns) == 0 { close(stopDone) return } // close current idle connections right away for c, cs := range conns { if cs == http.StateIdle { c.Close() } } timeout = time.After(s.shutdownTimeout) // continue the loop and wait for all the ConnState updates which will // eventually close(stopDone) and return from this goroutine. case <-timeout: s.logger.Println("E! shutdown timedout, forcefully closing all remaining connections") // Connections didn't close in time. // Forcefully close all connections. for c := range conns { c.Close() } } } } // serve serves the handler from the listener. func (s *Service) serve() { defer s.wg.Done() err := s.server.Serve(s.ln) // The listener was closed so exit // See https://github.com/golang/go/issues/4373 if !strings.Contains(err.Error(), "closed") { s.err <- fmt.Errorf("listener failed: addr=%s, err=%s", s.Addr(), err) } else { s.err <- nil } } func (s *Service) Addr() net.Addr { if s.ln != nil { return s.ln.Addr() } return nil } func (s *Service) URL() string { if s.ln != nil { if s.https { return "https://" + s.Addr().String() + BasePath } return "http://" + s.Addr().String() + BasePath } return "" } // URL that should resolve externally to the server HTTP endpoint. // It is possible that the URL does not resolve correctly if the hostname config setting is incorrect.<|fim▁hole|>func (s *Service) ExternalURL() string { return s.externalURL } func (s *Service) AddRoutes(routes []Route) error { return s.Handler.AddRoutes(routes) } func (s *Service) DelRoutes(routes []Route) { s.Handler.DelRoutes(routes) }<|fim▁end|>
<|file_name|>main.py<|end_file_name|><|fim▁begin|>from binsearch import BinSearch from nzbclub import NZBClub from nzbindex import NZBIndex from bs4 import BeautifulSoup from couchpotato.core.helpers.variable import getTitle, splitString, tryInt from couchpotato.core.helpers.encoding import simplifyString from couchpotato.environment import Env from couchpotato.core.logger import CPLog from couchpotato.core.helpers import namer_check from couchpotato.core.media._base.providers.nzb.base import NZBProvider log = CPLog(__name__) import re import urllib import urllib2 import traceback class Base(NZBProvider): urls = { 'download': 'http://www.binnews.in/', 'detail': 'http://www.binnews.in/', 'search': 'http://www.binnews.in/_bin/search2.php', } http_time_between_calls = 4 # Seconds cat_backup_id = None def _search(self, movie, quality, results): nzbDownloaders = [NZBClub(), BinSearch(), NZBIndex()] MovieTitles = movie['info']['titles'] moviequality = simplifyString(quality['identifier']) movieyear = movie['info']['year'] if quality['custom']['3d']==1: threeD= True else: threeD=False if moviequality in ("720p","1080p","bd50"): cat1='39' cat2='49' minSize = 2000 elif moviequality in ("dvdr"): cat1='23' cat2='48' minSize = 3000 else: cat1='6' cat2='27' minSize = 500 for MovieTitle in MovieTitles: try: TitleStringReal = str(MovieTitle.encode("latin-1").replace('-',' ')) except: continue if threeD: TitleStringReal = TitleStringReal + ' 3d' data = 'chkInit=1&edTitre='+TitleStringReal+'&chkTitre=on&chkFichier=on&chkCat=on&cats%5B%5D='+cat1+'&cats%5B%5D='+cat2+'&edAge=&edYear=' try: soup = BeautifulSoup( urllib2.urlopen(self.urls['search'], data) ) except Exception, e: log.error(u"Error trying to load BinNewz response: "+e) return [] tables = soup.findAll("table", id="tabliste") for table in tables: rows = table.findAll("tr") for row in rows: cells = row.select("> td") if (len(cells) < 11): continue name = cells[2].text.strip() testname=namer_check.correctName(name,movie) if testname==0: continue language = cells[3].find("img").get("src") if not "_fr" in language and not "_frq" in language: continue detectedlang='' if "_fr" in language: detectedlang=' truefrench ' else: detectedlang=' french ' # blacklist_groups = [ "alt.binaries.multimedia" ] blacklist_groups = [] newgroupLink = cells[4].find("a") newsgroup = None if newgroupLink.contents: newsgroup = newgroupLink.contents[0] if newsgroup == "abmulti": newsgroup = "alt.binaries.multimedia" elif newsgroup == "ab.moovee": newsgroup = "alt.binaries.moovee" elif newsgroup == "abtvseries": newsgroup = "alt.binaries.tvseries"<|fim▁hole|> newsgroup = "alt.binaries.tv" elif newsgroup == "a.b.teevee": newsgroup = "alt.binaries.teevee" elif newsgroup == "abstvdivxf": newsgroup = "alt.binaries.series.tv.divx.french" elif newsgroup == "abhdtvx264fr": newsgroup = "alt.binaries.hdtv.x264.french" elif newsgroup == "abmom": newsgroup = "alt.binaries.mom" elif newsgroup == "abhdtv": newsgroup = "alt.binaries.hdtv" elif newsgroup == "abboneless": newsgroup = "alt.binaries.boneless" elif newsgroup == "abhdtvf": newsgroup = "alt.binaries.hdtv.french" elif newsgroup == "abhdtvx264": newsgroup = "alt.binaries.hdtv.x264" elif newsgroup == "absuperman": newsgroup = "alt.binaries.superman" elif newsgroup == "abechangeweb": newsgroup = "alt.binaries.echange-web" elif newsgroup == "abmdfvost": newsgroup = "alt.binaries.movies.divx.french.vost" elif newsgroup == "abdvdr": newsgroup = "alt.binaries.dvdr" elif newsgroup == "abmzeromov": newsgroup = "alt.binaries.movies.zeromovies" 
elif newsgroup == "abcfaf": newsgroup = "alt.binaries.cartoons.french.animes-fansub" elif newsgroup == "abcfrench": newsgroup = "alt.binaries.cartoons.french" elif newsgroup == "abgougouland": newsgroup = "alt.binaries.gougouland" elif newsgroup == "abroger": newsgroup = "alt.binaries.roger" elif newsgroup == "abtatu": newsgroup = "alt.binaries.tatu" elif newsgroup =="abstvf": newsgroup = "alt.binaries.series.tv.french" elif newsgroup =="abmdfreposts": newsgroup="alt.binaries.movies.divx.french.reposts" elif newsgroup =="abmdf": newsgroup="alt.binaries.movies.french" elif newsgroup =="abhdtvfrepost": newsgroup="alt.binaries.hdtv.french.repost" elif newsgroup == "abmmkv": newsgroup = "alt.binaries.movies.mkv" elif newsgroup == "abf-tv": newsgroup = "alt.binaries.french-tv" elif newsgroup == "abmdfo": newsgroup = "alt.binaries.movies.divx.french.old" elif newsgroup == "abmf": newsgroup = "alt.binaries.movies.french" elif newsgroup == "ab.movies": newsgroup = "alt.binaries.movies" elif newsgroup == "a.b.french": newsgroup = "alt.binaries.french" elif newsgroup == "a.b.3d": newsgroup = "alt.binaries.3d" elif newsgroup == "ab.dvdrip": newsgroup = "alt.binaries.dvdrip" elif newsgroup == "ab.welovelori": newsgroup = "alt.binaries.welovelori" elif newsgroup == "abblu-ray": newsgroup = "alt.binaries.blu-ray" elif newsgroup == "ab.bloaf": newsgroup = "alt.binaries.bloaf" elif newsgroup == "ab.hdtv.german": newsgroup = "alt.binaries.hdtv.german" elif newsgroup == "abmd": newsgroup = "alt.binaries.movies.divx" elif newsgroup == "ab.ath": newsgroup = "alt.binaries.ath" elif newsgroup == "a.b.town": newsgroup = "alt.binaries.town" elif newsgroup == "a.b.u-4all": newsgroup = "alt.binaries.u-4all" elif newsgroup == "ab.amazing": newsgroup = "alt.binaries.amazing" elif newsgroup == "ab.astronomy": newsgroup = "alt.binaries.astronomy" elif newsgroup == "ab.nospam.cheer": newsgroup = "alt.binaries.nospam.cheerleaders" elif newsgroup == "ab.worms": newsgroup = "alt.binaries.worms" elif newsgroup == "abcores": newsgroup = "alt.binaries.cores" elif newsgroup == "abdvdclassics": newsgroup = "alt.binaries.dvd.classics" elif newsgroup == "abdvdf": newsgroup = "alt.binaries.dvd.french" elif newsgroup == "abdvds": newsgroup = "alt.binaries.dvds" elif newsgroup == "abmdfrance": newsgroup = "alt.binaries.movies.divx.france" elif newsgroup == "abmisc": newsgroup = "alt.binaries.misc" elif newsgroup == "abnl": newsgroup = "alt.binaries.nl" elif newsgroup == "abx": newsgroup = "alt.binaries.x" else: log.error(u"Unknown binnewz newsgroup: " + newsgroup) continue if newsgroup in blacklist_groups: log.error(u"Ignoring result, newsgroup is blacklisted: " + newsgroup) continue filename = cells[5].contents[0] m = re.search("^(.+)\s+{(.*)}$", name) qualityStr = "" if m: name = m.group(1) qualityStr = m.group(2) m = re.search("^(.+)\s+\[(.*)\]$", name) source = None if m: name = m.group(1) source = m.group(2) m = re.search("(.+)\(([0-9]{4})\)", name) year = "" if m: name = m.group(1) year = m.group(2) if int(year) > movieyear + 1 or int(year) < movieyear - 1: continue m = re.search("(.+)\((\d{2}/\d{2}/\d{4})\)", name) dateStr = "" if m: name = m.group(1) dateStr = m.group(2) year = dateStr[-5:].strip(")").strip("/") m = re.search("(.+)\s+S(\d{2})\s+E(\d{2})(.*)", name) if m: name = m.group(1) + " S" + m.group(2) + "E" + m.group(3) + m.group(4) m = re.search("(.+)\s+S(\d{2})\s+Ep(\d{2})(.*)", name) if m: name = m.group(1) + " S" + m.group(2) + "E" + m.group(3) + m.group(4) filenameLower = filename.lower() searchItems = [] if 
qualityStr=="": if source in ("Blu Ray-Rip", "HD DVD-Rip"): qualityStr="brrip" elif source =="DVDRip": qualityStr="dvdrip" elif source == "TS": qualityStr ="ts" elif source == "DVDSCR": qualityStr ="scr" elif source == "CAM": qualityStr ="cam" elif moviequality == "dvdr": qualityStr ="dvdr" if year =='': year = '1900' if len(searchItems) == 0 and qualityStr == str(moviequality): searchItems.append( filename ) for searchItem in searchItems: resultno=1 for downloader in nzbDownloaders: log.info("Searching for download : " + name + ", search string = "+ searchItem + " on " + downloader.__class__.__name__) try: binsearch_result = downloader.search(searchItem, minSize, newsgroup ) if binsearch_result: new={} def extra_check(item): return True qualitytag='' if qualityStr.lower() in ['720p','1080p']: qualitytag=' hd x264 h264 ' elif qualityStr.lower() in ['dvdrip']: qualitytag=' dvd xvid ' elif qualityStr.lower() in ['brrip']: qualitytag=' hdrip ' elif qualityStr.lower() in ['ts']: qualitytag=' webrip ' elif qualityStr.lower() in ['scr']: qualitytag='' elif qualityStr.lower() in ['dvdr']: qualitytag=' pal video_ts ' new['id'] = binsearch_result.nzbid new['name'] = name + detectedlang + qualityStr + qualitytag + downloader.__class__.__name__ new['url'] = binsearch_result.nzburl new['detail_url'] = binsearch_result.refererURL new['size'] = binsearch_result.sizeInMegs new['age'] = binsearch_result.age new['extra_check'] = extra_check results.append(new) resultno=resultno+1 log.info("Found : " + searchItem + " on " + downloader.__class__.__name__) if resultno==3: break except Exception, e: log.error("Searching from " + downloader.__class__.__name__ + " failed : " + str(e) + traceback.format_exc()) def download(self, url = '', nzb_id = ''): if 'binsearch' in url: data = { 'action': 'nzb', nzb_id: 'on' } try: return self.urlopen(url, data = data, show_error = False) except: log.error('Failed getting nzb from %s: %s', (self.getName(), traceback.format_exc())) return 'try_next' else: values = { 'url' : '/' } data_tmp = urllib.urlencode(values) req = urllib2.Request(url, data_tmp ) try: #log.error('Failed downloading from %s', self.getName()) return urllib2.urlopen(req).read() except: log.error('Failed downloading from %s: %s', (self.getName(), traceback.format_exc())) return 'try_next' config = [{ 'name': 'binnewz', 'groups': [ { 'tab': 'searcher', 'list': 'nzb_providers', 'name': 'binnewz', 'description': 'Free provider, lots of french nzbs. 
See <a href="http://www.binnews.in/">binnewz</a>', 'wizard': True, 'icon': 'iVBORw0KGgoAAAANSUhEUgAAABAAAAAQCAYAAAAf8/9hAAAABHNCSVQICAgIfAhkiAAAAgRJREFUOI1t009rVFcYx/HPuffOTGYmMcZoEmNUkiJRSZRAC1ropuimuy6KuHHhShe+EF+CL8AX4LpQCgoiohhMMKKMqHRTtaJJ5k8nudfFnBkjzoEf5zk8PN/zO3+egFGMYX+MS9hFG604d/A/ulG7yFFkqOGgcuUuSJK32q0NPMMaNrE9RC10UxzCedX6767cqDu2MGV8YlFz62ed9iWVkYvy/IyimEUSFaKD3QwV7ENwapmlHymVU5126tNHVh9MW3s8bfXhOW8b16TpliR5otW8jm6GHiSEYOYoF076Zjx6x29/8OHfssZzNp6Ou3XzF8zicxYtZWBislfUKL4CFgIvd5mcYuowed7PjKOSGTYWwiAsij6srChmJI058Q6qyIYD9jgIIQzWxXygPtZPpUj6gGJv/V4HGoViPsLWt77bK9P7FDtg8zPr21RrX48wT3g11OcA0MG2oii8aXB4jiInK5FmSAcOGBUawwFvtFuJO7dpbLBynuM/UK0Jn0YolXtqNfn4vl/bRZ7pfcsXdrqX3f/rhgd/L+m0J8zMdZ1eKTn7U7C4zNg+yhX+ed2/syZ2AkZQ12umSRyI8wpOqdaXdTszRmocOR5Mz2bu/ZnL81/xIsTnyFCOsKpeg9ViPBo1jxMq1UVpEjS3r+K/Pe81aJQ0qhShlQiuxPxOtL+J1heOZZ0e63LUQAAAAABJRU5ErkJggg==', 'options': [ { 'name': 'enabled', 'type': 'enabler', 'default': False, }, { 'name': 'extra_score', 'advanced': True, 'label': 'Extra Score', 'type': 'int', 'default': 0, 'description': 'Starting score for each release found via this provider.', } ], }, ], }]<|fim▁end|>
elif newsgroup == "abtv":
<|file_name|>views.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from django.shortcuts import render_to_response from django.template import RequestContext from django.http import HttpResponseRedirect, Http404 from django.core.urlresolvers import reverse from StackSmash.apps.uploader.models import Document from StackSmash.apps.uploader.forms import DocumentForm def list(request): # Make sure the user is authenticated and able to modify the blog if not request.user.is_superuser: raise Http404 # Handle file upload if request.method == 'POST': form = DocumentForm(request.POST, request.FILES) if form.is_valid(): newdoc = Document(docfile=request.FILES['docfile']) newdoc.save() # Redirect to the document list after POST return HttpResponseRedirect(reverse('upload:list')) else: form = DocumentForm() # A empty, unbound form # Load documents for the list page documents = Document.objects.all() # Render list page with the documents and the form<|fim▁hole|> 'uploader/list.html', {'documents': documents, 'form': form}, context_instance=RequestContext(request) ) def delete(request, pk): # Make sure the user is authenticated and able to modify the blog if not request.user.is_superuser: raise Http404 Document.objects.filter(pk=pk).delete() return HttpResponseRedirect(reverse('list'))<|fim▁end|>
return render_to_response(
<|file_name|>comparison.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import, print_function, division import petl as etl table = [['foo', 'bar'], ['a', 1], ['b', None]] # raises exception under Python 3 etl.select(table, 'bar', lambda v: v > 0) # no error under Python 3 etl.selectgt(table, 'bar', 0)<|fim▁hole|># or ... etl.select(table, 'bar', lambda v: v > etl.Comparable(0))<|fim▁end|>
<|file_name|>2.py<|end_file_name|><|fim▁begin|>f = open('main_h.tex','w') f.write("""\documentclass[a4paper,5pt,twocolumn,titlepage]{article} \usepackage{mathpazo} \usepackage{xeCJK} \usepackage{pstricks,pst-node,pst-tree} \usepackage{titlesec} \\titleformat*{\section}{\sf} \\titleformat*{\subsection}{\sf} %\setsansfont{DejaVu Sans Mono} \setsansfont{Source Code Pro} %\setsansfont{Monaco} %\setsansfont{Liberation Mono} %\setsansfont{Luxi Mono} %\setsansfont{Ubuntu Mono}<|fim▁hole|>%\\renewcommand\cftsubsecfont{\sf} \setCJKmainfont{SimHei} \setCJKsansfont{SimHei} \setCJKmonofont{SimHei} \usepackage{graphicx} \usepackage{amsmath} \usepackage{xcolor} \usepackage{type1cm} \usepackage{booktabs} \usepackage{geometry} %\usepackage[landscape]{geometry} \geometry{left=1cm,right=1cm,top=1cm,bottom=1.5cm,headsep=0.2cm} \usepackage{courier} %\usepackage{time} %\usepackage{charter} \usepackage{fancyhdr} \usepackage{listings} \lstset{ breaklines=true, tabsize=2, %numbers=left, %numbersep=4pt, %numberstyle=\sf\scriptsize, commentstyle=\sf\scriptsize, basicstyle=\sf\scriptsize, %frame=leftline, escapeinside=``, extendedchars=false } \usepackage[CJKbookmarks=true, colorlinks, linkcolor=black, anchorcolor=black, citecolor=black]{hyperref} \AtBeginDvi{\special{pdf:tounicode UTF8-UCS2}} \usepackage{indentfirst} \setlength{\parindent}{0em} \\newcommand*{\TitleFont}{% \\fontsize{50}{80}% \\selectfont} \\usepackage{graphicx} \\title{\TitleFont{Code Library} \\begin{center} \includegraphics[scale=2]{./image1.png} \end{center} } \\author{Himemiya Nanao @ Perfect Freeze} \setmainfont{Linux Libertine O} \usepackage{tocloft} \cftsetindents{section}{0.1in}{0.2in} \cftsetindents{subsection}{.2in}{0.3in} \cftsetindents{subsubsection}{.3in}{0.45in} \\begin{document} \maketitle \\tableofcontents \\newpage \pagenumbering{arabic} """) import os import string for x,y,z in os.walk('.'): if x == '.': continue f.write('\n\section{'+string.capwords(x[2:])+'}\n') for files in z: if ((files == '.ds_store') or (files == '.DS_Store') or (files.endswith('~')) or files.endswith('.pdf')): continue ot=files if ot.endswith(""".cpp"""): ot=ot[:-4]; elif ot.endswith(""".cxx"""): ot=ot[:-4]; elif ot.endswith("""java"""): ot=ot[:-5] elif ot.endswith('tex'): f.write('\\input{\"'+x+'/'+files+'\"}\n') continue f.write('\subsection{'+ot+'}\n') fname = x+'/'+files fname = fname.lower() if files.count('.')!=0: if fname.endswith(""".java"""): lang = """Java""" else: lang = """C++""" f.write('\\lstinputlisting[language='+lang+']{\"'+fname+'\"}\n') # print files.count('.') # print files else: f.write('\\lstinputlisting{\"'+fname+'\"}\n') f.write( """ \end{document} """) f.close()<|fim▁end|>
%\setsansfont{Droid Sans Mono} \usepackage{tocloft} \\renewcommand\cftsecfont{\sf}
<|file_name|>0010_auto_20180124_1945.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # Generated by Django 1.9.10 on 2018-01-24 19:45 from __future__ import unicode_literals from django.db import migrations, models class Migration(migrations.Migration): <|fim▁hole|> ] operations = [ migrations.AlterField( model_name='page', name='template_key', field=models.CharField(choices=[(b'content/pages/page.html', 'Page'), (b'content/pages/index_page.html', 'Index Page')], default=b'content/pages/page.html', max_length=255, verbose_name='template'), ), ]<|fim▁end|>
dependencies = [ ('page', '0009_auto_20180124_0105'),
<|file_name|>main.rs<|end_file_name|><|fim▁begin|>fn factorize(n:u64) -> Vec<u64> { let mut factors:Vec<u64> = vec!(); let mut target = n; //grab factors of two while target % 2 == 0 { factors.push(2); target /= 2; } //grab odd prime factors let mut factor = 3;<|fim▁hole|> while factor * factor <= target && target > 1 { if target % factor == 0 { factors.push(factor); target /= factor; factor = 3; } else { factor += 2; } } //if anything is left, `target` is also a factor (prime, too!) if target > 1 { factors.push(target); } //done! return factors; } fn get_largest_factor(n:u64) -> u64 { let mut largest_factor = 0; for factor in factorize(n) { if factor > largest_factor { largest_factor = factor; } } return largest_factor; } fn main() { let largest_factor = get_largest_factor(600851475143); println!("{}", largest_factor); }<|fim▁end|>
<|file_name|>learn_images.py<|end_file_name|><|fim▁begin|># coding: utf-8 """ labeled_images文件夹中: 1. 包含的文件夹名为标记名 2. 标记名下的文件夹中包含了学习图片 """ import os from sklearn import svm <|fim▁hole|>from numpy import array from utils import * clf = None def get_image_fit_data(dir_name): """读取labeled_images文件夹的图片,返回图片的特征矩阵及相应标记""" X = [] Y = [] name_list = os.listdir(dir_name) for name in name_list: if not os.path.isdir(os.path.join(dir_name, name)): continue image_files = os.listdir(os.path.join(dir_name, name)) for img in image_files: i = Image.open(os.path.join(dir_name, name, img)) X.append(array(i).flatten()) Y.append(name) return X, Y def get_classifier_from_learn(): """学习数据获取分类器""" global clf if not clf: clf = svm.SVC() X, Y = get_image_fit_data("labeled_images") clf.fit(X, Y) return clf def main(): clf = get_classifier_from_learn() print(clf) PX, PY = get_image_fit_data("predict_images") for x, y in zip(PX, PY): r = clf.predict(x.reshape(1, -1)) print(r, y) if __name__ == '__main__': main()<|fim▁end|>
from PIL import Image
<|file_name|>test_extension.py<|end_file_name|><|fim▁begin|># ~*~ coding: utf-8 ~*~ """ tests.marshmallow.test_extension ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Tests for the :class:`MarshmallowAwareApp` to ensure that it will properly register the extension and can be used, as well as testing the top level schema. """ import pytest from flask_marshmallow import fields from fleaker import Schema from fleaker.marshmallow import MarshmallowAwareApp, marsh SERVER_NAME = 'localhost' def _create_app(): """Create the app for testing.""" app = MarshmallowAwareApp.create_app('tests.marshmallow') app.config['SERVER_NAME'] = SERVER_NAME @app.route('/test') def test(): """Test route for Flask URL generation.""" return b'test' return app def test_marshmallow_extension_creation(): """Ensure creating the MM Aware app registers the extension.""" app = _create_app() # now check for the proper extension assert 'flask-marshmallow' in app.extensions assert app.extensions['flask-marshmallow'] is marsh def test_marshmallow_extension_url_for():<|fim▁hole|> """Ensure that the UrlFor field with Flask-Marshmallow works.""" app = _create_app() class TestSchema(Schema): """Only has a link field""" link = fields.UrlFor('test', _external=False) ext_link = fields.UrlFor('test', _scheme='https', _external=True) schema = TestSchema() # not in an app context, should fail with pytest.raises(RuntimeError): schema.dump({}) with app.app_context(): data = schema.dump({}).data assert data['link'] == '/test' assert data['ext_link'] == 'https://{}/test'.format(SERVER_NAME)<|fim▁end|>
<|file_name|>ja.js<|end_file_name|><|fim▁begin|>/* Copyright (c) 2003-2018, CKSource - Frederico Knabben. All rights reserved. For licensing, see LICENSE.md or https://ckeditor.com/legal/ckeditor-oss-license */ CKEDITOR.plugins.setLang( 'colorbutton', 'ja', { auto: '自動', bgColorTitle: '背景色', colors: { '000': 'Black', '800000': 'Maroon', '8B4513': 'Saddle Brown', '2F4F4F': 'Dark Slate Gray', '008080': 'Teal', '000080': 'Navy', '4B0082': 'Indigo', '696969': 'Dark Gray', B22222: 'Fire Brick', A52A2A: 'Brown', DAA520: 'Golden Rod', '006400': 'Dark Green', '40E0D0': 'Turquoise', '0000CD': 'Medium Blue', '800080': 'Purple', '808080': 'Gray', F00: 'Red', FF8C00: 'Dark Orange', FFD700: 'Gold', '008000': 'Green', '0FF': 'Cyan', '00F': 'Blue', EE82EE: 'Violet', A9A9A9: 'Dim Gray', FFA07A: 'Light Salmon', FFA500: 'Orange', FFFF00: 'Yellow', '00FF00': 'Lime', AFEEEE: 'Pale Turquoise', ADD8E6: 'Light Blue', DDA0DD: 'Plum', D3D3D3: 'Light Grey', FFF0F5: 'Lavender Blush', FAEBD7: 'Antique White',<|fim▁hole|> F0FFF0: 'Honeydew', F0FFFF: 'Azure', F0F8FF: 'Alice Blue', E6E6FA: 'Lavender', FFF: 'White', '1ABC9C': 'Strong Cyan', '2ECC71': 'Emerald', '3498DB': 'Bright Blue', '9B59B6': 'Amethyst', '4E5F70': 'Grayish Blue', 'F1C40F': 'Vivid Yellow', '16A085': 'Dark Cyan', '27AE60': 'Dark Emerald', '2980B9': 'Strong Blue', '8E44AD': 'Dark Violet', '2C3E50': 'Desaturated Blue', 'F39C12': 'Orange', 'E67E22': 'Carrot', 'E74C3C': 'Pale Red', 'ECF0F1': 'Bright Silver', '95A5A6': 'Light Grayish Cyan', 'DDD': 'Light Gray', 'D35400': 'Pumpkin', 'C0392B': 'Strong Red', 'BDC3C7': 'Silver', '7F8C8D': 'Grayish Cyan', '999': 'Dark Gray' }, more: 'その他の色...', panelTitle: '色', textColorTitle: '文字色' } );<|fim▁end|>
FFFFE0: 'Light Yellow',
<|file_name|>request.rs<|end_file_name|><|fim▁begin|>use error; use hyper; use hyper_rustls; use response; use std; use types; use url; #[derive(Debug, Clone)] pub struct Request<'a> { client: std::sync::Arc<hyper::Client>, method: types::Method, url: url::Url<'a>, headers: hyper::header::Headers, body: String, } impl<'a> Request<'a> { pub fn new() -> Request<'a> { Request { client: std::sync::Arc::new(hyper::Client::new()), method: types::Method::Get, url: url::Url::new(), headers: hyper::header::Headers::new(), body: String::new(), } } pub fn get_scheme(&self) -> types::Scheme { self.url.scheme.clone() } pub fn set_url(&mut self, url: url::Url<'a>) -> &mut Request<'a> { let client = match url.scheme { types::Scheme::Http => hyper::Client::new(), types::Scheme::Https => { let tls = hyper_rustls::TlsClient::new(); let conn = hyper::net::HttpsConnector::new(tls); hyper::Client::with_connector(conn) } }; self.client = std::sync::Arc::new(client); self.url = url; self } pub fn set_method(&mut self, method: types::Method) -> &mut Request<'a> { self.method = method; self } pub fn set_headers( &mut self, headers: hyper::header::Headers, ) -> &mut Request<'a> { self.headers = headers; self } pub fn set_body<S>(&mut self, body: S) -> &mut Request<'a> where S: Into<String> { self.body = body.into(); self } pub fn add_header<H>(&mut self, header: H) -> &mut Request<'a> where H: hyper::header::Header + hyper::header::HeaderFormat { self.headers.set(header); self } pub fn add_path<S>(&mut self, path: S) -> &mut Request<'a> where S: Into<String> { self.url.add_path(path.into()); self } pub fn add_query_param<K, S>(&mut self, key: K, val: S) -> &mut Request<'a> where K: Into<String>, S: Into<std::borrow::Cow<'a, str>> { self.url.add_query_param(key, val); self } pub fn basic_auth<U, P>(&mut self, u: U, p: Option<P>) -> &mut Request<'a> where U: Into<String>, P: Into<String> { let auth = hyper::header::Basic { username: u.into(), password: p.map(|s| s.into()), }; self.add_header(hyper::header::Authorization(auth)) }<|fim▁hole|> pub fn set_bearer_token<T>(&mut self, token: T) -> &mut Request<'a> where T: Into<String> { let auth = hyper::header::Bearer { token: token.into() }; self.add_header(hyper::header::Authorization(auth)) } pub fn send(&self) -> std::result::Result<response::Response, error::Error> { let url = try!(self.url.to_url()); let request = match self.method { types::Method::Get => self.client.get(url), types::Method::Post => self.client.post(url), types::Method::Put => self.client.put(url), }; request .headers(self.headers.clone()) .body(&self.body) .send() .map_err(error::Error::from) .map(response::Response::from) .and_then( |res| match res.ok { true => Ok(res), false => Err(error::Error::from(res)), }, ) } }<|fim▁end|>
<|file_name|>signals.py<|end_file_name|><|fim▁begin|>""" This creates Django signals that automatically update the elastic search Index When an item is created, a signal is thrown that runs the create / update index API of the Search Manager When an item is deleted, a signal is thrown that executes the delete index API of the Search Manager This way the Policy compass database and Elastic search index remains synced. """ from django.db.models.signals import post_save, post_delete from django.dispatch import receiver from .models import Metric from apps.searchmanager.signalhandlers import search_index_update, search_index_delete from apps.datasetmanager import internal_api <|fim▁hole|> instance = kwargs['instance'] search_index_update('metric', instance.id) @receiver(post_delete, sender=Metric) def delete_document_on_search_service(sender, **kwargs): instance = kwargs['instance'] search_index_delete('metric', instance.id) @receiver(post_delete, sender=Metric) def remove_metric_link_from_datasets(sender, **kwargs): instance = kwargs['instance'] internal_api.remove_metric_link(instance.id)<|fim▁end|>
@receiver(post_save, sender=Metric) def update_document_on_search_service(sender, **kwargs): if not kwargs.get('raw', False):
<|file_name|>config.py<|end_file_name|><|fim▁begin|>frame_len = .1 keys = { 'DOWN': 0x42, 'LEFT': 0x44, 'RIGHT': 0x43, 'UP': 0x41, 'Q': 0x71, 'ENTER': 0x0a, } apple_domain = 1000 <|fim▁hole|>} game_sizes = { 's': (25, 20), 'm': (50, 40), 'l': (80, 40), } initial_size = 4<|fim▁end|>
food_values = { 'apple': 3,
<|file_name|>inspector_timeline.py<|end_file_name|><|fim▁begin|># Copyright 2013 The Chromium Authors. All rights reserved. # Use of this source code is governed by a BSD-style license that can be # found in the LICENSE file. from telemetry.core.backends.chrome import timeline_recorder from telemetry.timeline import inspector_timeline_data class TabBackendException(Exception): """An exception which indicates an error response from devtools inspector.""" pass class InspectorTimeline(timeline_recorder.TimelineRecorder): """Implementation of dev tools timeline.""" class Recorder(object): """Utility class to Start and Stop recording timeline. Example usage: with inspector_timeline.InspectorTimeline.Recorder(tab): # Something to run while the timeline is recording. This is an alternative to directly calling the Start and Stop methods below. """ def __init__(self, tab): self._tab = tab def __enter__(self): self._tab.StartTimelineRecording() def __exit__(self, *args): self._tab.StopTimelineRecording() def __init__(self, inspector_backend): super(InspectorTimeline, self).__init__() self._inspector_backend = inspector_backend self._is_recording = False @property def is_timeline_recording_running(self): return self._is_recording def Start(self): """Starts recording.""" assert not self._is_recording, 'Start should only be called once.' self._is_recording = True self._inspector_backend.RegisterDomain( 'Timeline', self._OnNotification, self._OnClose) # The 'bufferEvents' parameter below means that events should not be sent # individually as messages, but instead all at once when a Timeline.stop # request is sent. request = { 'method': 'Timeline.start', 'params': {'bufferEvents': True}, } self._SendSyncRequest(request) def Stop(self): """Stops recording and returns timeline event data.""" if not self._is_recording: return None request = {'method': 'Timeline.stop'} result = self._SendSyncRequest(request) self._inspector_backend.UnregisterDomain('Timeline') self._is_recording = False raw_events = result['events']<|fim▁hole|> def _SendSyncRequest(self, request, timeout=60): """Sends a devtools remote debugging protocol request. The types of request that are valid is determined by protocol.json: https://src.chromium.org/viewvc/blink/trunk/Source/devtools/protocol.json Args: request: Request dict, may contain the keys 'method' and 'params'. timeout: Number of seconds to wait for a response. Returns: The result given in the response message. Raises: TabBackendException: The response indicates an error occurred. """ response = self._inspector_backend.SyncRequest(request, timeout) if 'error' in response: raise TabBackendException(response['error']['message']) return response['result'] def _OnNotification(self, msg): """Handler called when a message is received.""" # Since 'Timeline.start' was invoked with the 'bufferEvents' parameter, # there will be no timeline notifications while recording. pass def _OnClose(self): """Handler called when a domain is unregistered.""" pass<|fim▁end|>
return inspector_timeline_data.InspectorTimelineData(raw_events)
<|file_name|>message_publisher.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python import rospy from basics.msg import Complex from random import random rospy.init_node('message_publisher') pub = rospy.Publisher('complex', Complex) rate = rospy.Rate(2) while not rospy.is_shutdown(): msg = Complex() msg.real = random() msg.imaginary = random() <|fim▁hole|> pub.publish(msg) rate.sleep()<|fim▁end|>
<|file_name|>bgtest.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- import sys sys.path.append("..") # # Copyright (C) 2000-2005 by Yasushi Saito ([email protected]) # # Pychart is free software; you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by the # Free Software Foundation; either version 2, or (at your option) any # later version.<|fim▁hole|># FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License # for more details. # from pychart import * data = [(10, 20, 5, 5), (20, 65, 5, 5), (30, 55, 4, 4), (40, 45, 2, 2), (50, 25, 3, 3)] ar = area.T(x_axis = axis.X(label = "X label"), y_axis = axis.Y(label = "Y label")) ar.add_plot(bar_plot.T(label="foo", data = data, fill_style = fill_style.gray90, error_bar = error_bar.bar3, error_minus_col = 2, error_plus_col = 3)) canvas.default_canvas().set_background(fill_style.diag, -100, -100, 300, 300) ar.draw()<|fim▁end|>
# # Pychart is distributed in the hope that it will be useful, but WITHOUT # ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
<|file_name|>_to-iobject.js<|end_file_name|><|fim▁begin|>// to indexed object, toObject with fallback for non-array-like ES3 strings var IObject = require('./_iobject')<|fim▁hole|><|fim▁end|>
, defined = require('./_defined'); module.exports = function(it){ return IObject(defined(it)); };
<|file_name|>nolink-with-link-args.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // error-pattern:aFdEfSeVEE <|fim▁hole|>/* We're testing that link_args are indeed passed when nolink is specified. So we try to compile with junk link_args and make sure they are visible in the compiler output. */ #[feature(link_args)]; #[link_args = "aFdEfSeVEEE"] #[nolink] extern {} fn main() { }<|fim▁end|>
<|file_name|>other_test.py<|end_file_name|><|fim▁begin|>from itertools import chain from django.contrib.contenttypes.models import ContentType from django.core.exceptions import ValidationError from django.test import TestCase import guardian from guardian.backends import ObjectPermissionBackend from guardian.exceptions import GuardianError from guardian.exceptions import NotUserNorGroup from guardian.exceptions import ObjectNotPersisted from guardian.exceptions import WrongAppError from guardian.models import GroupObjectPermission from guardian.models import UserObjectPermission from guardian.models import AnonymousUser from guardian.models import Group from guardian.models import Permission from guardian.models import User class UserPermissionTests(TestCase): fixtures = ['tests.json'] def setUp(self): self.user = User.objects.get(username='jack') self.ctype = ContentType.objects.create(name='foo', model='bar', app_label='fake-for-guardian-tests') self.obj1 = ContentType.objects.create(name='ct1', model='foo', app_label='guardian-tests') self.obj2 = ContentType.objects.create(name='ct2', model='bar', app_label='guardian-tests') def test_assignement(self): self.assertFalse(self.user.has_perm('change_contenttype', self.ctype)) UserObjectPermission.objects.assign('change_contenttype', self.user, self.ctype) self.assertTrue(self.user.has_perm('change_contenttype', self.ctype)) self.assertTrue(self.user.has_perm('contenttypes.change_contenttype', self.ctype)) def test_assignement_and_remove(self): UserObjectPermission.objects.assign('change_contenttype', self.user, self.ctype) self.assertTrue(self.user.has_perm('change_contenttype', self.ctype)) UserObjectPermission.objects.remove_perm('change_contenttype', self.user, self.ctype) self.assertFalse(self.user.has_perm('change_contenttype', self.ctype)) def test_ctypes(self): UserObjectPermission.objects.assign('change_contenttype', self.user, self.obj1) self.assertTrue(self.user.has_perm('change_contenttype', self.obj1)) self.assertFalse(self.user.has_perm('change_contenttype', self.obj2)) UserObjectPermission.objects.remove_perm('change_contenttype', self.user, self.obj1) UserObjectPermission.objects.assign('change_contenttype', self.user, self.obj2) self.assertTrue(self.user.has_perm('change_contenttype', self.obj2)) self.assertFalse(self.user.has_perm('change_contenttype', self.obj1)) UserObjectPermission.objects.assign('change_contenttype', self.user, self.obj1) UserObjectPermission.objects.assign('change_contenttype', self.user, self.obj2) self.assertTrue(self.user.has_perm('change_contenttype', self.obj2)) self.assertTrue(self.user.has_perm('change_contenttype', self.obj1)) UserObjectPermission.objects.remove_perm('change_contenttype', self.user, self.obj1) UserObjectPermission.objects.remove_perm('change_contenttype', self.user, self.obj2) self.assertFalse(self.user.has_perm('change_contenttype', self.obj2)) self.assertFalse(self.user.has_perm('change_contenttype', self.obj1)) def test_get_for_object(self): perms = UserObjectPermission.objects.get_for_object(self.user, self.ctype) self.assertEqual(perms.count(), 0) to_assign = sorted([ 'delete_contenttype', 'change_contenttype', ]) for perm in to_assign: UserObjectPermission.objects.assign(perm, self.user, self.ctype) perms = UserObjectPermission.objects.get_for_object(self.user, self.ctype) codenames = sorted(chain(*perms.values_list('permission__codename'))) self.assertEqual(to_assign, codenames) def test_assign_validation(self): self.assertRaises(Permission.DoesNotExist, 
UserObjectPermission.objects.assign, 'change_group', self.user, self.user) group = Group.objects.create(name='test_group_assign_validation') ctype = ContentType.objects.get_for_model(group) perm = Permission.objects.get(codename='change_user') create_info = dict( permission = perm, user = self.user, content_type = ctype, object_pk = group.pk ) self.assertRaises(ValidationError, UserObjectPermission.objects.create, **create_info) def test_unicode(self): obj_perm = UserObjectPermission.objects.assign("change_user", self.user, self.user) self.assertTrue(isinstance(obj_perm.__unicode__(), unicode)) def test_errors(self): not_saved_user = User(username='not_saved_user') self.assertRaises(ObjectNotPersisted, UserObjectPermission.objects.assign, "change_user", self.user, not_saved_user) self.assertRaises(ObjectNotPersisted, UserObjectPermission.objects.remove_perm, "change_user", self.user, not_saved_user) self.assertRaises(ObjectNotPersisted, UserObjectPermission.objects.get_for_object, "change_user", not_saved_user) class GroupPermissionTests(TestCase): fixtures = ['tests.json'] def setUp(self): self.user = User.objects.get(username='jack') self.group, created = Group.objects.get_or_create(name='jackGroup') self.user.groups.add(self.group) self.ctype = ContentType.objects.create(name='foo', model='bar', app_label='fake-for-guardian-tests') self.obj1 = ContentType.objects.create(name='ct1', model='foo', app_label='guardian-tests') self.obj2 = ContentType.objects.create(name='ct2', model='bar', app_label='guardian-tests') def test_assignement(self): self.assertFalse(self.user.has_perm('change_contenttype', self.ctype)) self.assertFalse(self.user.has_perm('contenttypes.change_contenttype', self.ctype)) GroupObjectPermission.objects.assign('change_contenttype', self.group,<|fim▁hole|> self.ctype) self.assertTrue(self.user.has_perm('change_contenttype', self.ctype)) self.assertTrue(self.user.has_perm('contenttypes.change_contenttype', self.ctype)) def test_assignement_and_remove(self): GroupObjectPermission.objects.assign('change_contenttype', self.group, self.ctype) self.assertTrue(self.user.has_perm('change_contenttype', self.ctype)) GroupObjectPermission.objects.remove_perm('change_contenttype', self.group, self.ctype) self.assertFalse(self.user.has_perm('change_contenttype', self.ctype)) def test_ctypes(self): GroupObjectPermission.objects.assign('change_contenttype', self.group, self.obj1) self.assertTrue(self.user.has_perm('change_contenttype', self.obj1)) self.assertFalse(self.user.has_perm('change_contenttype', self.obj2)) GroupObjectPermission.objects.remove_perm('change_contenttype', self.group, self.obj1) GroupObjectPermission.objects.assign('change_contenttype', self.group, self.obj2) self.assertTrue(self.user.has_perm('change_contenttype', self.obj2)) self.assertFalse(self.user.has_perm('change_contenttype', self.obj1)) GroupObjectPermission.objects.assign('change_contenttype', self.group, self.obj1) GroupObjectPermission.objects.assign('change_contenttype', self.group, self.obj2) self.assertTrue(self.user.has_perm('change_contenttype', self.obj2)) self.assertTrue(self.user.has_perm('change_contenttype', self.obj1)) GroupObjectPermission.objects.remove_perm('change_contenttype', self.group, self.obj1) GroupObjectPermission.objects.remove_perm('change_contenttype', self.group, self.obj2) self.assertFalse(self.user.has_perm('change_contenttype', self.obj2)) self.assertFalse(self.user.has_perm('change_contenttype', self.obj1)) def test_get_for_object(self): group = 
Group.objects.create(name='get_group_perms_for_object') self.user.groups.add(group) perms = GroupObjectPermission.objects.get_for_object(group, self.ctype) self.assertEqual(perms.count(), 0) to_assign = sorted([ 'delete_contenttype', 'change_contenttype', ]) for perm in to_assign: GroupObjectPermission.objects.assign(perm, group, self.ctype) perms = GroupObjectPermission.objects.get_for_object(group, self.ctype) codenames = sorted(chain(*perms.values_list('permission__codename'))) self.assertEqual(to_assign, codenames) def test_assign_validation(self): self.assertRaises(Permission.DoesNotExist, GroupObjectPermission.objects.assign, 'change_user', self.group, self.group) user = User.objects.create(username='test_user_assign_validation') ctype = ContentType.objects.get_for_model(user) perm = Permission.objects.get(codename='change_group') create_info = dict( permission = perm, group = self.group, content_type = ctype, object_pk = user.pk ) self.assertRaises(ValidationError, GroupObjectPermission.objects.create, **create_info) def test_unicode(self): obj_perm = GroupObjectPermission.objects.assign("change_group", self.group, self.group) self.assertTrue(isinstance(obj_perm.__unicode__(), unicode)) def test_errors(self): not_saved_group = Group(name='not_saved_group') self.assertRaises(ObjectNotPersisted, GroupObjectPermission.objects.assign, "change_group", self.group, not_saved_group) self.assertRaises(ObjectNotPersisted, GroupObjectPermission.objects.remove_perm, "change_group", self.group, not_saved_group) self.assertRaises(ObjectNotPersisted, GroupObjectPermission.objects.get_for_object, "change_group", not_saved_group) class ObjectPermissionBackendTests(TestCase): def setUp(self): self.user = User.objects.create(username='jack') self.backend = ObjectPermissionBackend() def test_attrs(self): self.assertTrue(self.backend.supports_anonymous_user) self.assertTrue(self.backend.supports_object_permissions) self.assertTrue(self.backend.supports_inactive_user) def test_authenticate(self): self.assertEqual(self.backend.authenticate( self.user.username, self.user.password), None) def test_has_perm_noobj(self): result = self.backend.has_perm(self.user, "change_contenttype") self.assertFalse(result) def test_has_perm_notauthed(self): user = AnonymousUser() self.assertFalse(self.backend.has_perm(user, "change_user", self.user)) def test_has_perm_wrong_app(self): self.assertRaises(WrongAppError, self.backend.has_perm, self.user, "no_app.change_user", self.user) def test_obj_is_not_model(self): for obj in (Group, 666, "String", [2, 1, 5, 7], {}): self.assertFalse(self.backend.has_perm(self.user, "any perm", obj)) def test_not_active_user(self): user = User.objects.create(username='non active user') ctype = ContentType.objects.create(name='foo', model='bar', app_label='fake-for-guardian-tests') perm = 'change_contenttype' UserObjectPermission.objects.assign(perm, user, ctype) self.assertTrue(self.backend.has_perm(user, perm, ctype)) user.is_active = False user.save() self.assertFalse(self.backend.has_perm(user, perm, ctype)) class GuardianBaseTests(TestCase): def has_attrs(self): self.assertTrue(hasattr(guardian, '__version__')) def test_version(self): for x in guardian.VERSION: self.assertTrue(isinstance(x, (int, str))) def test_get_version(self): self.assertTrue(isinstance(guardian.get_version(), str)) class TestExceptions(TestCase): def _test_error_class(self, exc_cls): self.assertTrue(isinstance(exc_cls, GuardianError)) def test_error_classes(self): self.assertTrue(isinstance(GuardianError(), 
Exception)) guardian_errors = [NotUserNorGroup] for err in guardian_errors: self._test_error_class(err())<|fim▁end|>
<|file_name|>test.js<|end_file_name|><|fim▁begin|>"use strict"; Object.defineProperty(exports, "__esModule", { value: true }); const Command = require('../ember-cli/lib/models/command'); const test_1 = require("../tasks/test"); const config_1 = require("../models/config"); const common_tags_1 = require("common-tags"); const config = config_1.CliConfig.fromProject() || config_1.CliConfig.fromGlobal(); const testConfigDefaults = config.getPaths('defaults.build', [ 'progress', 'poll' ]); const TestCommand = Command.extend({ name: 'test', aliases: ['t'], description: 'Run unit tests in existing project.', works: 'insideProject', availableOptions: [ { name: 'watch', type: Boolean, aliases: ['w'], description: 'Run build when files change.' }, { name: 'code-coverage', type: Boolean, default: false, aliases: ['cc'], description: 'Coverage report will be in the coverage/ directory.' }, { name: 'config', type: String, aliases: ['c'], description: common_tags_1.oneLine `Use a specific config file. Defaults to the karma config file in .angular-cli.json.` }, { name: 'single-run', type: Boolean, aliases: ['sr'], description: 'Run tests a single time.' }, { name: 'progress', type: Boolean, default: testConfigDefaults['progress'], description: 'Log progress to the console while in progress.' }, { name: 'browsers', type: String, description: 'Override which browsers tests are run against.' }, { name: 'colors', type: Boolean, description: 'Enable or disable colors in the output (reporters and logs).' }, { name: 'log-level', type: String, description: 'Level of logging.' }, { name: 'port', type: Number, description: 'Port where the web server will be listening.' }, { name: 'reporters', type: String, description: 'List of reporters to use.' }, { name: 'sourcemaps', type: Boolean, default: true, aliases: ['sm', 'sourcemap'], description: 'Output sourcemaps.' }, { name: 'poll', type: Number, default: testConfigDefaults['poll'], description: 'Enable and define the file watching poll time period (milliseconds).' }, { name: 'environment', type: String, aliases: ['e'], description: 'Defines the build environment.' }, {<|fim▁hole|> } ], run: function (commandOptions) { const testTask = new test_1.default({ ui: this.ui, project: this.project }); if (commandOptions.watch !== undefined && !commandOptions.watch) { // if not watching ensure karma is doing a single run commandOptions.singleRun = true; } return testTask.run(commandOptions); } }); TestCommand.overrideCore = true; exports.default = TestCommand; //# sourceMappingURL=/users/hansl/sources/angular-cli/commands/test.js.map<|fim▁end|>
name: 'app', type: String, aliases: ['a'], description: 'Specifies app name to use.'
<|file_name|>forms.py<|end_file_name|><|fim▁begin|>from django import forms from .models import Topic, Entry class TopicForm(forms.ModelForm): class Meta: model = Topic fields = ['text'] labels = {'text': ''} <|fim▁hole|> class EntryForm(forms.ModelForm): class Meta: model = Entry fields = ['text'] labels = {'text': ''} widgets = {'text': forms.Textarea(attrs={'cols': 80})}<|fim▁end|>
<|file_name|>test_sql.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # -*- coding: utf-8 -*- """Tests for pdeo.database.sql""" <|fim▁hole|>from pdeo.databases import sql # if version_info[0] == 2: # utf8 for python2 # from codecs import open class PdeoDatabaseSqlTestCase(unittest.TestCase): def setUp(self): pass def tearDown(self): pass def testEntryPoints(self): sql.Database<|fim▁end|>
import unittest # import responses
<|file_name|>Queue.test.js<|end_file_name|><|fim▁begin|>const test = require('tape') const Queue = require('./Queue') test('peek on empty queue', assert => { const queue = new Queue() assert.strictEqual(queue.peek(), null) assert.end() }) test('enqueue items to the queue', assert => { const queue = new Queue() queue.enqueue('foo') queue.enqueue('bar') assert.equal(queue.length, 2) assert.equal(queue.peek(), 'foo') assert.end() }) test('dequeue items from the queue', assert => { const queue = new Queue() queue.enqueue('A') queue.enqueue('B') queue.enqueue('C') assert.equal(queue.dequeue(), 'A') assert.equal(queue.dequeue(), 'B') assert.equal(queue.dequeue(), 'C') assert.end() }) <|fim▁hole|> assert.equal(queue.peek(), 'A') assert.equal(queue.dequeue(), 'A') assert.equal(queue.dequeue(), 'B') assert.equal(queue.dequeue(), 'C') assert.end() }) test('throws error when trying to dequeue empty queue', assert => { const queue = new Queue() assert.throws(() => queue.dequeue(), RangeError) assert.end() })<|fim▁end|>
test('create queue from array', assert => { const queue = new Queue(['A', 'B', 'C'])
<|file_name|>inspect_test.go<|end_file_name|><|fim▁begin|>package commands import ( "bytes" "encoding/json" "flag" "io" "os" "strings" "testing" "github.com/codegangsta/cli" "github.com/docker/machine/libmachine" "github.com/docker/machine/libmachine/auth" "github.com/docker/machine/libmachine/engine" "github.com/docker/machine/libmachine/swarm" "github.com/stretchr/testify/assert" ) func TestCmdInspectFormat(t *testing.T) { actual, host := runInspectCommand(t, []string{"test-a"}) expected, _ := json.MarshalIndent(host, "", " ") assert.Equal(t, string(expected), actual) actual, _ = runInspectCommand(t, []string{"--format", "{{.DriverName}}", "test-a"}) assert.Equal(t, "none", actual) actual, _ = runInspectCommand(t, []string{"--format", "{{json .DriverName}}", "test-a"}) assert.Equal(t, "\"none\"", actual) actual, _ = runInspectCommand(t, []string{"--format", "{{prettyjson .Driver}}", "test-a"}) type ExpectedDriver struct { CaCertPath string IPAddress string MachineName string PrivateKeyPath string SSHPort int SSHUser string SwarmDiscovery string SwarmHost string SwarmMaster bool URL string } expected, err := json.MarshalIndent(&ExpectedDriver{MachineName: "test-a", URL: "unix:///var/run/docker.sock"}, "", " ") assert.NoError(t, err) assert.Equal(t, string(expected), actual) } func runInspectCommand(t *testing.T, args []string) (string, *libmachine.Host) { stdout := os.Stdout stderr := os.Stderr shell := os.Getenv("SHELL") r, w, _ := os.Pipe() os.Stdout = w os.Stderr = w os.Setenv("MACHINE_STORAGE_PATH", TestStoreDir)<|fim▁hole|> defer func() { os.Setenv("MACHINE_STORAGE_PATH", "") os.Setenv("SHELL", shell) os.Stdout = stdout os.Stderr = stderr }() if err := clearHosts(); err != nil { t.Fatal(err) } store, sErr := getTestStore() if sErr != nil { t.Fatal(sErr) } mcn, err := libmachine.New(store) if err != nil { t.Fatal(err) } hostOptions := &libmachine.HostOptions{ EngineOptions: &engine.EngineOptions{}, SwarmOptions: &swarm.SwarmOptions{ Master: false, Discovery: "", Address: "", Host: "", }, AuthOptions: &auth.AuthOptions{}, } flags := getTestDriverFlags() _, err = mcn.Create("test-a", "none", hostOptions, flags) if err != nil { t.Fatal(err) } outStr := make(chan string) go func() { var testOutput bytes.Buffer io.Copy(&testOutput, r) outStr <- testOutput.String() }() set := flag.NewFlagSet("inspect", 0) set.String("format", "", "") set.Parse(args) c := cli.NewContext(nil, set, set) cmdInspect(c) w.Close() out := <-outStr return strings.TrimSpace(out), getHost(c) }<|fim▁end|>
os.Setenv("SHELL", "/bin/bash")
<|file_name|>partition.ts<|end_file_name|><|fim▁begin|>import {Observable} from '../Observable'; import {filter} from './filter'; const not = <T>(fn: (val: T) => boolean) => (x: T) => !fn(x); /** * Splits the source Observable into two, one with values that satisfy a predicate, * and another with values that don't satisfy the predicate. * * Marble diagram: * * ```text * --1--2--3--4--5--6--7--8--| * partition * --1-----3-----5-----7-----| * -----2-----4-----6-----8--| * * @param predicate A function that evaluates each value emitted by the source Observable. * If it returns true, the value is emitted on the first Observable in the returned array, * if false the value is emitted on the second Observable in the array. * @returns [Observable<T>, Observable<T>] */<|fim▁hole|> filter.call(this, not(predicate)) ]; }<|fim▁end|>
export function partition<T>(predicate: (val: T) => boolean): [Observable<T>, Observable<T>] { return [ filter.call(this, predicate),
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>metadata = { "abbreviation": "ex", "capitol_timezone": "Etc/UTC", "legislature_name": "Example Legislature", "lower_chamber_name": "House of Representatives", "lower_chamber_term": 2, "lower_chamber_title": "Representative", "upper_chamber_name": "Senate", "upper_chamber_term": 6,<|fim▁hole|> { "name": "T0", "sessions": [ "S0" ], "start_year": 2009, "end_year": 2010 }, { "name": "T1", "sessions": [ "S1", "Special1" ], "start_year": 2011, "end_year": 2012 }, { "name": "T2", "sessions": [ "S2", "Special2" ], "start_year": 2013, "end_year": 2014 } ], "session_details": { "S0": {"start_date": 1250000000.0, "type": "primary", "display_name": "Session Zero"}, "S1": {"start_date": 1300000000.0, "type": "primary", "display_name": "Session One"}, "Special1": {"start_date": 1330000000.0, "type": "special", "display_name": "Special Session One"}, "S2": {"start_date": 1350000000.0, "type": "primary", "display_name": "Session Two"}, "Special2": {"start_date": 1360000000.0, "type": "special", "display_name": "Special Session Two"} } }<|fim▁end|>
"upper_chamber_title": "Senator", "name": "Example State", "terms": [
<|file_name|>Update.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3 # -*-coding:UTF-8 -* import os import re import sys import time import redis import datetime sys.path.append(os.path.join(os.environ['AIL_BIN'], 'packages'))<|fim▁hole|> sys.path.append(os.path.join(os.environ['AIL_BIN'], 'lib/')) import ConfigLoader def rreplace(s, old, new, occurrence): li = s.rsplit(old, occurrence) return new.join(li) if __name__ == '__main__': start_deb = time.time() config_loader = ConfigLoader.ConfigLoader() r_serv_term_stats = config_loader.get_redis_conn("ARDB_Trending") r_serv_termfreq = config_loader.get_redis_conn("ARDB_TermFreq") config_loader = None r_serv_term_stats.flushdb() #convert all regex: all_regex = r_serv_termfreq.smembers('TrackedRegexSet') for regex in all_regex: tags = list( r_serv_termfreq.smembers('TrackedNotificationTags_{}'.format(regex)) ) mails = list( r_serv_termfreq.smembers('TrackedNotificationEmails_{}'.format(regex)) ) new_term = regex[1:-1] res = Term.parse_json_term_to_add({"term": new_term, "type": 'regex', "tags": tags, "mails": mails, "level": 1}, '[email protected]') if res[1] == 200: term_uuid = res[0]['uuid'] list_items = r_serv_termfreq.smembers('regex_{}'.format(regex)) for paste_item in list_items: item_id = Item.get_item_id(paste_item) item_date = Item.get_item_date(item_id) Term.add_tracked_item(term_uuid, item_id, item_date) # Invalid Tracker => remove it else: print('Invalid Regex Removed: {}'.format(regex)) print(res[0]) # allow reprocess r_serv_termfreq.srem('TrackedRegexSet', regex) all_tokens = r_serv_termfreq.smembers('TrackedSetTermSet') for token in all_tokens: tags = list( r_serv_termfreq.smembers('TrackedNotificationTags_{}'.format(token)) ) mails = list( r_serv_termfreq.smembers('TrackedNotificationEmails_{}'.format(token)) ) res = Term.parse_json_term_to_add({"term": token, "type": 'word', "tags": tags, "mails": mails, "level": 1}, '[email protected]') if res[1] == 200: term_uuid = res[0]['uuid'] list_items = r_serv_termfreq.smembers('tracked_{}'.format(token)) for paste_item in list_items: item_id = Item.get_item_id(paste_item) item_date = Item.get_item_date(item_id) Term.add_tracked_item(term_uuid, item_id, item_date) # Invalid Tracker => remove it else: print('Invalid Token Removed: {}'.format(token)) print(res[0]) # allow reprocess r_serv_termfreq.srem('TrackedSetTermSet', token) all_set = r_serv_termfreq.smembers('TrackedSetSet') for curr_set in all_set: tags = list( r_serv_termfreq.smembers('TrackedNotificationTags_{}'.format(curr_set)) ) mails = list( r_serv_termfreq.smembers('TrackedNotificationEmails_{}'.format(curr_set)) ) to_remove = ',{}'.format(curr_set.split(',')[-1]) new_set = rreplace(curr_set, to_remove, '', 1) new_set = new_set[2:] new_set = new_set.replace(',', '') res = Term.parse_json_term_to_add({"term": new_set, "type": 'set', "nb_words": 1, "tags": tags, "mails": mails, "level": 1}, '[email protected]') if res[1] == 200: term_uuid = res[0]['uuid'] list_items = r_serv_termfreq.smembers('tracked_{}'.format(curr_set)) for paste_item in list_items: item_id = Item.get_item_id(paste_item) item_date = Item.get_item_date(item_id) Term.add_tracked_item(term_uuid, item_id, item_date) # Invalid Tracker => remove it else: print('Invalid Set Removed: {}'.format(curr_set)) print(res[0]) # allow reprocess r_serv_termfreq.srem('TrackedSetSet', curr_set) r_serv_termfreq.flushdb() #Set current ail version r_serv.set('ail:version', 'v2.2') #Set current ail version r_serv.hset('ail:update_date', 'v2.2', 
datetime.datetime.now().strftime("%Y%m%d"))<|fim▁end|>
import Item import Term
<|file_name|>ShardingFilterTestCase.java<|end_file_name|><|fim▁begin|>// Copyright 2010 The Bazel Authors. All Rights Reserved. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package com.google.testing.junit.runner.sharding.testing; import static com.google.common.truth.Truth.assertThat; import com.google.testing.junit.runner.sharding.api.ShardingFilterFactory; import java.util.ArrayList; import java.util.Collection; import java.util.Deque; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedList; import java.util.List; import java.util.Map; import java.util.Set; import junit.framework.TestCase; import org.junit.Test; import org.junit.runner.Description; import org.junit.runner.manipulation.Filter; /** * Common base class for all sharding filter tests. */ public abstract class ShardingFilterTestCase extends TestCase { static final List<Description> TEST_DESCRIPTIONS = createGenericTestCaseDescriptions(6); /** * Returns a filter of the subclass type using the given descriptions, * shard index, and total number of shards. */ protected abstract ShardingFilterFactory createShardingFilterFactory(); public final void testShardingIsCompleteAndPartitioned_oneShard() { assertShardingIsCompleteAndPartitioned(createFilters(TEST_DESCRIPTIONS, 1), TEST_DESCRIPTIONS); } public final void testShardingIsStable_oneShard() { assertShardingIsStable(createFilters(TEST_DESCRIPTIONS, 1), TEST_DESCRIPTIONS); } public final void testShardingIsCompleteAndPartitioned_moreTestsThanShards() { assertShardingIsCompleteAndPartitioned(createFilters(TEST_DESCRIPTIONS, 5), TEST_DESCRIPTIONS); } public final void testShardingIsStable_moreTestsThanShards() { assertShardingIsStable(createFilters(TEST_DESCRIPTIONS, 5), TEST_DESCRIPTIONS); } public final void testShardingIsCompleteAndPartitioned_sameNumberOfTestsAndShards() { assertShardingIsCompleteAndPartitioned(createFilters(TEST_DESCRIPTIONS, 6), TEST_DESCRIPTIONS); } public final void testShardingIsStable_sameNumberOfTestsAndShards() { assertShardingIsStable(createFilters(TEST_DESCRIPTIONS, 6), TEST_DESCRIPTIONS); } public final void testShardingIsCompleteAndPartitioned_moreShardsThanTests() { assertShardingIsCompleteAndPartitioned(createFilters(TEST_DESCRIPTIONS, 7), TEST_DESCRIPTIONS); } public final void testShardingIsStable_moreShardsThanTests() { assertShardingIsStable(createFilters(TEST_DESCRIPTIONS, 7), TEST_DESCRIPTIONS); } public final void testShardingIsCompleteAndPartitioned_duplicateDescriptions() { List<Description> descriptions = new ArrayList<>(); descriptions.addAll(createGenericTestCaseDescriptions(6)); descriptions.addAll(createGenericTestCaseDescriptions(6)); assertShardingIsCompleteAndPartitioned(createFilters(descriptions, 7), descriptions); } public final void testShardingIsStable_duplicateDescriptions() { List<Description> descriptions = new ArrayList<>(); descriptions.addAll(createGenericTestCaseDescriptions(6)); descriptions.addAll(createGenericTestCaseDescriptions(6)); assertShardingIsStable(createFilters(descriptions, 7), 
descriptions); } public final void testShouldRunTestSuite() { Description testSuiteDescription = createTestSuiteDescription(); Filter filter = createShardingFilterFactory().createFilter(TEST_DESCRIPTIONS, 0, 1); assertThat(filter.shouldRun(testSuiteDescription)).isTrue(); } /** * Creates a list of generic test case descriptions. * * @param numDescriptions the number of generic test descriptions to add to the list. */ public static List<Description> createGenericTestCaseDescriptions(int numDescriptions) { List<Description> descriptions = new ArrayList<>(); for (int i = 0; i < numDescriptions; i++) { descriptions.add(Description.createTestDescription(Test.class, "test" + i)); } return descriptions; } protected static final List<Filter> createFilters(List<Description> descriptions, int numShards, ShardingFilterFactory factory) { List<Filter> filters = new ArrayList<>(); for (int shardIndex = 0; shardIndex < numShards; shardIndex++) { filters.add(factory.createFilter(descriptions, shardIndex, numShards)); } return filters; } protected final List<Filter> createFilters(List<Description> descriptions, int numShards) { return createFilters(descriptions, numShards, createShardingFilterFactory()); } protected static void assertThrowsExceptionForUnknownDescription(Filter filter) { try { filter.shouldRun(Description.createTestDescription(Object.class, "unknown")); fail("expected thrown exception"); } catch (IllegalArgumentException expected) { } } /** * Simulates test sharding with the given filters and test descriptions. * * @param filters a list of filters, one per test shard * @param descriptions a list of test descriptions * @return a mapping from each filter to the descriptions of the tests that would be run * by the shard associated with that filter. */ protected static Map<Filter, List<Description>> simulateTestRun(List<Filter> filters, List<Description> descriptions) { Map<Filter, List<Description>> descriptionsRun = new HashMap<>(); for (Filter filter : filters) { for (Description description : descriptions) { if (filter.shouldRun(description)) { addDescriptionForFilterToMap(descriptionsRun, filter, description); } } } return descriptionsRun; } /** * Simulates test sharding with the given filters and test descriptions, for a * set of test descriptions that is in a different order in every test shard. * * @param filters a list of filters, one per test shard * @param descriptions a list of test descriptions * @return a mapping from each filter to the descriptions of the tests that would be run * by the shard associated with that filter. */ protected static Map<Filter, List<Description>> simulateSelfRandomizingTestRun( List<Filter> filters, List<Description> descriptions) { if (descriptions.isEmpty()) { return new HashMap<>(); } Deque<Description> mutatingDescriptions = new LinkedList<>(descriptions); Map<Filter, List<Description>> descriptionsRun = new HashMap<>(); for (Filter filter : filters) { // rotate the queue so that each filter gets the descriptions in a different order mutatingDescriptions.addLast(mutatingDescriptions.pollFirst()); for (Description description : descriptions) { if (filter.shouldRun(description)) { addDescriptionForFilterToMap(descriptionsRun, filter, description); } } } return descriptionsRun; } /** * Creates a test suite description (a Description that returns true * when {@link org.junit.runner.Description#isSuite()} is called.) 
*/ protected static Description createTestSuiteDescription() { Description testSuiteDescription = Description.createSuiteDescription("testSuite"); testSuiteDescription.addChild(Description.createSuiteDescription("testCase")); return testSuiteDescription; } /** * Tests that the sharding is complete (each test is run at least once) and * partitioned (each test is run at most once) -- in other words, that * each test is run exactly once. This is a requirement of all test * sharding functions. */ protected static void assertShardingIsCompleteAndPartitioned(List<Filter> filters, List<Description> descriptions) { Map<Filter, List<Description>> run = simulateTestRun(filters, descriptions); assertThatCollectionContainsExactlyElementsInList(getAllValuesInMap(run), descriptions); run = simulateSelfRandomizingTestRun(filters, descriptions); assertThatCollectionContainsExactlyElementsInList(getAllValuesInMap(run), descriptions); } /** * Tests that sharding is stable for the given filters, regardless of the * ordering of the descriptions. This is useful for verifying that sharding * works with self-randomizing test suites, and a requirement of all test * sharding functions. */ protected static void assertShardingIsStable( List<Filter> filters, List<Description> descriptions) { Map<Filter, List<Description>> run1 = simulateTestRun(filters, descriptions); Map<Filter, List<Description>> run2 = simulateTestRun(filters, descriptions); assertThat(run2).isEqualTo(run1); Map<Filter, List<Description>> randomizedRun1 = simulateSelfRandomizingTestRun(filters, descriptions); Map<Filter, List<Description>> randomizedRun2 = simulateSelfRandomizingTestRun(filters, descriptions); assertThat(randomizedRun2).isEqualTo(randomizedRun1); } private static void addDescriptionForFilterToMap( Map<Filter, List<Description>> descriptionsRun, Filter filter, Description description) { List<Description> descriptions = descriptionsRun.get(filter); if (descriptions == null) { descriptions = new ArrayList<>(); descriptionsRun.put(filter, descriptions); } descriptions.add(description); } private static Collection<Description> getAllValuesInMap(Map<Filter, List<Description>> map) { Collection<Description> allDescriptions = new ArrayList<>(); for (List<Description> descriptions : map.values()) { allDescriptions.addAll(descriptions); } return allDescriptions; } /** * Returns whether the Collection and the List contain exactly the same elements with the same * frequency, ignoring the ordering. */ private static void assertThatCollectionContainsExactlyElementsInList( Collection<Description> actual, List<Description> expectedDescriptions) { String basicAssertionMessage = "Elements of collection " + actual + " are not the same as the " + "elements of expected list " + expectedDescriptions + ". "; if (actual.size() != expectedDescriptions.size()) { throw new AssertionError(basicAssertionMessage + "The number of elements is different."); } List<Description> actualDescriptions = new ArrayList<Description>(actual); // Keeps track of already reviewed descriptions, so they won't be checked again when next // encountered. // Note: this algorithm has O(n^2) time complexity and will be slow for large inputs. Set<Description> reviewedDescriptions = new HashSet<>(); for (int i = 0; i < actual.size(); i++) { Description currDescription = actualDescriptions.get(i); // If already reviewed, skip.<|fim▁hole|> } int actualFreq = 0; int expectedFreq = 0; // Count the frequency of the current description in both lists. 
for (int j = 0; j < actual.size(); j++) { if (currDescription.equals(actualDescriptions.get(j))) { actualFreq++; } if (currDescription.equals(expectedDescriptions.get(j))) { expectedFreq++; } } if (actualFreq < expectedFreq) { throw new AssertionError(basicAssertionMessage + "There are " + (expectedFreq - actualFreq) + " missing occurrences of " + currDescription + "."); } else if (actualFreq > expectedFreq) { throw new AssertionError(basicAssertionMessage + "There are " + (actualFreq - expectedFreq) + " unexpected occurrences of " + currDescription + "."); } reviewedDescriptions.add(currDescription); } } }<|fim▁end|>
if (reviewedDescriptions.contains(currDescription)) { continue;
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.shortcuts import get_object_or_404, render_to_response from django.http import Http404, HttpResponseRedirect from django.template import RequestContext from django.core.urlresolvers import reverse from django.contrib.auth.decorators import login_required from django.contrib.auth.models import User from django.core.exceptions import ObjectDoesNotExist from django.contrib.contenttypes.models import ContentType from basic.messages.models import Message, TO_STATUS_READ, TO_STATUS_DELETED, FROM_STATUS_DELETED from basic.messages.forms import MessageForm from basic.tools.baseconv import base62 @login_required def message_list(request, mailbox=None, template_name='messages/message_list.html'): """ Returns a list of user messages. Template:: ``messages/message_list.html`` Context: message_list List of Message objects mailbox String representing the current 'mailbox' """ if mailbox == 'sent': message_list = Message.objects.sent(request.user) elif mailbox == 'inbox': message_list = Message.objects.new(request.user) elif mailbox == 'trash': message_list = Message.objects.trash(request.user) else: message_list = Message.objects.archive(request.user) return render_to_response(template_name, { 'message_list': message_list, 'mailbox': mailbox or 'archive' }, context_instance=RequestContext(request)) <|fim▁hole|> template_name='messages/message_form.html'): """ Handles a new message and displays a form. Template:: ``messages/message_form.html`` Context: form MessageForm object """ next = request.GET.get('next', None) if request.GET.get('to', None): to_user = get_object_or_404(User, username=request.GET['to']) else: to_user = None if content_type_id and object_id: content_type = ContentType.objects.get(pk=base62.to_decimal(content_type_id)) Model = content_type.model_class() try: related_object = Model.objects.get(pk=base62.to_decimal(object_id)) except ObjectDoesNotExist: raise Http404, "The object ID was invalid." else: related_object = None form = MessageForm(request.POST or None, initial={'to_user': to_user}) if form.is_valid(): message = form.save(commit=False) if related_object: message.object = related_object message.from_user = request.user message = form.save() return HttpResponseRedirect(next or reverse('messages:messages')) return render_to_response(template_name, { 'form': form, 'to_user': to_user, 'related_object': related_object, 'next': next, }, context_instance=RequestContext(request)) def message_reply(request, object_id, template_name='messages/message_form.html'): """ Handles a reply to a specific message. """ original_message = get_object_or_404(Message, pk=object_id) next = request.GET.get('next', None) initial = { 'to_user': original_message.from_user, 'subject': 'Re: %s' % original_message.subject } form = MessageForm(request.POST or None, initial=initial) if form.is_valid(): message = form.save(commit=False) message.object = original_message.object message.from_user = request.user message = form.save() return HttpResponseRedirect(next or reverse('messages:messages')) return render_to_response(template_name, { 'form': form, 'message': original_message, 'next': next, }, context_instance=RequestContext(request)) @login_required def message_remove(request, object_id, template_name='messages/message_remove_confirm.html'): """ Remove a message. 
""" message = get_object_or_404(Message, pk=object_id) next = request.GET.get('next', None) if request.method == 'POST': if message.to_user == request.user: message.to_status = TO_STATUS_DELETED else: message.from_status = FROM_STATUS_DELETED message.save() return HttpResponseRedirect(next or reverse('messages:messages')) return render_to_response(template_name, { 'message': message, 'next': next, }, context_instance=RequestContext(request)) @login_required def message_detail(request, object_id, template_name='messages/message_detail.html'): """ Return a message. """ message = get_object_or_404(Message, pk=object_id) content_type = ContentType.objects.get_for_model(message) thread_list = Message.objects.filter(object_id=message.object.pk, content_type=content_type).order_by('id') if message.to_user == request.user: message.to_status = TO_STATUS_READ message.save() return render_to_response(template_name, { 'message': message, 'thread_list': thread_list }, context_instance=RequestContext(request))<|fim▁end|>
@login_required def message_create(request, content_type_id=None, object_id=None,
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import patterns, include, url from django.contrib import admin from rest_framework import viewsets, routers from voting_app.models import Topic from voting_app.views import Vote from voting_app.serializer import TopicSerializer admin.autodiscover() # ViewSets define the view behavior.<|fim▁hole|> router = routers.DefaultRouter() router.register(r'topics', TopicViewSet) urlpatterns = patterns('', url(r'^$', 'voting_app.views.index', name='index'), url(r'^', include(router.urls)), url(r'^vote/$', Vote.as_view()), url(r'^api-auth/', include('rest_framework.urls', namespace='rest_framework')), url(r'^admin/', include(admin.site.urls)), )<|fim▁end|>
class TopicViewSet(viewsets.ModelViewSet): model = Topic serializer_class = TopicSerializer queryset = Topic.objects.all().filter(hide=False)
<|file_name|>ndvi.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # Version 0.1 # NDVI automated acquisition and calculation by Vladyslav Popov # Using landsat-util, source: https://github.com/developmentseed/landsat-util # Uses Amazon Web Services Public Dataset (Landsat 8) # Script should be run every day from os.path import join, abspath, dirname, exists import os import errno import shutil from tempfile import mkdtemp import subprocess import urllib2 import logging import sys import datetime import re from landsat.search import Search from landsat.ndvi import NDVIWithManualColorMap # Enable logging logging.basicConfig(stream=sys.stderr, level=logging.DEBUG) # Get current date current_date = datetime.datetime.now().date() print 'Current date is:', current_date # Let's subtract 1 day from current date sub_date = current_date - datetime.timedelta(days=1) print 'Subtract date is:', sub_date # Scene search by date and WRS-2 row and path search = Search() try: search_results = search.search(paths_rows='177,025', start_date=sub_date, end_date=current_date) search_string = str(search_results.get('results')) search_list = re.compile('\w+').findall(search_string) scene_id = str(search_list.pop(5)) print scene_id l = len(scene_id) print l #exit if we have no current image except Exception: raise SystemExit('Closing...') # String concat for building Red Band URL for download url_red = 'http://landsat-pds.s3.amazonaws.com/L8/177/025/' + scene_id + '/' + scene_id + '_B4.TIF' # String concat for building NIR Band URL for download url_nir = 'http://landsat-pds.s3.amazonaws.com/L8/177/025/' + scene_id + '/' + scene_id + '_B5.TIF' # Build filenames for band rasters and output NDVI file red_file = scene_id + '_B4.TIF' nir_file = scene_id + '_B5.TIF' ndvi_file = scene_id + '_NDVI.TIF' print 'Filenames built successfully' # Create directories for future processing base_dir = os.getcwd() temp_folder = join(base_dir, "temp_folder") scene_folder = join(temp_folder, scene_id) if not os.path.exists(temp_folder): os.makedirs(temp_folder) if not os.path.exists(scene_folder): os.makedirs(scene_folder) # Download section for Band 4 using urllib2 file_name = url_red.split('/')[-1] u = urllib2.urlopen(url_red) f = open("temp_folder/"+scene_id+"/"+file_name, 'wb') meta = u.info() file_size = int(meta.getheaders("Content-Length")[0]) print "Downloading: %s Bytes: %s" % (file_name, file_size) file_size_dl = 0 block_sz = 8192 while True: buffer = u.read(block_sz) if not buffer: break <|fim▁hole|> file_size_dl += len(buffer) f.write(buffer) status = r"%10d [%3.2f%%]" % (file_size_dl, file_size_dl * 100. / file_size) status = status + chr(8)*(len(status)+1) print status, f.close() # Download section for Band 5 using urllib2 file_name = url_nir.split('/')[-1] u = urllib2.urlopen(url_nir) f = open("temp_folder/"+scene_id+"/"+file_name, 'wb') meta = u.info() file_size = int(meta.getheaders("Content-Length")[0]) print "Downloading: %s Bytes: %s" % (file_name, file_size) file_size_dl = 0 block_sz = 8192 while True: buffer = u.read(block_sz) if not buffer: break file_size_dl += len(buffer) f.write(buffer) status = r"%10d [%3.2f%%]" % (file_size_dl, file_size_dl * 100.
/ file_size) status = status + chr(8)*(len(status)+1) print status, f.close() # NDVI processing # Let's create a new instance of the class nd = NDVIWithManualColorMap(path=temp_folder+"/"+scene_id, dst_path=temp_folder) # Start process print nd.run() # Create virtual dataset for dividing tiff into tiles subprocess.call(["gdalbuildvrt", "-a_srs", "EPSG:3857", "NDVImap.vrt", "temp_folder/"+scene_id+"/"+ndvi_file]) # Remove old tiles shutil.rmtree("ndvi_tiles", ignore_errors=True) # Start process of dividing with virtual dataset subprocess.call(["./gdal2tilesp.py", "-w", "none", "-s EPSG:3857", "-p", "mercator", "-z 8-12", "--format=PNG", "--processes=4", "-o", "tms", "NDVImap.vrt", "ndvi_tiles"]) # Let's clean temporary files (bands, ndvi, vrt) shutil.rmtree("temp_folder", ignore_errors=True) os.remove("NDVImap.vrt") print 'All temporary data was successfully removed' # Close script raise SystemExit('Closing...')<|fim▁end|>
<|file_name|>complain.py<|end_file_name|><|fim▁begin|>"""User-friendly exception handler for swood.""" import http.client import traceback import sys import os __file__ = os.path.abspath(__file__) class ComplainToUser(Exception): """When used with ComplaintFormatter, tells the user what error (of theirs) caused the failure and exits.""" pass def can_submit(): if not os.path.isdir(os.path.expanduser("~/.swood")): os.mkdir(os.path.expanduser("~/.swood")) sbpath = os.path.expanduser("~/.swood/submit-bugs") if os.path.isfile(sbpath): try: with open(sbpath) as sb: resp = sb.read(1) if resp == "1": return 1 elif resp == "0": return 0 except: pass while True: resp = input( "Something went wrong. Do you want to send an anonymous bug report? (Type Y or N): ").lower() if resp in ("yes", "y", "true"): try: with open(sbpath, "w") as sb: sb.write("1") except: pass return 1 elif resp in ("no", "n", "false"): try: with open(sbpath, "w") as sb: sb.write("0") except: pass return 0 class ComplaintFormatter: """Notifies the user when the program fails predictably and uploads bug reports. When used in a with statement, ComplaintFormatter catches all exceptions. If the exception is a ComplainToUser exception, it will simply print the error message and exit (with an exit code of 1). If the exception is something else (i.e. an actual, unexpected exception), it will upload the traceback to the swood debug server (unless the user has opted out of sending bug reports.) """ def __init__(self, version=None): self.version = version def __enter__(self): pass def __exit__(self, exc_type, exc, tb): if isinstance(exc, ComplainToUser): print("Error: {}".format(exc), file=sys.stderr) sys.exit(1) elif isinstance(exc, Exception): # scrub stack of full path names for extra privacy # also normalizes the paths, helping to detect dupes scrubbed_stack = traceback.extract_tb(tb) # cut off traces of stuff that isn't ours others_cutoff = next(idx for idx, fs in enumerate(scrubbed_stack) if os.path.samefile( os.path.dirname(fs.filename), os.path.dirname(__file__))) scrubbed_stack = scrubbed_stack[others_cutoff:] # rewrite paths so they contain only relative directories # (hides username on Windows and Linux) dirstart = os.path.abspath( os.path.join(os.path.dirname(__file__), "..")) for fs in scrubbed_stack: fs.filename = os.path.relpath( fs.filename, start=dirstart).replace("\\", "/") str_tb = "Traceback (most recent call last):\n" + \ "".join(traceback.format_list(scrubbed_stack)) + \ "".join(traceback.format_exception_only(exc_type, exc)) if self.version is not None: str_tb = "# " + self.version + "\n" + str_tb if "--optout" in sys.argv or "-o" in sys.argv: print( "Something went wrong. A bug report will not be sent because of your command-line flag.", file=sys.stderr)<|fim▁hole|> elif os.environ.get("SWOOD_OPTOUT") == "1": print( "Something went wrong. A bug report will not be sent because of your environment variable.", file=sys.stderr) return False elif not can_submit(): print( "Something went wrong. A bug report will not be sent because of your config setting.", file=sys.stderr) return False else: print( "Something went wrong. A bug report will be sent to help figure it out. 
(see --optout)", file=sys.stderr) try: conn = http.client.HTTPSConnection("meme.institute") conn.request("POST", "/swood/bugs/submit", str_tb) resp = conn.getresponse().read().decode("utf-8") if resp == "done": print("New bug submitted!", file=sys.stderr) elif resp == "dupe": print( "This bug is already in the queue to be fixed.", file=sys.stderr) else: raise Exception except Exception: print("Submission of bug report failed.", file=sys.stderr) traceback.print_exc() return True<|fim▁end|>
return False
<|file_name|>NormalCurve.java<|end_file_name|><|fim▁begin|>/**************************************************** Statistics Online Computational Resource (SOCR) http://www.StatisticsResource.org All SOCR programs, materials, tools and resources are developed by and freely disseminated to the entire community. Users may revise, extend, redistribute, modify under the terms of the Lesser GNU General Public License as published by the Open Source Initiative http://opensource.org/licenses/. All efforts should be made to develop and distribute factually correct, useful, portable and extensible resource all available in all digital formats for free over the Internet. SOCR resources are distributed in the hope that they will be useful, but without any warranty; without any explicit, implicit or implied warranty for merchantability or fitness for a particular purpose. See the GNU Lesser General Public License for more details see http://opensource.org/licenses/lgpl-license.php. http://www.SOCR.ucla.edu http://wiki.stat.ucla.edu/socr It s Online, Therefore, It Exists! ****************************************************/ /* created by annie che 20060915. */ package edu.ucla.stat.SOCR.util; import java.awt.BasicStroke; import java.awt.Color; import java.awt.Graphics; import java.awt.Graphics2D; import java.util.ArrayList; import java.util.HashMap; import java.util.Iterator; import java.util.Set; import edu.ucla.stat.SOCR.analyses.result.NormalPowerResult; import edu.ucla.stat.SOCR.distributions.Domain; import edu.ucla.stat.SOCR.distributions.IntervalData; import edu.ucla.stat.SOCR.distributions.NormalDistribution; import edu.ucla.stat.SOCR.modeler.Modeler; import edu.ucla.stat.SOCR.modeler.gui.ModelerColor; /** * This class models an interactive histogram. The user can click on the horizontal axes to add points to the data set. 
*/ public class NormalCurve extends ModelerHistogram { protected boolean drawData = false; protected double[] rawData = null; protected NormalDistribution dataDist = null; protected int[] freq = null; protected int sampleSize; protected Domain domain; protected IntervalData intervalData; protected double maxRelFreq = -1; protected Frequency frequency = null; protected HashMap map = null; private double mu0; private double muA; private double sigma; private double sampleSE; private double ciLeft; private double ciRight; private NormalDistribution normal0 = null; private NormalDistribution normalA = null; private boolean fillArea = true; private Color fillColor1 = Color.PINK; private Color fillColor2 = fillColor1.brighter(); private Color fillColor3 = Color.YELLOW; private Color fillColor4 = fillColor3.brighter(); private Color ciColor = Color.GREEN; private double xIntersect; private double yIntersect; private boolean useSampleMean = false; private String hypothesisType = null; private static byte NORMAL_CURVE_THICKNESS = 1; public NormalCurve(double a, double b, double w) { super(a, b, w); this.modelType = Modeler.CONTINUOUS_DISTRIBUTION_TYPE; setDrawUserClicks(false); } public NormalCurve() { super(); setDrawUserClicks(false); } /** * @param rawData the rawData to set * @uml.property name="rawData" */ public void setRawData(double[] input) { sampleSize = input.length; try { this.rawData = input; double dataMax = 0; try { dataMax = QSortAlgorithm.max(this.rawData); } catch (Exception e) { } double dataMin = 0; try { dataMin = QSortAlgorithm.min(this.rawData); } catch (Exception e) { } domain = new Domain(dataMin, dataMax, 1); intervalData = new IntervalData(domain, null); setIntervalData(intervalData); frequency = new Frequency(rawData); map = frequency.getMap(); frequency.computeFrequency(); maxRelFreq = frequency.getMaxRelFreq(); } catch (Exception e) { } } public void setRawDataDistribution(NormalDistribution normal) { this.dataDist = normal; } /** * @return the rawData * @uml.property name="rawData" */ public double[] getRawData() { return this.rawData; } public void paintComponent(Graphics g) { super.paintComponent(g); Graphics2D G2 = (Graphics2D) g; G2.setStroke(new BasicStroke(NORMAL_CURVE_THICKNESS)); // how thick the pen it. float[] HSBVal = new float[3]; double x = 0; double width = .5; double d = 1; try { if (rawData.length > 0) { Set keySet = map.keySet(); Iterator iterator = keySet.iterator(); int freqSize = map.size(); int dataCount = -1; String id = null; while (iterator.hasNext()) { id = (String)iterator.next(); dataCount = ((Integer)map.get(id)).intValue();; x = Double.parseDouble(id); double dataCountDouble = dataCount; double sampleSizeDouble = sampleSize; d = dataCountDouble/sampleSizeDouble; g.setColor(Color.PINK); drawBox(g, x - width / 2, 0, x + width / 2, d); g.setColor(Color.PINK); fillBox(g, x - width / 2, 0, x + width / 2, d); } } } catch (Exception e) { } if (modelX1 != null && modelX1.length > 0 && modelX2 != null && modelX2.length > 0) { double maxXVal1 = modelX1[0], maxYVal1 = modelY1[0]; int terms1 = modelY1.length; // how many dx. double maxXVal2 = modelX2[0], maxYVal2 = modelY1[0]; int term2s = modelY2.length; // how many dx. 
double xa, ya; int subLth = 0; subLth = (int) (modelX1.length / modelCount); double x1 = 0; double y1 = 0; //for (int j = 0; j < modelCount; j++) { int j = 0; x1 = (double) modelX1[0 + j * subLth]; y1 = (double) modelY1[0 + j * subLth]; for (int i = 1; i < subLth; i++) { xa = modelX1[i + j * subLth]; ya = modelY1[i + j * subLth]; x1 = xa; y1 = ya; } G2.setStroke(new BasicStroke(NORMAL_CURVE_THICKNESS)); G2.setColor(this.getOutlineColor2()); x1 = (double) modelX2[0 + j * subLth]; y1 = (double) modelY2[0 + j * subLth]; for (int i = 1; i < subLth; i++) { xa = modelX2[i + j * subLth]; ya = modelY2[i + j * subLth]; if (fillArea) { G2.setColor(Color.YELLOW); G2.setStroke(new BasicStroke(3f)); if (hypothesisType.equalsIgnoreCase(NormalPowerResult.HYPOTHESIS_TYPE_GT) && (x1 > ciRight)) { //fillBox(G2, x1, normal0.getDensity(xa), xa, normalA.getDensity(xa) && (x1 < ciLeft)) { fillBox(G2, x1, 0, xa, normalA.getDensity(xa)); } else if (hypothesisType.equalsIgnoreCase(NormalPowerResult.HYPOTHESIS_TYPE_LT)&& (x1 < ciLeft)) { //fillBox(G2, x1, normal0.getDensity(xa), xa, normalA.getDensity(xa)); fillBox(G2, x1, 0, xa, normalA.getDensity(xa)); } else if (hypothesisType.equalsIgnoreCase(NormalPowerResult.HYPOTHESIS_TYPE_NE)&& (((x1 > ciRight)) || (x1 < ciLeft))) { //fillBox(G2, x1, normal0.getDensity(xa), xa, normalA.getDensity(xa)); fillBox(G2, x1, 0, xa, normalA.getDensity(xa)); } } x1 = xa; y1 = ya; } // model curve G2.setStroke(new BasicStroke(NORMAL_CURVE_THICKNESS)); G2.setColor(this.getOutlineColor1()); x1 = (double) modelX1[0 + j * subLth]; y1 = (double) modelY1[0 + j * subLth]; ////////////////////System.outprintln("NormalCurve mu0 = " + mu0); for (int i = 1; i < subLth; i++) { xa = modelX1[i + j * subLth]; ya = modelY1[i + j * subLth]; //if (x1 < mu0 + 5 * sigma || x1 > mu0 - 5 * sigma) drawLine(G2, x1, y1, xa, ya); // when modelCount == any # x1 = xa; y1 = ya; } subLth = (int) (modelX2.length / modelCount); // model curve G2.setStroke(new BasicStroke(NORMAL_CURVE_THICKNESS)); G2.setColor(this.getOutlineColor2()); x1 = (double) modelX2[0 + j * subLth]; y1 = (double) modelY2[0 + j * subLth]; ////////////////////System.outprintln("NormalCurve muA = " + muA); for (int i = 1; i < subLth; i++) { xa = modelX2[i + j * subLth]; ya = modelY2[i + j * subLth]; //if (x1 < muA + 5 * sigma || x1 > muA - 5 * sigma) drawLine(G2, x1, y1, xa, ya); // when modelCount == any # x1 = xa; y1 = ya; } } // draw it second time (looks nicer) G2.setStroke(new BasicStroke(NORMAL_CURVE_THICKNESS)); g.setColor(Color.BLACK); super.drawAxis(g, -yMax, yMax, 0.1 * yMax, xMin, VERTICAL); // 6 args super.drawAxis(g, xMin, xMax, (xMax - xMin) / 10, 0, HORIZONTAL, axisType, listOfTicks); // c must be 0. 
} protected void drawAxisWithDomain(Graphics g, Domain domain, double c, int orientation, int type, ArrayList list){ double t; double currentUpperBound = domain.getUpperBound(); // of the model (distribution) double currentLowerBound = domain.getLowerBound(); int domainSize = domain.getSize(); if (orientation == HORIZONTAL){ this.drawLine(g, currentLowerBound, c, currentUpperBound, c); //Draw tick marks, depending on type for (int i = 0; i < domainSize; i++){ if (type == MIDPOINTS) { t = domain.getValue(i); } else { t = domain.getBound(i); } g.setColor(ModelerColor.HISTOGRAM_TICKMARK); //g.setStroke(new BasicStroke(3.05f)); //drawTick(g, t, c, VERTICAL); } if (type == BOUNDS) { t = domain.getUpperBound(); drawTick(g, t, c, VERTICAL); } //Draw labels if (type == MIDPOINTS) { t = domain.getLowerValue(); } else { t = domain.getLowerBound(); } drawLabel(g, format(t), t, c, BELOW); if (type == MIDPOINTS) { t = domain.getUpperValue(); } else { t = domain.getUpperBound(); } drawLabel(g, format(t), t, c, BELOW); //double mu0 = 0; //double muA = 0; //double sigma = 0; //ciLeft = 0; //ciRight = 0; //double sampleSE = 0; //NormalDistribution normal0 = null; //NormalDistribution normalA = null; if (list != null) { //for (int i = 0; i < list.size(); i++) { try { mu0 = Double.parseDouble(((String)list.get(0))); muA = Double.parseDouble(((String)list.get(1))); sigma = Double.parseDouble(((String)list.get(2))); ////////////System.outprintln("NormalCurve mu0 = " + mu0); ////////////System.outprintln("NormalCurve muA = " + muA); //sampleSE = Double.parseDouble(((String)list.get(3))); normal0 = new NormalDistribution(mu0, sigma); normalA = new NormalDistribution(muA, sigma); //ciLeft = mu0 - 1.96 * sigma; //ciRight = mu0 + 1.96 * sigma; //t = Double.parseDouble(((String)list.get(i))); drawLabel(g, format(mu0), mu0, c, BELOW); drawLabel(g, format(muA), muA, c, BELOW); Color oldColor = g.getColor(); g.setColor(this.getOutlineColor1()); drawLine(g, mu0, 0, mu0, normal0.getMaxDensity()); //drawLine(g, ciLeft, 0, ciLeft, normal0.getDensity(ciLeft)); //drawLine(g, ciRight, 0, ciRight, normal0.getDensity(ciRight)); g.setColor(this.getOutlineColor2()); drawLine(g, muA, 0, muA, normalA.getMaxDensity()); double density = 0; if (hypothesisType == null) hypothesisType = (String)list.get(5); ////////System.outprintln("NormalCurve hypothesisType = " + hypothesisType); if (hypothesisType.equalsIgnoreCase(NormalPowerResult.HYPOTHESIS_TYPE_NE)){ try { g.setColor(this.getOutlineColor1()); ciLeft = Double.parseDouble(((String)list.get(3))); //drawLabel(g, format(ciLeft), ciLeft, c, BELOW); density = Math.max(normal0.getDensity(ciLeft), normalA.getDensity(ciLeft)); g.setColor(ciColor); drawLine(g, ciLeft, 0, ciLeft, density); g.setColor(this.getOutlineColor1()); //hypothesisType = (String)list.get(5); //////////////System.outprintln("NormalCurve ciLeft = " + ciLeft + " density = " + density); } catch (Exception e) { //////////////System.outprintln("NormalCurve e = " + e); } try { g.setColor(this.getOutlineColor1()); ciRight = Double.parseDouble(((String)list.get(4))); //drawLabel(g, format(ciLeft), ciLeft, c, BELOW); density = Math.max(normal0.getDensity(ciRight), normalA.getDensity(ciRight)); g.setColor(ciColor); drawLine(g, ciRight, 0, ciRight, density); g.setColor(this.getOutlineColor1()); //hypothesisType = (String)list.get(5); //////////////System.outprintln("NormalCurve ciRight = " + ciRight + " density = " + density); } catch (Exception e) { //////////////System.outprintln("NormalCurve e = " + e); } } else if (muA < mu0) 
{ hypothesisType = NormalPowerResult.HYPOTHESIS_TYPE_LT; } else if (muA > mu0) { hypothesisType = NormalPowerResult.HYPOTHESIS_TYPE_GT; } if (hypothesisType.equalsIgnoreCase(NormalPowerResult.HYPOTHESIS_TYPE_LT)) { try { g.setColor(this.getOutlineColor1()); ciLeft = Double.parseDouble(((String)list.get(3))); //drawLabel(g, format(ciLeft), ciLeft, c, BELOW); density = Math.max(normal0.getDensity(ciLeft), normalA.getDensity(ciLeft)); g.setColor(ciColor); drawLine(g, ciLeft, 0, ciLeft, density); g.setColor(this.getOutlineColor1()); //hypothesisType = (String)list.get(5); ////////System.outprintln("NormalCurve ciLeft = " + ciLeft + " density = " + density); } catch (Exception e) { ////////System.outprintln("NormalCurve Exception e = " + e); } } else if (hypothesisType.equalsIgnoreCase(NormalPowerResult.HYPOTHESIS_TYPE_GT)) { try { g.setColor(this.getOutlineColor1()); ciRight = Double.parseDouble(((String)list.get(4))); //drawLabel(g, format(ciLeft), ciLeft, c, BELOW); density = Math.max(normal0.getDensity(ciRight), normalA.getDensity(ciRight)); g.setColor(ciColor); drawLine(g, ciRight, 0, ciRight, density); g.setColor(this.getOutlineColor1()); //hypothesisType = (String)list.get(5); ////////System.outprintln("NormalCurve ciRight = " + ciRight + " density = " + density); } catch (Exception e) { ////////System.outprintln("NormalCurve Exception e = " + e); } } ////////////System.outprintln("NormalCurve hypothesisType = " + hypothesisType); double x1 = 0, y1 = 0, xa = 0; g.setColor(oldColor); } catch (Exception e) { //////////////System.outprintln("NormalCurve last e = " + e); } //} } } else{ //Draw thte line drawLine(g, c, domain.getLowerBound(), c, domain.getUpperBound()); //drawLine(g, c, -10, c, 10); //Draw tick marks, depending on type for (int i = 0; i < domain.getSize(); i++){ if (type == MIDPOINTS) t = domain.getValue(i); else t = domain.getBound(i); //drawTick(g, c, t, HORIZONTAL); } if (type == BOUNDS) drawTick(g, c, domain.getUpperBound(), HORIZONTAL); //Draw labels if (type == MIDPOINTS) t = domain.getLowerValue(); else t = domain.getLowerBound(); g.setColor(ModelerColor.HISTOGRAM_LABEL); drawLabel(g, format(t), c, t, LEFT); if (type == MIDPOINTS) t = domain.getUpperValue(); else t = domain.getUpperBound(); drawLabel(g, format(t), c, t, LEFT); } int sum = Math.abs(currentXUpperBound) + Math.abs(currentXLowerBound); // int diff = Math.abs(currentXUpperBound) - Math.abs(currentXLowerBound); // } /** * @return the maxRelFreq * @uml.property name="maxRelFreq" */ public double getMaxRelFreq() { return this.maxRelFreq; } /** * @param fillArea the fillArea to set * @uml.property name="fillArea" */ public void setFillArea(boolean fillArea) { this.fillArea = fillArea; } /* private void findIntersection(double[] x1, double[] y1, double[] x2, double[] y2) { double numberTooSmall = 1E-10; boolean[] willUse = new boolean[x1.length]; for (int i = 0; i < x1.length; i++) { if (y1[i] < numberTooSmall || y2[i] < numberTooSmall) { willUse[i] = false; } else { willUse[i] = true; } } } */ public void setSampleMeanOption(boolean input) { this.useSampleMean = input; } public boolean withinSampleMeanCurve(double x, double y) {//, double scale) { double f = normalA.getDensity(x); //////////System.outprintln("NormailCurve x = " + x + ", y = " + y + ", f = " + f); <|fim▁hole|> return false; } } public void resetHypotheseType() { hypothesisType = null; } }<|fim▁end|>
if (f <= y && f >= 0.0001) { return true; } else {
<|file_name|>test_api.py<|end_file_name|><|fim▁begin|>""" This file is part of the tractor library. See LICENSE.txt for licensing, CONTRIBUTORS.txt for contributor information. Created on Jan 06, 2012. """ from pkg_resources import resource_filename # pylint: disable=E0611 from tractor import AttachmentWrapper from tractor import Base64Converter from tractor import TicketWrapper from tractor import create_wrapper_for_ticket_update from tractor import make_api from tractor import make_api_from_config from tractor.api import TractorApi from tractor.tests.base import BaseTestCase from tractor.ticket import ATTRIBUTE_NAMES from tractor.ticket import OwnerAttribute from tractor.ticket import RESOLUTION_ATTRIBUTE_VALUES from tractor.ticket import ReporterAttribute from tractor.ticket import STATUS_ATTRIBUTE_VALUES from xmlrpclib import Fault class TractorApiTestCase(BaseTestCase): def test_create_from_config(self): fn = resource_filename('tractor', 'tests/test_simple.ini') api = make_api_from_config(fn) self.assertTrue(isinstance(api, TractorApi)) def test_create_ticket(self): api = self.__create_api() t_wrapper = self.__create_ticket_wrapper() notify = False ticket_id = api.create_ticket(t_wrapper, notify) t_wrapper2 = self.__create_ticket_wrapper(summary='Test Ticket 2', description='Another Test Ticket.') ticket_id2 = api.create_ticket(t_wrapper2) self.assert_not_equal(ticket_id, ticket_id2) def test_get_ticket(self): api = self.__create_api() t_wrapper = self.__create_ticket_wrapper() ticket_id = api.create_ticket(t_wrapper) get_ticket = api.get_ticket(ticket_id) self.assert_equal(get_ticket.ticket_id, ticket_id) self.assert_is_not_none(get_ticket.time) self.assert_equal(get_ticket.changetime, get_ticket.time) for attr_name, attr_cls in ATTRIBUTE_NAMES.iteritems(): ori_value = getattr(t_wrapper, attr_name) get_value = getattr(get_ticket, attr_name) if attr_name == ReporterAttribute.NAME \ or attr_name == OwnerAttribute.NAME: # are set by the trac automatically self.assert_is_not_none(get_value) elif not ori_value is None: self.assert_equal(ori_value, get_value) elif attr_cls.IS_OPTIONAL or attr_cls.DEFAULT_VALUE is None: self.assert_is_none(get_value) else: self.assert_equal(get_value, attr_cls.DEFAULT_VALUE) def test_update_ticket(self): api = self.__create_api() t_wrapper = self.__create_ticket_wrapper() ticket_id = api.create_ticket(t_wrapper) self.assert_is_none(t_wrapper.milestone) update_wrapper = create_wrapper_for_ticket_update(ticket_id=ticket_id, milestone='milestone1') updated_ticket = api.update_ticket(update_wrapper) self.assert_equal(updated_ticket.milestone, 'milestone1') self.assert_is_not_none(updated_ticket.time) self.assert_is_not_none(updated_ticket.changetime) def test_assign_ticket(self): api = self.__create_api() t_wrapper = self.__create_ticket_wrapper() ticket_id = api.create_ticket(t_wrapper) self.assert_is_none(t_wrapper.owner) other_user = 'another user' assigned_ticket = api.assign_ticket(ticket_id, other_user) self.assert_is_not_none(assigned_ticket.time) self.assert_is_not_none(assigned_ticket.changetime) self.assert_equal(assigned_ticket.owner, other_user) def test_close_ticket(self): api = self.__create_api() t_wrapper = self.__create_ticket_wrapper() ticket_id = api.create_ticket(t_wrapper) self.assert_is_none(t_wrapper.resolution) self.assert_not_equal(t_wrapper.status, STATUS_ATTRIBUTE_VALUES.CLOSED) res_state = RESOLUTION_ATTRIBUTE_VALUES.WORKSFORME closed_ticket = api.close_ticket(ticket_id, res_state) self.assert_equal(closed_ticket.status, 
STATUS_ATTRIBUTE_VALUES.CLOSED) self.assert_equal(closed_ticket.resolution, res_state) def test_delete_ticket(self): api = self.__create_api() t_wrapper = self.__create_ticket_wrapper() ticket_id = api.create_ticket(t_wrapper) self.assert_true(api.delete_ticket(ticket_id)) self.assert_raises(Fault, api.get_ticket, ticket_id) def test_add_attachment(self): api = self.__create_api() t_wrapper = self.__create_ticket_wrapper() ticket_id = api.create_ticket(t_wrapper) att = self.__create_attachment_wrapper() file_name1 = api.add_attachment(ticket_id, att, replace_existing=True) self.assert_equal(att.file_name, file_name1) file_name2 = api.add_attachment(ticket_id, att, replace_existing=True) self.assert_equal(file_name1, file_name2) file_name3 = api.add_attachment(ticket_id, att, replace_existing=False)<|fim▁hole|> def test_get_attachment(self): api = self.__create_api() t_wrapper = self.__create_ticket_wrapper() ticket_id = api.create_ticket(t_wrapper) att = self.__create_attachment_wrapper() file_name = api.add_attachment(ticket_id, att) binary_content = api.get_attachment(ticket_id, file_name) self.assert_is_not_none(binary_content) content = Base64Converter.decode_to_string(binary_content) self.assert_equal(content, att.content) def test_get_all_attachments(self): api = self.__create_api() t_wrapper = self.__create_ticket_wrapper() ticket_id = api.create_ticket(t_wrapper) att1 = self.__create_attachment_wrapper() fn1 = api.add_attachment(ticket_id, att1) att2 = self.__create_attachment_wrapper(file_name='other.txt', content='some content', description='Another attachment.') fn2 = api.add_attachment(ticket_id, att2) att_info = api.get_all_ticket_attachments(ticket_id) self.assert_equal(len(att_info), 2) for att in att_info: self.assert_is_not_none(att.author) self.assert_is_not_none(att.size) self.assert_is_not_none(att.time) self.assert_is_none(att.content) if att.file_name == fn2: self.assert_equal(att.description, 'Another attachment.') else: self.assert_equal(att.file_name, fn1) self.assert_equal(att.description, 'An arbitrary test file.') att_info2 = api.get_all_ticket_attachments(ticket_id, fetch_content=True) self.assert_equal(len(att_info2), 2) for att in att_info2: self.assert_is_not_none(att.author) self.assert_is_not_none(att.size) self.assert_is_not_none(att.time) self.assert_is_not_none(att.content) if att.file_name == fn2: self.assert_equal(att.description, 'Another attachment.') else: self.assert_equal(att.file_name, 'test_file.txt') self.assert_equal(att.description, 'An arbitrary test file.') def delete_attachment(self): api = self.__create_api() t_wrapper = self.__create_ticket_wrapper() ticket_id = api.create_ticket(t_wrapper) att = self.__create_attachment_wrapper() file_name = api.add_attachment(ticket_id, att) self.assert_true(api.delete_attachment(ticket_id, file_name)) self.assert_raises(Fault, api.get_attachment, *(ticket_id, file_name)) def test_ticket_id_and_att_file_name_not_none(self): api = self.__create_api() t_wrapper = self.__create_ticket_wrapper() ticket_id = api.create_ticket(t_wrapper) self.assert_raises(ValueError, api.get_ticket, None) self.assert_raises(ValueError, api.update_ticket, TicketWrapper()) self.assert_raises(ValueError, api.assign_ticket, *(None, 'user1')) self.assert_raises(ValueError, api.close_ticket, *(None, 'closed')) self.assert_raises(ValueError, api.delete_ticket, None) att = self.__create_attachment_wrapper() self.assert_raises(ValueError, api.add_attachment, *(None, att)) fn = api.add_attachment(ticket_id, att) 
self.assert_raises(ValueError, api.get_attachment, *(None, fn)) self.assert_raises(ValueError, api.get_all_ticket_attachments, None) self.assert_raises(ValueError, api.delete_attachment, *(None, fn)) self.assert_raises(ValueError, api.get_attachment, *(ticket_id, None)) self.assert_raises(ValueError, api.delete_attachment, *(ticket_id, None)) def __create_api(self, **kw): if not 'username' in kw: kw['username'] = 'test_user' if not 'password' in kw: kw['password'] = 'password' if not 'realm' in kw: kw['realm'] = 'http://mycompany.com/mytrac/login/xmlrpc' if not 'load_dummy' in kw: kw['load_dummy'] = True return make_api(**kw) def __create_ticket_wrapper(self, **kw): if not 'summary' in kw: kw['summary'] = 'Test Ticket' if not 'description' in kw: kw['description'] = 'A standard Test Ticket.' return TicketWrapper(**kw) def __create_attachment_wrapper(self, **kw): if not 'content' in kw: kw['content'] = 'This is a test attachment.' if not 'file_name' in kw: kw['file_name'] = 'test_file.txt' if not 'description' in kw: kw['description'] = 'An arbitrary test file.' return AttachmentWrapper(**kw)<|fim▁end|>
self.assert_not_equal(file_name1, file_name3)
<|file_name|>hill-sphere.js<|end_file_name|><|fim▁begin|>/** * Calculates the radius of the Hill Sphere, * for a body with mass `m1` * @param {Number} m1 Mass of the lighter body * @param {Number} m2 Mass of the heavier body * @param {Number} a Semi-major axis * @param {Number} e Eccentricity * @return {Number} Hill Sphere radius */ function hillSphere( m1, m2, a, e ) { return a * ( 1 - e ) * Math.pow( ( m1 / ( 3 * m2 ) ), 1/3 ) } <|fim▁hole|>module.exports = hillSphere<|fim▁end|>
<|file_name|>0011_auto_20150618_0003.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- from __future__ import unicode_literals from django.db import models, migrations class Migration(migrations.Migration): dependencies = [ ('orchestra', '0010_merge'),<|fim▁hole|> operations = [ migrations.AlterField( model_name='project', name='process_slug', field=models.CharField(max_length=200, choices=[('website_enhancement_experiment', 'Website Enhancement Experiment'), ('website_enhancement', 'Website Enhancement'), ('doctors', 'Doctors Process')]), ), migrations.AlterField( model_name='task', name='step_slug', field=models.CharField(max_length=200, choices=[('website_enhancement_experiment', 'Website Enhancement'), ('website_enhancement', 'Website Enhancement'), ('export', 'Export'), ('design', 'Design'), ('content_extraction', ' Content Extraction')]), ), migrations.AlterField( model_name='taskassignment', name='status', field=models.IntegerField(choices=[(0, 'Processing'), (1, 'Submitted')]), ), migrations.AlterField( model_name='taskassignment', name='worker', field=models.ForeignKey(blank=True, null=True, to='orchestra.Worker'), ), ]<|fim▁end|>
]
<|file_name|>gecko.mako.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ // `data` comes from components/style/properties.mako.rs; see build.rs for more details. <%! from data import to_rust_ident, to_camel_case from data import Keyword %> <%namespace name="helpers" file="/helpers.mako.rs" /> use app_units::Au; use cssparser::Color; use custom_properties::ComputedValuesMap; use gecko_bindings::bindings; % for style_struct in data.style_structs: use gecko_bindings::structs::${style_struct.gecko_ffi_name}; use gecko_bindings::bindings::Gecko_Construct_Default_${style_struct.gecko_ffi_name}; use gecko_bindings::bindings::Gecko_CopyConstruct_${style_struct.gecko_ffi_name}; use gecko_bindings::bindings::Gecko_Destroy_${style_struct.gecko_ffi_name}; % endfor use gecko_bindings::bindings::Gecko_Construct_nsStyleVariables; use gecko_bindings::bindings::Gecko_CopyCursorArrayFrom; use gecko_bindings::bindings::Gecko_CopyFontFamilyFrom; use gecko_bindings::bindings::Gecko_CopyImageValueFrom; use gecko_bindings::bindings::Gecko_CopyListStyleImageFrom; use gecko_bindings::bindings::Gecko_CopyListStyleTypeFrom; use gecko_bindings::bindings::Gecko_Destroy_nsStyleVariables; use gecko_bindings::bindings::Gecko_EnsureImageLayersLength; use gecko_bindings::bindings::Gecko_FontFamilyList_AppendGeneric; use gecko_bindings::bindings::Gecko_FontFamilyList_AppendNamed; use gecko_bindings::bindings::Gecko_FontFamilyList_Clear; use gecko_bindings::bindings::Gecko_SetCursorArrayLength; use gecko_bindings::bindings::Gecko_SetCursorImage; use gecko_bindings::bindings::Gecko_NewCSSShadowArray; use gecko_bindings::bindings::Gecko_nsStyleFont_SetLang; use gecko_bindings::bindings::Gecko_nsStyleFont_CopyLangFrom; use gecko_bindings::bindings::Gecko_SetListStyleImage; use gecko_bindings::bindings::Gecko_SetListStyleImageNone; use gecko_bindings::bindings::Gecko_SetListStyleType; use gecko_bindings::bindings::Gecko_SetNullImageValue; use gecko_bindings::bindings::ServoComputedValuesBorrowedOrNull; use gecko_bindings::bindings::{Gecko_ResetFilters, Gecko_CopyFiltersFrom}; use gecko_bindings::bindings::RawGeckoPresContextBorrowed; use gecko_bindings::structs::{self, StyleComplexColor}; use gecko_bindings::structs::nsStyleVariables; use gecko_bindings::sugar::ns_style_coord::{CoordDataValue, CoordData, CoordDataMut}; use gecko_bindings::sugar::ownership::HasArcFFI; use gecko::values::convert_nscolor_to_rgba; use gecko::values::convert_rgba_to_nscolor; use gecko::values::GeckoStyleCoordConvertible; use gecko::values::round_border_to_device_pixels; use logical_geometry::WritingMode; use properties::longhands; use properties::{Importance, LonghandId}; use properties::{PropertyDeclaration, PropertyDeclarationBlock, PropertyDeclarationId}; use std::fmt::{self, Debug}; use std::mem::{forget, transmute, zeroed}; use std::ptr; use std::sync::Arc; use std::cmp; use values::computed::ToComputedValue; use values::{Either, Auto}; use computed_values::border_style; pub mod style_structs { % for style_struct in data.style_structs: pub use super::${style_struct.gecko_struct_name} as ${style_struct.name}; % endfor } #[derive(Clone, Debug)] pub struct ComputedValues { % for style_struct in data.style_structs: ${style_struct.ident}: Arc<style_structs::${style_struct.name}>, % endfor custom_properties: Option<Arc<ComputedValuesMap>>, shareable: bool, pub 
writing_mode: WritingMode, pub root_font_size: Au, } impl ComputedValues { pub fn inherit_from(parent: &Self, default: &Self) -> Arc<Self> { Arc::new(ComputedValues { custom_properties: parent.custom_properties.clone(), shareable: parent.shareable, writing_mode: parent.writing_mode, root_font_size: parent.root_font_size, % for style_struct in data.style_structs: % if style_struct.inherited: ${style_struct.ident}: parent.${style_struct.ident}.clone(), % else: ${style_struct.ident}: default.${style_struct.ident}.clone(), % endif % endfor }) } pub fn new(custom_properties: Option<Arc<ComputedValuesMap>>, shareable: bool, writing_mode: WritingMode, root_font_size: Au, % for style_struct in data.style_structs: ${style_struct.ident}: Arc<style_structs::${style_struct.name}>, % endfor ) -> Self { ComputedValues { custom_properties: custom_properties, shareable: shareable, writing_mode: writing_mode, root_font_size: root_font_size, % for style_struct in data.style_structs: ${style_struct.ident}: ${style_struct.ident}, % endfor } } pub fn default_values(pres_context: RawGeckoPresContextBorrowed) -> Arc<Self> { Arc::new(ComputedValues { custom_properties: None, shareable: true, writing_mode: WritingMode::empty(), // FIXME(bz): This seems dubious root_font_size: longhands::font_size::get_initial_value(), // FIXME(bz): Also seems dubious? % for style_struct in data.style_structs: ${style_struct.ident}: style_structs::${style_struct.name}::default(pres_context), % endfor }) } #[inline] pub fn is_display_contents(&self) -> bool { self.get_box().clone_display() == longhands::display::computed_value::T::contents } % for style_struct in data.style_structs: #[inline] pub fn clone_${style_struct.name_lower}(&self) -> Arc<style_structs::${style_struct.name}> { self.${style_struct.ident}.clone() } #[inline] pub fn get_${style_struct.name_lower}(&self) -> &style_structs::${style_struct.name} { &self.${style_struct.ident} } #[inline] pub fn mutate_${style_struct.name_lower}(&mut self) -> &mut style_structs::${style_struct.name} { Arc::make_mut(&mut self.${style_struct.ident}) } % endfor pub fn custom_properties(&self) -> Option<Arc<ComputedValuesMap>> { self.custom_properties.as_ref().map(|x| x.clone()) } #[allow(non_snake_case)] pub fn has_moz_binding(&self) -> bool { !self.get_box().gecko.mBinding.mRawPtr.is_null() } // FIXME(bholley): Implement this properly. 
#[inline] pub fn is_multicol(&self) -> bool { false } pub fn to_declaration_block(&self, property: PropertyDeclarationId) -> PropertyDeclarationBlock { match property { % for prop in data.longhands: % if prop.animatable: PropertyDeclarationId::Longhand(LonghandId::${prop.camel_case}) => { PropertyDeclarationBlock::with_one( PropertyDeclaration::${prop.camel_case}( % if prop.boxed: Box::new( % endif longhands::${prop.ident}::SpecifiedValue::from_computed_value( &self.get_${prop.style_struct.ident.strip("_")}().clone_${prop.ident}()) % if prop.boxed: ) % endif ), Importance::Normal ) }, % endif % endfor PropertyDeclarationId::Custom(_name) => unimplemented!(), _ => unimplemented!() } } } <%def name="declare_style_struct(style_struct)"> pub struct ${style_struct.gecko_struct_name} { gecko: ${style_struct.gecko_ffi_name}, } </%def> <%def name="impl_simple_setter(ident, gecko_ffi_name)"> #[allow(non_snake_case)] pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) { ${set_gecko_property(gecko_ffi_name, "v")} } </%def> <%def name="impl_simple_clone(ident, gecko_ffi_name)"> #[allow(non_snake_case)] pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T { self.gecko.${gecko_ffi_name} } </%def> <%def name="impl_simple_copy(ident, gecko_ffi_name, *kwargs)"> #[allow(non_snake_case)] pub fn copy_${ident}_from(&mut self, other: &Self) { self.gecko.${gecko_ffi_name} = other.gecko.${gecko_ffi_name}; } </%def> <%def name="impl_coord_copy(ident, gecko_ffi_name)"> #[allow(non_snake_case)] pub fn copy_${ident}_from(&mut self, other: &Self) { self.gecko.${gecko_ffi_name}.copy_from(&other.gecko.${gecko_ffi_name}); } </%def> <%! def get_gecko_property(ffi_name, self_param = "self"): if "mBorderColor" in ffi_name: return ffi_name.replace("mBorderColor", "unsafe { *%s.gecko.__bindgen_anon_1.mBorderColor.as_ref() }" % self_param) return "%s.gecko.%s" % (self_param, ffi_name) def set_gecko_property(ffi_name, expr): if "mBorderColor" in ffi_name: ffi_name = ffi_name.replace("mBorderColor", "*self.gecko.__bindgen_anon_1.mBorderColor.as_mut()") return "unsafe { %s = %s };" % (ffi_name, expr) return "self.gecko.%s = %s;" % (ffi_name, expr) %> <%def name="impl_keyword_setter(ident, gecko_ffi_name, keyword, cast_type='u8')"> #[allow(non_snake_case)] pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) { use properties::longhands::${ident}::computed_value::T as Keyword; // FIXME(bholley): Align binary representations and ditch |match| for cast + static_asserts let result = match v { % for value in keyword.values_for('gecko'): Keyword::${to_rust_ident(value)} => structs::${keyword.gecko_constant(value)} ${keyword.maybe_cast(cast_type)}, % endfor }; ${set_gecko_property(gecko_ffi_name, "result")} } </%def> <%def name="impl_keyword_clone(ident, gecko_ffi_name, keyword)"> #[allow(non_snake_case)] pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T { use properties::longhands::${ident}::computed_value::T as Keyword; // FIXME(bholley): Align binary representations and ditch |match| for cast + static_asserts match ${get_gecko_property(gecko_ffi_name)} ${keyword.maybe_cast("u32")} { % for value in keyword.values_for('gecko'): structs::${keyword.gecko_constant(value)} => Keyword::${to_rust_ident(value)}, % endfor % if keyword.gecko_inexhaustive: x => panic!("Found unexpected value in style struct for ${ident} property: {:?}", x), % endif } } </%def> /// Convert a Servo color into an nscolor; with currentColor as 0 /// /// Call sites will need to be 
updated after https://bugzilla.mozilla.org/show_bug.cgi?id=760345 fn color_to_nscolor_zero_currentcolor(color: Color) -> structs::nscolor { match color { Color::RGBA(rgba) => { convert_rgba_to_nscolor(&rgba) }, Color::CurrentColor => 0, } } <%def name="impl_color_setter(ident, gecko_ffi_name, complex_color=True)"> #[allow(unreachable_code)] #[allow(non_snake_case)] pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) { % if complex_color: let result = v.into(); % else: let result = color_to_nscolor_zero_currentcolor(v); % endif ${set_gecko_property(gecko_ffi_name, "result")} } </%def> <%def name="impl_color_copy(ident, gecko_ffi_name, complex_color=True)"> #[allow(non_snake_case)] pub fn copy_${ident}_from(&mut self, other: &Self) { let color = ${get_gecko_property(gecko_ffi_name, self_param = "other")}; ${set_gecko_property(gecko_ffi_name, "color")}; } </%def> <%def name="impl_color_clone(ident, gecko_ffi_name, complex_color=True)"> #[allow(non_snake_case)] pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T { % if complex_color: ${get_gecko_property(gecko_ffi_name)}.into() % else: Color::RGBA(convert_nscolor_to_rgba(${get_gecko_property(gecko_ffi_name)})) % endif } </%def> <%def name="impl_keyword(ident, gecko_ffi_name, keyword, need_clone, **kwargs)"> <%call expr="impl_keyword_setter(ident, gecko_ffi_name, keyword, **kwargs)"></%call> <%call expr="impl_simple_copy(ident, gecko_ffi_name)"></%call> %if need_clone: <%call expr="impl_keyword_clone(ident, gecko_ffi_name, keyword)"></%call> % endif </%def> <%def name="impl_simple(ident, gecko_ffi_name, need_clone=False)"> <%call expr="impl_simple_setter(ident, gecko_ffi_name)"></%call> <%call expr="impl_simple_copy(ident, gecko_ffi_name)"></%call> % if need_clone: <%call expr="impl_simple_clone(ident, gecko_ffi_name)"></%call> % endif </%def> <%def name="impl_absolute_length(ident, gecko_ffi_name, need_clone=False)"> #[allow(non_snake_case)] pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) { ${set_gecko_property(gecko_ffi_name, "v.0")} } <%call expr="impl_simple_copy(ident, gecko_ffi_name)"></%call> % if need_clone: #[allow(non_snake_case)] pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T { Au(self.gecko.${gecko_ffi_name}) } % endif </%def> <%def name="impl_position(ident, gecko_ffi_name, need_clone=False)"> #[allow(non_snake_case)] pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) { ${set_gecko_property("%s.mXPosition" % gecko_ffi_name, "v.horizontal.into()")} ${set_gecko_property("%s.mYPosition" % gecko_ffi_name, "v.vertical.into()")} } <%call expr="impl_simple_copy(ident, gecko_ffi_name)"></%call> % if need_clone: #[allow(non_snake_case)] pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T { use values::computed::Position; Position { horizontal: self.gecko.${gecko_ffi_name}.mXPosition.into(), vertical: self.gecko.${gecko_ffi_name}.mYPosition.into(), } } % endif </%def> <%def name="impl_color(ident, gecko_ffi_name, need_clone=False, complex_color=True)"> <%call expr="impl_color_setter(ident, gecko_ffi_name, complex_color)"></%call> <%call expr="impl_color_copy(ident, gecko_ffi_name, complex_color)"></%call> % if need_clone: <%call expr="impl_color_clone(ident, gecko_ffi_name, complex_color)"></%call> % endif </%def> <%def name="impl_svg_paint(ident, gecko_ffi_name, need_clone=False, complex_color=True)"> #[allow(non_snake_case)] pub fn set_${ident}(&mut self, mut v: longhands::${ident}::computed_value::T) { use 
values::computed::SVGPaintKind; use self::structs::nsStyleSVGPaintType; let ref mut paint = ${get_gecko_property(gecko_ffi_name)}; unsafe { bindings::Gecko_nsStyleSVGPaint_Reset(paint); } let fallback = v.fallback.take(); match v.kind { SVGPaintKind::None => return, SVGPaintKind::ContextFill => { paint.mType = nsStyleSVGPaintType::eStyleSVGPaintType_ContextFill; } SVGPaintKind::ContextStroke => { paint.mType = nsStyleSVGPaintType::eStyleSVGPaintType_ContextStroke; } SVGPaintKind::PaintServer(url) => { unsafe { bindings::Gecko_nsStyleSVGPaint_SetURLValue(paint, url.for_ffi()); } } SVGPaintKind::Color(color) => { paint.mType = nsStyleSVGPaintType::eStyleSVGPaintType_Color; unsafe { *paint.mPaint.mColor.as_mut() = color_to_nscolor_zero_currentcolor(color); } } } if let Some(fallback) = fallback { paint.mFallbackColor = color_to_nscolor_zero_currentcolor(fallback); } } #[allow(non_snake_case)] pub fn copy_${ident}_from(&mut self, other: &Self) { unsafe { bindings::Gecko_nsStyleSVGPaint_CopyFrom( &mut ${get_gecko_property(gecko_ffi_name)}, & ${get_gecko_property(gecko_ffi_name, "other")} ); } } </%def> <%def name="impl_app_units(ident, gecko_ffi_name, need_clone, round_to_pixels=False)"> #[allow(non_snake_case)] pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) { % if round_to_pixels: let au_per_device_px = Au(self.gecko.mTwipsPerPixel); self.gecko.${gecko_ffi_name} = round_border_to_device_pixels(v, au_per_device_px).0; % else: self.gecko.${gecko_ffi_name} = v.0; % endif } <%call expr="impl_simple_copy(ident, gecko_ffi_name)"></%call> %if need_clone: #[allow(non_snake_case)] pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T { Au(self.gecko.${gecko_ffi_name}) } % endif </%def> <%def name="impl_split_style_coord(ident, gecko_ffi_name, index, need_clone=False)"> #[allow(non_snake_case)] pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) { v.to_gecko_style_coord(&mut self.gecko.${gecko_ffi_name}.data_at_mut(${index})); } #[allow(non_snake_case)] pub fn copy_${ident}_from(&mut self, other: &Self) { self.gecko.${gecko_ffi_name}.data_at_mut(${index}).copy_from(&other.gecko.${gecko_ffi_name}.data_at(${index})); } % if need_clone: #[allow(non_snake_case)] pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T { use properties::longhands::${ident}::computed_value::T; T::from_gecko_style_coord(&self.gecko.${gecko_ffi_name}.data_at(${index})) .expect("clone for ${ident} failed") } % endif </%def> <%def name="impl_style_coord(ident, gecko_ffi_name, need_clone=False)"> #[allow(non_snake_case)] pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) { v.to_gecko_style_coord(&mut self.gecko.${gecko_ffi_name}); } #[allow(non_snake_case)] pub fn copy_${ident}_from(&mut self, other: &Self) { self.gecko.${gecko_ffi_name}.copy_from(&other.gecko.${gecko_ffi_name}); } % if need_clone: #[allow(non_snake_case)] pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T { use properties::longhands::${ident}::computed_value::T; T::from_gecko_style_coord(&self.gecko.${gecko_ffi_name}) .expect("clone for ${ident} failed") } % endif </%def> <%def name="impl_corner_style_coord(ident, gecko_ffi_name, x_index, y_index, need_clone=False)"> #[allow(non_snake_case)] pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) { v.0.width.to_gecko_style_coord(&mut self.gecko.${gecko_ffi_name}.data_at_mut(${x_index})); v.0.height.to_gecko_style_coord(&mut 
self.gecko.${gecko_ffi_name}.data_at_mut(${y_index})); } #[allow(non_snake_case)] pub fn copy_${ident}_from(&mut self, other: &Self) { self.gecko.${gecko_ffi_name}.data_at_mut(${x_index}) .copy_from(&other.gecko.${gecko_ffi_name}.data_at(${x_index})); self.gecko.${gecko_ffi_name}.data_at_mut(${y_index}) .copy_from(&other.gecko.${gecko_ffi_name}.data_at(${y_index})); } % if need_clone: #[allow(non_snake_case)] pub fn clone_${ident}(&self) -> longhands::${ident}::computed_value::T { use properties::longhands::${ident}::computed_value::T; use euclid::Size2D; let width = GeckoStyleCoordConvertible::from_gecko_style_coord( &self.gecko.${gecko_ffi_name}.data_at(${x_index})) .expect("Failed to clone ${ident}"); let height = GeckoStyleCoordConvertible::from_gecko_style_coord( &self.gecko.${gecko_ffi_name}.data_at(${y_index})) .expect("Failed to clone ${ident}"); T(Size2D::new(width, height)) } % endif </%def> <%def name="impl_css_url(ident, gecko_ffi_name, need_clone=False, only_resolved=False)"> #[allow(non_snake_case)] pub fn set_${ident}(&mut self, v: longhands::${ident}::computed_value::T) { use gecko_bindings::sugar::refptr::RefPtr; match v { Either::First(url) => { let refptr = unsafe { % if only_resolved: // -moz-binding can't handle relative URIs if !url.has_resolved() { self.gecko.${gecko_ffi_name}.clear(); return; } % endif let ptr = bindings::Gecko_NewURLValue(url.for_ffi()); if ptr.is_null() { self.gecko.${gecko_ffi_name}.clear(); return; } RefPtr::from_addrefed(ptr) }; self.gecko.${gecko_ffi_name}.set_move(refptr) } Either::Second(_none) => { unsafe { self.gecko.${gecko_ffi_name}.clear(); } } } } #[allow(non_snake_case)] pub fn copy_${ident}_from(&mut self, other: &Self) { unsafe { self.gecko.${gecko_ffi_name}.set(&other.gecko.${gecko_ffi_name}); } } % if need_clone: <% raise Exception("Do not know how to handle clone ") %> % endif </%def> <%def name="impl_logical(name, need_clone=False, **kwargs)"> ${helpers.logical_setter(name, need_clone)} </%def> <%def name="impl_style_struct(style_struct)"> impl ${style_struct.gecko_struct_name} { #[allow(dead_code, unused_variables)] pub fn default(pres_context: RawGeckoPresContextBorrowed) -> Arc<Self> { let mut result = Arc::new(${style_struct.gecko_struct_name} { gecko: unsafe { zeroed() } }); unsafe { Gecko_Construct_Default_${style_struct.gecko_ffi_name}(&mut Arc::get_mut(&mut result).unwrap().gecko, pres_context); } result } pub fn get_gecko(&self) -> &${style_struct.gecko_ffi_name} { &self.gecko } } impl Drop for ${style_struct.gecko_struct_name} { fn drop(&mut self) { unsafe { Gecko_Destroy_${style_struct.gecko_ffi_name}(&mut self.gecko); } } } impl Clone for ${style_struct.gecko_struct_name} { fn clone(&self) -> Self { unsafe { let mut result = ${style_struct.gecko_struct_name} { gecko: zeroed() }; Gecko_CopyConstruct_${style_struct.gecko_ffi_name}(&mut result.gecko, &self.gecko); result } } } // FIXME(bholley): Make bindgen generate Debug for all types. %if style_struct.gecko_ffi_name in ("nsStyle" + x for x in "Border Display List Background Font SVGReset".split()): impl Debug for ${style_struct.gecko_struct_name} { // FIXME(bholley): Generate this. fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "Gecko style struct: ${style_struct.gecko_struct_name}") } } %else: impl Debug for ${style_struct.gecko_struct_name} { // FIXME(bholley): Generate this. 
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.gecko.fmt(f) } } %endif </%def> <%def name="raw_impl_trait(style_struct, skip_longhands='', skip_additionals='')"> <% longhands = [x for x in style_struct.longhands if not (skip_longhands == "*" or x.name in skip_longhands.split())] # # Make a list of types we can't auto-generate. # force_stub = []; # These live in an nsFont member in Gecko. Should be straightforward to do manually. force_stub += ["font-variant"] # These have unusual representations in gecko. force_stub += ["list-style-type"] # Types used with predefined_type()-defined properties that we can auto-generate. predefined_types = { "length::LengthOrAuto": impl_style_coord, "length::LengthOrNormal": impl_style_coord, "Length": impl_absolute_length, "Position": impl_position, "LengthOrPercentage": impl_style_coord, "LengthOrPercentageOrAuto": impl_style_coord, "LengthOrPercentageOrNone": impl_style_coord, "LengthOrNone": impl_style_coord, "MaxLength": impl_style_coord, "MinLength": impl_style_coord, "Number": impl_simple, "Opacity": impl_simple, "CSSColor": impl_color, "SVGPaint": impl_svg_paint, "UrlOrNone": impl_css_url, } def longhand_method(longhand): args = dict(ident=longhand.ident, gecko_ffi_name=longhand.gecko_ffi_name, need_clone=longhand.need_clone) # get the method and pass additional keyword or type-specific arguments if longhand.logical: method = impl_logical args.update(name=longhand.name) elif longhand.keyword: method = impl_keyword args.update(keyword=longhand.keyword) if "font" in longhand.ident: args.update(cast_type=longhand.cast_type) else: method = predefined_types[longhand.predefined_type] if longhand.predefined_type in ["CSSColor"]: args.update(complex_color=longhand.complex_color) method(**args) picked_longhands, stub_longhands = [], [] for x in longhands: if (x.keyword or x.predefined_type in predefined_types or x.logical) and x.name not in force_stub: picked_longhands.append(x) else: stub_longhands.append(x) # If one of the longhands is not handled # by either: # - being a keyword # - being a predefined longhand # - being a longhand with manual glue code (i.e. in skip_longhands) # - being generated as a stub # # then we raise an error here. # # If you hit this error, please add `product="servo"` to the longhand. # In case the longhand is used in a shorthand, add it to the force_stub # list above. for stub in stub_longhands: if stub.name not in force_stub: raise Exception("Don't know what to do with longhand %s in style struct %s" % (stub.name,style_struct. gecko_struct_name)) %> impl ${style_struct.gecko_struct_name} { /* * Manually-Implemented Methods. */ ${caller.body().strip()} /* * Auto-Generated Methods. */ <% for longhand in picked_longhands: longhand_method(longhand) %> /* * Stubs. 
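     * Longhands that we cannot auto-generate (and that have no manual glue) fall through
     * to the stubs below: the generated setter and copy_from just emit a warn!() and drop
     * the value, and the clone/indexed accessors, where required, call unimplemented!().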
     */
    % for longhand in stub_longhands:
    #[allow(non_snake_case)]
    pub fn set_${longhand.ident}(&mut self, _: longhands::${longhand.ident}::computed_value::T) {
        warn!("stylo: Unimplemented property setter: ${longhand.name}");
    }
    #[allow(non_snake_case)]
    pub fn copy_${longhand.ident}_from(&mut self, _: &Self) {
        warn!("stylo: Unimplemented property setter: ${longhand.name}");
    }
    % if longhand.need_clone:
    #[allow(non_snake_case)]
    pub fn clone_${longhand.ident}(&self) -> longhands::${longhand.ident}::computed_value::T {
        unimplemented!()
    }
    % endif
    % if longhand.need_index:
    pub fn ${longhand.ident}_count(&self) -> usize { 0 }
    pub fn ${longhand.ident}_at(&self, _index: usize)
                               -> longhands::${longhand.ident}::computed_value::SingleComputedValue {
        unimplemented!()
    }
    % endif
    % endfor
    <%
    additionals = [x for x in style_struct.additional_methods
                   if skip_additionals != "*" and not x.name in skip_additionals.split()]
    %>
    % for additional in additionals:
    ${additional.stub()}
    % endfor
}
</%def>

<% data.manual_style_structs = [] %>

<%def name="impl_trait(style_struct_name, skip_longhands='', skip_additionals='')">
<%self:raw_impl_trait style_struct="${next(x for x in data.style_structs if x.name == style_struct_name)}"
                      skip_longhands="${skip_longhands}" skip_additionals="${skip_additionals}">
${caller.body()}
</%self:raw_impl_trait>
<% data.manual_style_structs.append(style_struct_name) %>
</%def>

<%!
class Side(object):
    def __init__(self, name, index):
        self.name = name
        self.ident = name.lower()
        self.index = index

class Corner(object):
    def __init__(self, vert, horiz, index):
        self.x_name = "HalfCorner::eCorner" + vert + horiz + "X"
        self.y_name = "HalfCorner::eCorner" + vert + horiz + "Y"
        self.ident = (vert + "_" + horiz).lower()
        self.x_index = 2 * index
        self.y_index = 2 * index + 1

class GridLine(object):
    def __init__(self, name):
        self.ident = "grid-" + name.lower()
        self.name = self.ident.replace('-', '_')
        self.gecko = "m" + to_camel_case(self.ident)

SIDES = [Side("Top", 0), Side("Right", 1), Side("Bottom", 2), Side("Left", 3)]
CORNERS = [Corner("Top", "Left", 0), Corner("Top", "Right", 1),
           Corner("Bottom", "Right", 2), Corner("Bottom", "Left", 3)]
GRID_LINES = map(GridLine, ["row-start", "row-end", "column-start", "column-end"])
%>

#[allow(dead_code)]
fn static_assert() {
    unsafe {
        % for corner in CORNERS:
        transmute::<_, [u32; ${corner.x_index}]>([1; structs::${corner.x_name} as usize]);
        transmute::<_, [u32; ${corner.y_index}]>([1; structs::${corner.y_name} as usize]);
        % endfor
    }
    // Note: using the above technique with an enum hits a rust bug when |structs| is in a different crate.
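    // How the transmute trick above works: an array `[1; N]` only has the same size as
    // `[u32; M]` when N == M, so transmuting `[1; <gecko enum value>]` into
    // `[u32; <our hard-coded index>]` turns any mismatch into a compile error instead of
    // a silent miscompare. A minimal standalone sketch of the same idea, with a
    // hypothetical constant FOO:
    //
    //     const FOO: usize = 3;
    //     unsafe { transmute::<_, [u32; 3]>([1u32; FOO]) };  // compiles only if FOO == 3
    //
    // The array-indexing variant below serves the same purpose for the side constants:
    // indexing the one-element array `[0]` with `(condition) as usize` fails const
    // evaluation whenever the condition is true.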
% for side in SIDES: { const DETAIL: u32 = [0][(structs::Side::eSide${side.name} as usize != ${side.index}) as usize]; let _ = DETAIL; } % endfor } <% border_style_keyword = Keyword("border-style", "none solid double dotted dashed hidden groove ridge inset outset") %> <% skip_border_longhands = " ".join(["border-{0}-{1}".format(x.ident, y) for x in SIDES for y in ["color", "style", "width"]] + ["border-{0}-radius".format(x.ident.replace("_", "-")) for x in CORNERS]) %> <%self:impl_trait style_struct_name="Border" skip_longhands="${skip_border_longhands} border-image-source border-image-outset border-image-repeat border-image-width border-image-slice" skip_additionals="*"> % for side in SIDES: <% impl_keyword("border_%s_style" % side.ident, "mBorderStyle[%s]" % side.index, border_style_keyword, need_clone=True) %> <% impl_color("border_%s_color" % side.ident, "(mBorderColor)[%s]" % side.index, need_clone=True) %> <% impl_app_units("border_%s_width" % side.ident, "mComputedBorder.%s" % side.ident, need_clone=True, round_to_pixels=True) %> pub fn border_${side.ident}_has_nonzero_width(&self) -> bool { self.gecko.mComputedBorder.${side.ident} != 0 } % endfor % for corner in CORNERS: <% impl_corner_style_coord("border_%s_radius" % corner.ident, "mBorderRadius", corner.x_index, corner.y_index, need_clone=True) %> % endfor pub fn set_border_image_source(&mut self, v: longhands::border_image_source::computed_value::T) { unsafe { // Prevent leaking of the last elements we did set Gecko_SetNullImageValue(&mut self.gecko.mBorderImageSource); } if let Some(image) = v.0 { // TODO: We need to make border-image-source match with background-image // until then we are setting with_url to false self.gecko.mBorderImageSource.set(image, false, &mut false) } } pub fn copy_border_image_source_from(&mut self, other: &Self) { unsafe { Gecko_CopyImageValueFrom(&mut self.gecko.mBorderImageSource, &other.gecko.mBorderImageSource); } } pub fn set_border_image_outset(&mut self, v: longhands::border_image_outset::computed_value::T) { % for side in SIDES: v.${side.index}.to_gecko_style_coord(&mut self.gecko.mBorderImageOutset .data_at_mut(${side.index})); % endfor } pub fn copy_border_image_outset_from(&mut self, other: &Self) { % for side in SIDES: self.gecko.mBorderImageOutset.data_at_mut(${side.index}) .copy_from(&other.gecko.mBorderImageOutset.data_at(${side.index})); % endfor } pub fn set_border_image_repeat(&mut self, v: longhands::border_image_repeat::computed_value::T) { use properties::longhands::border_image_repeat::computed_value::RepeatKeyword; use gecko_bindings::structs::{NS_STYLE_BORDER_IMAGE_REPEAT_STRETCH, NS_STYLE_BORDER_IMAGE_REPEAT_REPEAT}; use gecko_bindings::structs::{NS_STYLE_BORDER_IMAGE_REPEAT_ROUND, NS_STYLE_BORDER_IMAGE_REPEAT_SPACE}; % for i, side in enumerate(["H", "V"]): let k = match v.${i} { RepeatKeyword::Stretch => NS_STYLE_BORDER_IMAGE_REPEAT_STRETCH, RepeatKeyword::Repeat => NS_STYLE_BORDER_IMAGE_REPEAT_REPEAT, RepeatKeyword::Round => NS_STYLE_BORDER_IMAGE_REPEAT_ROUND, RepeatKeyword::Space => NS_STYLE_BORDER_IMAGE_REPEAT_SPACE, }; self.gecko.mBorderImageRepeat${side} = k as u8; % endfor } pub fn copy_border_image_repeat_from(&mut self, other: &Self) { self.gecko.mBorderImageRepeatH = other.gecko.mBorderImageRepeatH; self.gecko.mBorderImageRepeatV = other.gecko.mBorderImageRepeatV; } pub fn set_border_image_width(&mut self, v: longhands::border_image_width::computed_value::T) { use properties::longhands::border_image_width::computed_value::SingleComputedValue; % for side in SIDES: 
match v.${side.index} { SingleComputedValue::Auto => { self.gecko.mBorderImageWidth.data_at_mut(${side.index}).set_value(CoordDataValue::Auto) }, SingleComputedValue::LengthOrPercentage(l) => { l.to_gecko_style_coord(&mut self.gecko.mBorderImageWidth.data_at_mut(${side.index})) }, SingleComputedValue::Number(n) => { self.gecko.mBorderImageWidth.data_at_mut(${side.index}).set_value(CoordDataValue::Factor(n)) }, } % endfor } pub fn copy_border_image_width_from(&mut self, other: &Self) { % for side in SIDES: self.gecko.mBorderImageWidth.data_at_mut(${side.index}) .copy_from(&other.gecko.mBorderImageWidth.data_at(${side.index})); % endfor } pub fn set_border_image_slice(&mut self, v: longhands::border_image_slice::computed_value::T) { use gecko_bindings::structs::{NS_STYLE_BORDER_IMAGE_SLICE_NOFILL, NS_STYLE_BORDER_IMAGE_SLICE_FILL}; use properties::longhands::border_image_slice::computed_value::PercentageOrNumber; for (i, corner) in v.corners.iter().enumerate() { match *corner { PercentageOrNumber::Percentage(p) => { self.gecko.mBorderImageSlice.data_at_mut(i).set_value(CoordDataValue::Percent(p.0)) }, PercentageOrNumber::Number(n) => { self.gecko.mBorderImageSlice.data_at_mut(i).set_value(CoordDataValue::Factor(n)) }, } } let fill = if v.fill { NS_STYLE_BORDER_IMAGE_SLICE_FILL } else { NS_STYLE_BORDER_IMAGE_SLICE_NOFILL }; self.gecko.mBorderImageFill = fill as u8; } pub fn copy_border_image_slice_from(&mut self, other: &Self) { for i in 0..4 { self.gecko.mBorderImageSlice.data_at_mut(i) .copy_from(&other.gecko.mBorderImageSlice.data_at(i)); } self.gecko.mBorderImageFill = other.gecko.mBorderImageFill; } </%self:impl_trait> <% skip_margin_longhands = " ".join(["margin-%s" % x.ident for x in SIDES]) %> <%self:impl_trait style_struct_name="Margin" skip_longhands="${skip_margin_longhands}"> % for side in SIDES: <% impl_split_style_coord("margin_%s" % side.ident, "mMargin", side.index, need_clone=True) %> % endfor </%self:impl_trait> <% skip_padding_longhands = " ".join(["padding-%s" % x.ident for x in SIDES]) %> <%self:impl_trait style_struct_name="Padding" skip_longhands="${skip_padding_longhands}"> % for side in SIDES: <% impl_split_style_coord("padding_%s" % side.ident, "mPadding", side.index, need_clone=True) %> % endfor </%self:impl_trait> <% skip_position_longhands = " ".join(x.ident for x in SIDES + GRID_LINES) %> <%self:impl_trait style_struct_name="Position" skip_longhands="${skip_position_longhands} z-index box-sizing order align-content justify-content align-self justify-self align-items justify-items grid-auto-rows grid-auto-columns"> % for side in SIDES: <% impl_split_style_coord("%s" % side.ident, "mOffset", side.index, need_clone=True) %> % endfor pub fn set_z_index(&mut self, v: longhands::z_index::computed_value::T) { use properties::longhands::z_index::computed_value::T; match v { T::Auto => self.gecko.mZIndex.set_value(CoordDataValue::Auto), T::Number(n) => self.gecko.mZIndex.set_value(CoordDataValue::Integer(n)), } } pub fn copy_z_index_from(&mut self, other: &Self) { use gecko_bindings::structs::nsStyleUnit; // z-index is never a calc(). If it were, we'd be leaking here, so // assert that it isn't. 
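        // (Calc values own a heap-allocated, refcounted structure; copy_from_unchecked
        // skips the unit-aware bookkeeping such a value would need, which is presumably
        // the leak the comment above warns about.)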
debug_assert!(self.gecko.mZIndex.unit() != nsStyleUnit::eStyleUnit_Calc); unsafe { self.gecko.mZIndex.copy_from_unchecked(&other.gecko.mZIndex); } } pub fn clone_z_index(&self) -> longhands::z_index::computed_value::T { use properties::longhands::z_index::computed_value::T; return match self.gecko.mZIndex.as_value() { CoordDataValue::Auto => T::Auto, CoordDataValue::Integer(n) => T::Number(n), _ => { debug_assert!(false); T::Number(0) } } } pub fn set_align_content(&mut self, v: longhands::align_content::computed_value::T) { self.gecko.mAlignContent = v.bits() } ${impl_simple_copy('align_content', 'mAlignContent')} pub fn set_justify_content(&mut self, v: longhands::justify_content::computed_value::T) { self.gecko.mJustifyContent = v.bits() } ${impl_simple_copy('justify_content', 'mJustifyContent')} pub fn set_align_self(&mut self, v: longhands::align_self::computed_value::T) { self.gecko.mAlignSelf = v.0.bits() } ${impl_simple_copy('align_self', 'mAlignSelf')} pub fn set_justify_self(&mut self, v: longhands::justify_self::computed_value::T) { self.gecko.mJustifySelf = v.0.bits() } ${impl_simple_copy('justify_self', 'mJustifySelf')} pub fn set_align_items(&mut self, v: longhands::align_items::computed_value::T) { self.gecko.mAlignItems = v.0.bits() } ${impl_simple_copy('align_items', 'mAlignItems')} pub fn set_justify_items(&mut self, v: longhands::justify_items::computed_value::T) { self.gecko.mJustifyItems = v.0.bits() } ${impl_simple_copy('justify_items', 'mJustifyItems')} pub fn clone_justify_items(&self) -> longhands::justify_items::computed_value::T { use values::specified::align::{AlignFlags, JustifyItems}; JustifyItems(AlignFlags::from_bits(self.gecko.mJustifyItems) .expect("mJustifyItems contains valid flags")) } pub fn set_box_sizing(&mut self, v: longhands::box_sizing::computed_value::T) { use computed_values::box_sizing::T; use gecko_bindings::structs::StyleBoxSizing; // TODO: guess what to do with box-sizing: padding-box self.gecko.mBoxSizing = match v { T::content_box => StyleBoxSizing::Content, T::border_box => StyleBoxSizing::Border } } ${impl_simple_copy('box_sizing', 'mBoxSizing')} pub fn set_order(&mut self, v: longhands::order::computed_value::T) { self.gecko.mOrder = v; } pub fn clone_order(&self) -> longhands::order::computed_value::T { self.gecko.mOrder } ${impl_simple_copy('order', 'mOrder')} % for value in GRID_LINES: pub fn set_${value.name}(&mut self, v: longhands::${value.name}::computed_value::T) { use nsstring::nsCString; use gecko_bindings::structs::{nsStyleGridLine_kMinLine, nsStyleGridLine_kMaxLine}; let ident = v.ident.unwrap_or(String::new()); self.gecko.${value.gecko}.mLineName.assign_utf8(&nsCString::from(&*ident)); self.gecko.${value.gecko}.mHasSpan = v.is_span; self.gecko.${value.gecko}.mInteger = v.integer.map(|i| { // clamping the integer between a range cmp::max(nsStyleGridLine_kMinLine, cmp::min(i, nsStyleGridLine_kMaxLine)) }).unwrap_or(0); } pub fn copy_${value.name}_from(&mut self, other: &Self) { self.gecko.${value.gecko}.mHasSpan = other.gecko.${value.gecko}.mHasSpan; self.gecko.${value.gecko}.mInteger = other.gecko.${value.gecko}.mInteger; self.gecko.${value.gecko}.mLineName.assign(&*other.gecko.${value.gecko}.mLineName); } % endfor % for kind in ["rows", "columns"]: pub fn set_grid_auto_${kind}(&mut self, v: longhands::grid_auto_rows::computed_value::T) { use values::specified::grid::TrackSize; match v { TrackSize::FitContent(lop) => { // Gecko sets min value to None and max value to the actual value in fit-content // 
https://dxr.mozilla.org/mozilla-central/rev/0eef1d5/layout/style/nsRuleNode.cpp#8221 self.gecko.mGridAuto${kind.title()}Min.set_value(CoordDataValue::None); lop.to_gecko_style_coord(&mut self.gecko.mGridAuto${kind.title()}Max); }, TrackSize::Breadth(breadth) => { // Set the value to both fields if there's one breadth value // https://dxr.mozilla.org/mozilla-central/rev/0eef1d5/layout/style/nsRuleNode.cpp#8230 breadth.to_gecko_style_coord(&mut self.gecko.mGridAuto${kind.title()}Min); breadth.to_gecko_style_coord(&mut self.gecko.mGridAuto${kind.title()}Max); }, TrackSize::MinMax(min, max) => { min.to_gecko_style_coord(&mut self.gecko.mGridAuto${kind.title()}Min); max.to_gecko_style_coord(&mut self.gecko.mGridAuto${kind.title()}Max); }, } } pub fn copy_grid_auto_${kind}_from(&mut self, other: &Self) { self.gecko.mGridAuto${kind.title()}Min.copy_from(&other.gecko.mGridAuto${kind.title()}Min); self.gecko.mGridAuto${kind.title()}Max.copy_from(&other.gecko.mGridAuto${kind.title()}Max); } % endfor </%self:impl_trait> <% skip_outline_longhands = " ".join("outline-style outline-width".split() + ["-moz-outline-radius-{0}".format(x.ident.replace("_", "")) for x in CORNERS]) %> <%self:impl_trait style_struct_name="Outline" skip_longhands="${skip_outline_longhands}" skip_additionals="*"> #[allow(non_snake_case)] pub fn set_outline_style(&mut self, v: longhands::outline_style::computed_value::T) { // FIXME(bholley): Align binary representations and ditch |match| for cast + static_asserts let result = match v { % for value in border_style_keyword.values_for('gecko'): Either::Second(border_style::T::${to_rust_ident(value)}) => structs::${border_style_keyword.gecko_constant(value)} ${border_style_keyword.maybe_cast("u8")}, % endfor Either::First(Auto) => structs::${border_style_keyword.gecko_constant('auto')} ${border_style_keyword.maybe_cast("u8")}, }; ${set_gecko_property("mOutlineStyle", "result")} } #[allow(non_snake_case)] pub fn copy_outline_style_from(&mut self, other: &Self) { self.gecko.mOutlineStyle = other.gecko.mOutlineStyle; } #[allow(non_snake_case)] pub fn clone_outline_style(&self) -> longhands::outline_style::computed_value::T { // FIXME(bholley): Align binary representations and ditch |match| for cast + static_asserts match ${get_gecko_property("mOutlineStyle")} ${border_style_keyword.maybe_cast("u32")} { % for value in border_style_keyword.values_for('gecko'): structs::${border_style_keyword.gecko_constant(value)} => Either::Second(border_style::T::${value}), % endfor structs::${border_style_keyword.gecko_constant('auto')} => Either::First(Auto), % if border_style_keyword.gecko_inexhaustive: x => panic!("Found unexpected value in style struct for outline_style property: {:?}", x), % endif } } <% impl_app_units("outline_width", "mActualOutlineWidth", need_clone=True, round_to_pixels=True) %> % for corner in CORNERS: <% impl_corner_style_coord("_moz_outline_radius_%s" % corner.ident.replace("_", ""), "mOutlineRadius", corner.x_index, corner.y_index) %> % endfor pub fn outline_has_nonzero_width(&self) -> bool { self.gecko.mActualOutlineWidth != 0 } </%self:impl_trait> <%self:impl_trait style_struct_name="Font" skip_longhands="font-family font-size font-size-adjust font-weight font-synthesis -x-lang" skip_additionals="*"> pub fn set_font_family(&mut self, v: longhands::font_family::computed_value::T) { use properties::longhands::font_family::computed_value::FontFamily; use gecko_bindings::structs::FontFamilyType; let list = &mut self.gecko.mFont.fontlist; unsafe { 
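            // Drop whatever families were previously set; the loop below then re-appends
            // the new list one entry at a time.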
Gecko_FontFamilyList_Clear(list); } for family in &v.0 { match *family { FontFamily::FamilyName(ref name) => { unsafe { Gecko_FontFamilyList_AppendNamed(list, name.0.as_ptr()); } } FontFamily::Generic(ref name) => { let family_type = if name == &atom!("serif") { FontFamilyType::eFamily_serif } else if name == &atom!("sans-serif") { FontFamilyType::eFamily_sans_serif } else if name == &atom!("cursive") { FontFamilyType::eFamily_cursive } else if name == &atom!("fantasy") { FontFamilyType::eFamily_fantasy } else if name == &atom!("monospace") { FontFamilyType::eFamily_monospace } else if name == &atom!("-moz-fixed") { FontFamilyType::eFamily_moz_fixed } else { panic!("Unknown generic font family") }; unsafe { Gecko_FontFamilyList_AppendGeneric(list, family_type); } } } } } pub fn font_family_count(&self) -> usize { 0 } pub fn font_family_at(&self, _: usize) -> longhands::font_family::computed_value::FontFamily { unimplemented!() } pub fn copy_font_family_from(&mut self, other: &Self) { unsafe { Gecko_CopyFontFamilyFrom(&mut self.gecko.mFont, &other.gecko.mFont); } } // FIXME(bholley): Gecko has two different sizes, one of which (mSize) is the // actual computed size, and the other of which (mFont.size) is the 'display // size' which takes font zooming into account. We don't handle font zooming yet. pub fn set_font_size(&mut self, v: longhands::font_size::computed_value::T) { self.gecko.mFont.size = v.0; self.gecko.mSize = v.0; } pub fn copy_font_size_from(&mut self, other: &Self) { self.gecko.mFont.size = other.gecko.mFont.size; self.gecko.mSize = other.gecko.mSize; } pub fn clone_font_size(&self) -> longhands::font_size::computed_value::T { Au(self.gecko.mSize) } pub fn set_font_weight(&mut self, v: longhands::font_weight::computed_value::T) { self.gecko.mFont.weight = v as u16; } ${impl_simple_copy('font_weight', 'mFont.weight')} pub fn clone_font_weight(&self) -> longhands::font_weight::computed_value::T { debug_assert!(self.gecko.mFont.weight >= 100); debug_assert!(self.gecko.mFont.weight <= 900); debug_assert!(self.gecko.mFont.weight % 10 == 0); unsafe { transmute(self.gecko.mFont.weight) } } pub fn set_font_synthesis(&mut self, v: longhands::font_synthesis::computed_value::T) { use gecko_bindings::structs::{NS_FONT_SYNTHESIS_WEIGHT, NS_FONT_SYNTHESIS_STYLE}; self.gecko.mFont.synthesis = 0; if v.weight { self.gecko.mFont.synthesis |= NS_FONT_SYNTHESIS_WEIGHT as u8; } if v.style { self.gecko.mFont.synthesis |= NS_FONT_SYNTHESIS_STYLE as u8; } } pub fn copy_font_synthesis_from(&mut self, other: &Self) { self.gecko.mFont.synthesis = other.gecko.mFont.synthesis; } pub fn set_font_size_adjust(&mut self, v: longhands::font_size_adjust::computed_value::T) { use properties::longhands::font_size_adjust::computed_value::T; match v { T::None => self.gecko.mFont.sizeAdjust = -1.0 as f32, T::Number(n) => self.gecko.mFont.sizeAdjust = n.0 as f32, } } pub fn copy_font_size_adjust_from(&mut self, other: &Self) { self.gecko.mFont.sizeAdjust = other.gecko.mFont.sizeAdjust; } pub fn clone_font_size_adjust(&self) -> longhands::font_size_adjust::computed_value::T { use properties::longhands::font_size_adjust::computed_value::T; use values::specified::Number; match self.gecko.mFont.sizeAdjust { -1.0 => T::None, _ => T::Number(Number(self.gecko.mFont.sizeAdjust)), } } #[allow(non_snake_case)] pub fn set__x_lang(&mut self, v: longhands::_x_lang::computed_value::T) { let ptr = v.0.as_ptr(); forget(v); unsafe { Gecko_nsStyleFont_SetLang(&mut self.gecko, ptr); } } #[allow(non_snake_case)] pub fn 
copy__x_lang_from(&mut self, other: &Self) { unsafe { Gecko_nsStyleFont_CopyLangFrom(&mut self.gecko, &other.gecko); } } </%self:impl_trait> <%def name="impl_copy_animation_or_transition_value(type, ident, gecko_ffi_name)"> #[allow(non_snake_case)] pub fn copy_${type}_${ident}_from(&mut self, other: &Self) { unsafe { self.gecko.m${type.capitalize()}s.ensure_len(other.gecko.m${type.capitalize()}s.len()) }; let count = other.gecko.m${type.capitalize()}${gecko_ffi_name}Count; self.gecko.m${type.capitalize()}${gecko_ffi_name}Count = count; // The length of mTransitions or mAnimations is often greater than m{Transition|Animation}XXCount, // don't copy values over the count. for (index, gecko) in self.gecko.m${type.capitalize()}s.iter_mut().enumerate().take(count as usize) { gecko.m${gecko_ffi_name} = other.gecko.m${type.capitalize()}s[index].m${gecko_ffi_name}; } } </%def> <%def name="impl_animation_or_transition_count(type, ident, gecko_ffi_name)"> #[allow(non_snake_case)] pub fn ${type}_${ident}_count(&self) -> usize { self.gecko.m${type.capitalize()}${gecko_ffi_name}Count as usize } </%def> <%def name="impl_animation_or_transition_time_value(type, ident, gecko_ffi_name)"> #[allow(non_snake_case)] pub fn set_${type}_${ident}(&mut self, v: longhands::${type}_${ident}::computed_value::T) { debug_assert!(!v.0.is_empty()); let input_len = v.0.len(); unsafe { self.gecko.m${type.capitalize()}s.ensure_len(input_len) }; self.gecko.m${type.capitalize()}${gecko_ffi_name}Count = input_len as u32; for (i, gecko) in self.gecko.m${type.capitalize()}s.iter_mut().enumerate() { gecko.m${gecko_ffi_name} = v.0[i % input_len].seconds() * 1000.; } } #[allow(non_snake_case)] pub fn ${type}_${ident}_at(&self, index: usize) -> longhands::${type}_${ident}::computed_value::SingleComputedValue { use values::specified::Time; Time(self.gecko.m${type.capitalize()}s[index].m${gecko_ffi_name} / 1000.) 
} ${impl_animation_or_transition_count(type, ident, gecko_ffi_name)} ${impl_copy_animation_or_transition_value(type, ident, gecko_ffi_name)} </%def> <%def name="impl_animation_or_transition_timing_function(type)"> pub fn set_${type}_timing_function(&mut self, v: longhands::${type}_timing_function::computed_value::T) { debug_assert!(!v.0.is_empty()); let input_len = v.0.len(); unsafe { self.gecko.m${type.capitalize()}s.ensure_len(input_len) }; self.gecko.m${type.capitalize()}TimingFunctionCount = input_len as u32; for (i, gecko) in self.gecko.m${type.capitalize()}s.iter_mut().enumerate() { gecko.mTimingFunction = v.0[i % input_len].into(); } } ${impl_animation_or_transition_count(type, 'timing_function', 'TimingFunction')} ${impl_copy_animation_or_transition_value(type, 'timing_function', 'TimingFunction')} pub fn ${type}_timing_function_at(&self, index: usize) -> longhands::${type}_timing_function::computed_value::SingleComputedValue { self.gecko.m${type.capitalize()}s[index].mTimingFunction.into() } </%def> <%def name="impl_transition_time_value(ident, gecko_ffi_name)"> ${impl_animation_or_transition_time_value('transition', ident, gecko_ffi_name)} </%def> <%def name="impl_transition_count(ident, gecko_ffi_name)"> ${impl_animation_or_transition_count('transition', ident, gecko_ffi_name)} </%def> <%def name="impl_copy_animation_value(ident, gecko_ffi_name)"> ${impl_copy_animation_or_transition_value('animation', ident, gecko_ffi_name)} </%def> <%def name="impl_transition_timing_function()"> ${impl_animation_or_transition_timing_function('transition')} </%def> <%def name="impl_animation_count(ident, gecko_ffi_name)"> ${impl_animation_or_transition_count('animation', ident, gecko_ffi_name)} </%def> <%def name="impl_animation_time_value(ident, gecko_ffi_name)"> ${impl_animation_or_transition_time_value('animation', ident, gecko_ffi_name)} </%def> <%def name="impl_animation_timing_function()"> ${impl_animation_or_transition_timing_function('animation')} </%def> <%def name="impl_animation_keyword(ident, gecko_ffi_name, keyword, cast_type='u8')"> #[allow(non_snake_case)] pub fn set_animation_${ident}(&mut self, v: longhands::animation_${ident}::computed_value::T) { use properties::longhands::animation_${ident}::single_value::computed_value::T as Keyword; use gecko_bindings::structs; debug_assert!(!v.0.is_empty()); let input_len = v.0.len(); unsafe { self.gecko.mAnimations.ensure_len(input_len) }; self.gecko.mAnimation${gecko_ffi_name}Count = input_len as u32; for (i, gecko) in self.gecko.mAnimations.iter_mut().enumerate() { let result = match v.0[i % input_len] { % for value in keyword.gecko_values(): Keyword::${to_rust_ident(value)} => structs::${keyword.gecko_constant(value)} ${keyword.maybe_cast(cast_type)}, % endfor }; gecko.m${gecko_ffi_name} = result; } } #[allow(non_snake_case)] pub fn animation_${ident}_at(&self, index: usize) -> longhands::animation_${ident}::computed_value::SingleComputedValue { use properties::longhands::animation_${ident}::single_value::computed_value::T as Keyword; match self.gecko.mAnimations[index].m${gecko_ffi_name} ${keyword.maybe_cast("u32")} { % for value in keyword.gecko_values(): structs::${keyword.gecko_constant(value)} => Keyword::${to_rust_ident(value)}, % endfor x => panic!("Found unexpected value for animation-${ident}: {:?}", x), } } ${impl_animation_count(ident, gecko_ffi_name)} ${impl_copy_animation_value(ident, gecko_ffi_name)} </%def> <% skip_box_longhands= """display overflow-y vertical-align animation-name animation-delay animation-duration 
animation-direction animation-fill-mode animation-play-state animation-iteration-count animation-timing-function transition-duration transition-delay transition-timing-function transition-property page-break-before page-break-after scroll-snap-points-x scroll-snap-points-y transform scroll-snap-type-y scroll-snap-coordinate perspective-origin transform-origin -moz-binding""" %> <%self:impl_trait style_struct_name="Box" skip_longhands="${skip_box_longhands}"> // We manually-implement the |display| property until we get general // infrastructure for preffing certain values. <% display_keyword = Keyword("display", "inline block inline-block table inline-table table-row-group " + "table-header-group table-footer-group table-row table-column-group " + "table-column table-cell table-caption list-item flex none " + "inline-flex grid inline-grid ruby ruby-base ruby-base-container " + "ruby-text ruby-text-container contents flow-root -webkit-box " + "-webkit-inline-box -moz-box -moz-inline-box -moz-grid -moz-inline-grid " + "-moz-grid-group -moz-grid-line -moz-stack -moz-inline-stack -moz-deck " + "-moz-popup -moz-groupbox", gecko_enum_prefix="StyleDisplay", gecko_strip_moz_prefix=False) %> pub fn set_display(&mut self, v: longhands::display::computed_value::T) { use properties::longhands::display::computed_value::T as Keyword; // FIXME(bholley): Align binary representations and ditch |match| for cast + static_asserts let result = match v { % for value in display_keyword.values_for('gecko'): Keyword::${to_rust_ident(value)} => structs::${display_keyword.gecko_constant(value)}, % endfor }; self.gecko.mDisplay = result; self.gecko.mOriginalDisplay = result; } /// Set the display value from the style adjustment code. This is pretty /// much like set_display, but without touching the mOriginalDisplay field, /// which we want to keep. pub fn set_adjusted_display(&mut self, v: longhands::display::computed_value::T) { use properties::longhands::display::computed_value::T as Keyword; let result = match v { % for value in display_keyword.values_for('gecko'): Keyword::${to_rust_ident(value)} => structs::${display_keyword.gecko_constant(value)}, % endfor }; self.gecko.mDisplay = result; } pub fn copy_display_from(&mut self, other: &Self) { self.gecko.mDisplay = other.gecko.mDisplay; self.gecko.mOriginalDisplay = other.gecko.mDisplay; } <%call expr="impl_keyword_clone('display', 'mDisplay', display_keyword)"></%call> // overflow-y is implemented as a newtype of overflow-x, so we need special handling. // We could generalize this if we run into other newtype keywords. 
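    // Concretely, the generated setter below unwraps the newtype and reuses overflow-x's
    // keyword table, roughly:
    //
    //     self.gecko.mOverflowY = match v.0 {
    //         BaseType::visible => /* matching structs:: constant */ as u8,
    //         // ...one arm per overflow-x keyword...
    //     };
    //
    // and clone_overflow_y performs the inverse mapping before re-wrapping the newtype.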
<% overflow_x = data.longhands_by_name["overflow-x"] %> pub fn set_overflow_y(&mut self, v: longhands::overflow_y::computed_value::T) { use properties::longhands::overflow_x::computed_value::T as BaseType; // FIXME(bholley): Align binary representations and ditch |match| for cast + static_asserts self.gecko.mOverflowY = match v.0 { % for value in overflow_x.keyword.values_for('gecko'): BaseType::${to_rust_ident(value)} => structs::${overflow_x.keyword.gecko_constant(value)} as u8, % endfor }; } ${impl_simple_copy('overflow_y', 'mOverflowY')} pub fn clone_overflow_y(&self) -> longhands::overflow_y::computed_value::T { use properties::longhands::overflow_x::computed_value::T as BaseType; use properties::longhands::overflow_y::computed_value::T as NewType; // FIXME(bholley): Align binary representations and ditch |match| for cast + static_asserts match self.gecko.mOverflowY as u32 { % for value in overflow_x.keyword.values_for('gecko'): structs::${overflow_x.keyword.gecko_constant(value)} => NewType(BaseType::${to_rust_ident(value)}), % endfor x => panic!("Found unexpected value in style struct for overflow_y property: {}", x), } } pub fn set_vertical_align(&mut self, v: longhands::vertical_align::computed_value::T) { <% keyword = data.longhands_by_name["vertical-align"].keyword %> use properties::longhands::vertical_align::computed_value::T; // FIXME: Align binary representations and ditch |match| for cast + static_asserts match v { % for value in keyword.values_for('gecko'): T::${to_rust_ident(value)} => self.gecko.mVerticalAlign.set_value( CoordDataValue::Enumerated(structs::${keyword.gecko_constant(value)})), % endfor T::LengthOrPercentage(v) => self.gecko.mVerticalAlign.set(v), } } pub fn clone_vertical_align(&self) -> longhands::vertical_align::computed_value::T { use properties::longhands::vertical_align::computed_value::T; use values::computed::LengthOrPercentage; match self.gecko.mVerticalAlign.as_value() { % for value in keyword.values_for('gecko'): CoordDataValue::Enumerated(structs::${keyword.gecko_constant(value)}) => T::${to_rust_ident(value)}, % endfor CoordDataValue::Enumerated(_) => panic!("Unexpected enum variant for vertical-align"), _ => { let v = LengthOrPercentage::from_gecko_style_coord(&self.gecko.mVerticalAlign) .expect("Expected length or percentage for vertical-align"); T::LengthOrPercentage(v) } } } <%call expr="impl_coord_copy('vertical_align', 'mVerticalAlign')"></%call> // Temp fix for Bugzilla bug 24000. // Map 'auto' and 'avoid' to false, and 'always', 'left', and 'right' to true. // "A conforming user agent may interpret the values 'left' and 'right' // as 'always'." - CSS2.1, section 13.3.1 pub fn set_page_break_before(&mut self, v: longhands::page_break_before::computed_value::T) { use computed_values::page_break_before::T; let result = match v { T::auto => false, T::always => true, T::avoid => false, T::left => true, T::right => true }; self.gecko.mBreakBefore = result; } ${impl_simple_copy('page_break_before', 'mBreakBefore')} // Temp fix for Bugzilla bug 24000. // See set_page_break_before for detail. 
pub fn set_page_break_after(&mut self, v: longhands::page_break_after::computed_value::T) { use computed_values::page_break_after::T; let result = match v { T::auto => false, T::always => true, T::avoid => false, T::left => true, T::right => true }; self.gecko.mBreakAfter = result; } ${impl_simple_copy('page_break_after', 'mBreakAfter')} pub fn set_scroll_snap_points_x(&mut self, v: longhands::scroll_snap_points_x::computed_value::T) { match v.0 { None => self.gecko.mScrollSnapPointsX.set_value(CoordDataValue::None), Some(l) => l.to_gecko_style_coord(&mut self.gecko.mScrollSnapPointsX), }; } ${impl_coord_copy('scroll_snap_points_x', 'mScrollSnapPointsX')} pub fn set_scroll_snap_points_y(&mut self, v: longhands::scroll_snap_points_y::computed_value::T) { match v.0 { None => self.gecko.mScrollSnapPointsY.set_value(CoordDataValue::None), Some(l) => l.to_gecko_style_coord(&mut self.gecko.mScrollSnapPointsY), }; } ${impl_coord_copy('scroll_snap_points_y', 'mScrollSnapPointsY')} pub fn set_scroll_snap_coordinate(&mut self, v: longhands::scroll_snap_coordinate::computed_value::T) { unsafe { self.gecko.mScrollSnapCoordinate.set_len_pod(v.0.len() as u32); } for (gecko, servo) in self.gecko.mScrollSnapCoordinate .iter_mut() .zip(v.0.iter()) { gecko.mXPosition = servo.horizontal.into(); gecko.mYPosition = servo.vertical.into(); } } pub fn copy_scroll_snap_coordinate_from(&mut self, other: &Self) { unsafe { self.gecko.mScrollSnapCoordinate .set_len_pod(other.gecko.mScrollSnapCoordinate.len() as u32); } for (this, that) in self.gecko.mScrollSnapCoordinate .iter_mut() .zip(other.gecko.mScrollSnapCoordinate.iter()) { *this = *that; } } pub fn clone_scroll_snap_coordinate(&self) -> longhands::scroll_snap_coordinate::computed_value::T { let vec = self.gecko.mScrollSnapCoordinate.iter().map(|f| f.into()).collect(); longhands::scroll_snap_coordinate::computed_value::T(vec) } ${impl_css_url('_moz_binding', 'mBinding', only_resolved=True)} <%def name="transform_function_arm(name, keyword, items)"> <% pattern = None if name == "matrix": # m11, m12, m13, .. indices = [str(i) + str(j) for i in range(1, 5) for j in range(1, 5)] # m11: number1, m12: number2, .. 
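            # (Once joined below, the matrix pattern therefore reads, in full:
            #  "ComputedMatrix { m11: number1, m12: number2, ..., m44: number16 }".)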
single_patterns = ["m%s: number%s" % (index, i + 1) for (i, index) in enumerate(indices)] pattern = "ComputedMatrix { %s }" % ", ".join(single_patterns) else: # Generate contents of pattern from items pattern = ", ".join([b + str(a+1) for (a,b) in enumerate(items)]) # First %s substituted with the call to GetArrayItem, the second # %s substituted with the corresponding variable css_value_setters = { "length" : "bindings::Gecko_CSSValue_SetAbsoluteLength(%s, %s.0)", "percentage" : "bindings::Gecko_CSSValue_SetPercentage(%s, %s)", "lop" : "%s.set_lop(%s)", "angle" : "bindings::Gecko_CSSValue_SetAngle(%s, %s.0)", "number" : "bindings::Gecko_CSSValue_SetNumber(%s, %s)", } %> longhands::transform::computed_value::ComputedOperation::${name.title()}(${pattern}) => { bindings::Gecko_CSSValue_SetFunction(gecko_value, ${len(items) + 1}); bindings::Gecko_CSSValue_SetKeyword( bindings::Gecko_CSSValue_GetArrayItem(gecko_value, 0), eCSSKeyword_${keyword} ); % for index, item in enumerate(items): ${css_value_setters[item] % ( "bindings::Gecko_CSSValue_GetArrayItem(gecko_value, %d)" % (index + 1), item + str(index + 1) )}; % endfor } </%def> pub fn convert_transform(input: Vec<longhands::transform::computed_value::ComputedOperation>, output: &mut structs::root::RefPtr<structs::root::nsCSSValueSharedList>) { use gecko_bindings::structs::nsCSSKeyword::*; use gecko_bindings::sugar::refptr::RefPtr; use properties::longhands::transform::computed_value::ComputedMatrix; unsafe { output.clear() }; let list = unsafe { RefPtr::from_addrefed(bindings::Gecko_NewCSSValueSharedList(input.len() as u32)) }; let mut cur = list.mHead; let mut iter = input.into_iter(); while !cur.is_null() { let gecko_value = unsafe { &mut (*cur).mValue }; let servo = iter.next().expect("Gecko_NewCSSValueSharedList should create a shared \ value list of the same length as the transform vector"); unsafe { match servo { ${transform_function_arm("matrix", "matrix3d", ["number"] * 16)} ${transform_function_arm("skew", "skew", ["angle"] * 2)} ${transform_function_arm("translate", "translate3d", ["lop", "lop", "length"])} ${transform_function_arm("scale", "scale3d", ["number"] * 3)} ${transform_function_arm("rotate", "rotate3d", ["number"] * 3 + ["angle"])} ${transform_function_arm("perspective", "perspective", ["length"])} } cur = (*cur).mNext; } } debug_assert!(iter.next().is_none()); unsafe { output.set_move(list) }; } pub fn set_transform(&mut self, other: longhands::transform::computed_value::T) { let vec = if let Some(v) = other.0 { v } else { unsafe { self.gecko.mSpecifiedTransform.clear(); } return; }; Self::convert_transform(vec, &mut self.gecko.mSpecifiedTransform); } pub fn copy_transform_from(&mut self, other: &Self) { unsafe { self.gecko.mSpecifiedTransform.set(&other.gecko.mSpecifiedTransform); } } <%def name="computed_operation_arm(name, keyword, items)"> <% # %s is substituted with the call to GetArrayItem. 
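        # (For example, the "length" entry renders as
        #  Au(bindings::Gecko_CSSValue_GetAbsoluteLength(<array item expression>))
        #  once the substitution is applied.)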
css_value_getters = { "length" : "Au(bindings::Gecko_CSSValue_GetAbsoluteLength(%s))", "lop" : "%s.get_lop()", "angle" : "Angle(bindings::Gecko_CSSValue_GetAngle(%s))", "number" : "bindings::Gecko_CSSValue_GetNumber(%s)", } %> eCSSKeyword_${keyword} => { ComputedOperation::${name.title()}( % if name == "matrix": ComputedMatrix { % endif % for index, item in enumerate(items): % if name == "matrix": m${index / 4 + 1}${index % 4 + 1}: % endif ${css_value_getters[item] % ( "bindings::Gecko_CSSValue_GetArrayItemConst(gecko_value, %d)" % (index + 1) )}, % endfor % if name == "matrix": } % endif ) }, </%def> pub fn clone_transform(&self) -> longhands::transform::computed_value::T { use app_units::Au; use gecko_bindings::structs::nsCSSKeyword::*; use properties::longhands::transform::computed_value; use properties::longhands::transform::computed_value::ComputedMatrix; use properties::longhands::transform::computed_value::ComputedOperation; use values::computed::Angle; if self.gecko.mSpecifiedTransform.mRawPtr.is_null() { return computed_value::T(None); } let mut result = vec![]; let mut cur = unsafe { (*self.gecko.mSpecifiedTransform.to_safe().get()).mHead }; while !cur.is_null() { let gecko_value = unsafe { &(*cur).mValue }; let transform_function = unsafe { bindings::Gecko_CSSValue_GetKeyword(bindings::Gecko_CSSValue_GetArrayItemConst(gecko_value, 0)) }; let servo = unsafe { match transform_function { ${computed_operation_arm("matrix", "matrix3d", ["number"] * 16)} ${computed_operation_arm("skew", "skew", ["angle"] * 2)} ${computed_operation_arm("translate", "translate3d", ["lop", "lop", "length"])} ${computed_operation_arm("scale", "scale3d", ["number"] * 3)} ${computed_operation_arm("rotate", "rotate3d", ["number"] * 3 + ["angle"])} ${computed_operation_arm("perspective", "perspective", ["length"])} _ => panic!("We shouldn't set any other transform function types"), } }; result.push(servo); unsafe { cur = (&*cur).mNext }; } computed_value::T(Some(result)) } ${impl_transition_time_value('delay', 'Delay')} ${impl_transition_time_value('duration', 'Duration')} ${impl_transition_timing_function()} pub fn set_transition_property(&mut self, v: longhands::transition_property::computed_value::T) { use gecko_bindings::structs::nsCSSPropertyID_eCSSPropertyExtra_no_properties; if !v.0.is_empty() { unsafe { self.gecko.mTransitions.ensure_len(v.0.len()) }; self.gecko.mTransitionPropertyCount = v.0.len() as u32; for (servo, gecko) in v.0.into_iter().zip(self.gecko.mTransitions.iter_mut()) { gecko.mProperty = servo.into(); } } else { // In gecko |none| is represented by eCSSPropertyExtra_no_properties. 
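                // so rather than storing an empty list, we keep exactly one transition
                // entry carrying that sentinel property.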
self.gecko.mTransitionPropertyCount = 1; self.gecko.mTransitions[0].mProperty = nsCSSPropertyID_eCSSPropertyExtra_no_properties; } } pub fn transition_property_at(&self, index: usize) -> longhands::transition_property::computed_value::SingleComputedValue { self.gecko.mTransitions[index].mProperty.into() } pub fn copy_transition_property_from(&mut self, other: &Self) { unsafe { self.gecko.mTransitions.ensure_len(other.gecko.mTransitions.len()) }; let count = other.gecko.mTransitionPropertyCount; self.gecko.mTransitionPropertyCount = count; for (index, transition) in self.gecko.mTransitions.iter_mut().enumerate().take(count as usize) { transition.mProperty = other.gecko.mTransitions[index].mProperty; } } ${impl_transition_count('property', 'Property')} pub fn animations_equals(&self, other: &Self) -> bool { unsafe { bindings::Gecko_StyleAnimationsEquals(&self.gecko.mAnimations, &other.gecko.mAnimations) } } pub fn set_animation_name(&mut self, v: longhands::animation_name::computed_value::T) { use nsstring::nsCString; debug_assert!(!v.0.is_empty()); unsafe { self.gecko.mAnimations.ensure_len(v.0.len()) }; self.gecko.mAnimationNameCount = v.0.len() as u32; for (servo, gecko) in v.0.into_iter().zip(self.gecko.mAnimations.iter_mut()) { // TODO This is inefficient. We should fix this in bug 1329169. gecko.mName.assign_utf8(&nsCString::from(servo.0.to_string())); } } pub fn animation_name_at(&self, index: usize) -> longhands::animation_name::computed_value::SingleComputedValue { use Atom; use properties::longhands::animation_name::single_value::SpecifiedValue as AnimationName; // XXX: Is there any effective ways? AnimationName(Atom::from(String::from_utf16_lossy(&self.gecko.mAnimations[index].mName[..]))) } pub fn copy_animation_name_from(&mut self, other: &Self) { unsafe { self.gecko.mAnimations.ensure_len(other.gecko.mAnimations.len()) }; let count = other.gecko.mAnimationNameCount; self.gecko.mAnimationNameCount = count; // The length of mAnimations is often greater than mAnimationXXCount, // don't copy values over the count. 
for (index, animation) in self.gecko.mAnimations.iter_mut().enumerate().take(count as usize) { animation.mName.assign(&*other.gecko.mAnimations[index].mName); } } ${impl_animation_count('name', 'Name')} ${impl_animation_time_value('delay', 'Delay')} ${impl_animation_time_value('duration', 'Duration')} ${impl_animation_keyword('direction', 'Direction', data.longhands_by_name["animation-direction"].keyword)} ${impl_animation_keyword('fill_mode', 'FillMode', data.longhands_by_name["animation-fill-mode"].keyword)} ${impl_animation_keyword('play_state', 'PlayState', data.longhands_by_name["animation-play-state"].keyword)} pub fn set_animation_iteration_count(&mut self, v: longhands::animation_iteration_count::computed_value::T) { use std::f32; use properties::longhands::animation_iteration_count::single_value::SpecifiedValue as AnimationIterationCount; debug_assert!(!v.0.is_empty()); let input_len = v.0.len(); unsafe { self.gecko.mAnimations.ensure_len(input_len) }; self.gecko.mAnimationIterationCountCount = input_len as u32; for (i, gecko) in self.gecko.mAnimations.iter_mut().enumerate() { match v.0[i % input_len] { AnimationIterationCount::Number(n) => gecko.mIterationCount = n, AnimationIterationCount::Infinite => gecko.mIterationCount = f32::INFINITY, } } } pub fn animation_iteration_count_at(&self, index: usize) -> longhands::animation_iteration_count::computed_value::SingleComputedValue { use properties::longhands::animation_iteration_count::single_value::computed_value::T as AnimationIterationCount; if self.gecko.mAnimations[index].mIterationCount.is_infinite() { AnimationIterationCount::Infinite } else { AnimationIterationCount::Number(self.gecko.mAnimations[index].mIterationCount) } } ${impl_animation_count('iteration_count', 'IterationCount')} ${impl_copy_animation_value('iteration_count', 'IterationCount')} ${impl_animation_timing_function()} <% scroll_snap_type_keyword = Keyword("scroll-snap-type", "none mandatory proximity") %> ${impl_keyword('scroll_snap_type_y', 'mScrollSnapTypeY', scroll_snap_type_keyword, need_clone=False)} pub fn set_perspective_origin(&mut self, v: longhands::perspective_origin::computed_value::T) { self.gecko.mPerspectiveOrigin[0].set(v.horizontal); self.gecko.mPerspectiveOrigin[1].set(v.vertical); } pub fn copy_perspective_origin_from(&mut self, other: &Self) { self.gecko.mPerspectiveOrigin[0].copy_from(&other.gecko.mPerspectiveOrigin[0]); self.gecko.mPerspectiveOrigin[1].copy_from(&other.gecko.mPerspectiveOrigin[1]); } pub fn set_transform_origin(&mut self, v: longhands::transform_origin::computed_value::T) { self.gecko.mTransformOrigin[0].set(v.horizontal); self.gecko.mTransformOrigin[1].set(v.vertical); self.gecko.mTransformOrigin[2].set(v.depth); } pub fn copy_transform_origin_from(&mut self, other: &Self) { self.gecko.mTransformOrigin[0].copy_from(&other.gecko.mTransformOrigin[0]); self.gecko.mTransformOrigin[1].copy_from(&other.gecko.mTransformOrigin[1]); self.gecko.mTransformOrigin[2].copy_from(&other.gecko.mTransformOrigin[2]); } pub fn clone_transform_origin(&self) -> longhands::transform_origin::computed_value::T { use properties::longhands::transform_origin::computed_value::T; use values::computed::LengthOrPercentage; T { horizontal: LengthOrPercentage::from_gecko_style_coord(&self.gecko.mTransformOrigin[0]) .expect("clone for LengthOrPercentage failed"), vertical: LengthOrPercentage::from_gecko_style_coord(&self.gecko.mTransformOrigin[1]) .expect("clone for LengthOrPercentage failed"), depth: 
Au::from_gecko_style_coord(&self.gecko.mTransformOrigin[2]) .expect("clone for Length failed"), } } </%self:impl_trait> <%def name="simple_image_array_property(name, shorthand, field_name)"> <% image_layers_field = "mImage" if shorthand == "background" else "mMask" %> pub fn copy_${shorthand}_${name}_from(&mut self, other: &Self) { use gecko_bindings::structs::nsStyleImageLayers_LayerType as LayerType; unsafe { Gecko_EnsureImageLayersLength(&mut self.gecko.${image_layers_field}, other.gecko.${image_layers_field}.mLayers.len(), LayerType::${shorthand.title()}); } for (layer, other) in self.gecko.${image_layers_field}.mLayers.iter_mut() .zip(other.gecko.${image_layers_field}.mLayers.iter()) .take(other.gecko.${image_layers_field} .${field_name}Count as usize) { layer.${field_name} = other.${field_name}; } self.gecko.${image_layers_field}.${field_name}Count = other.gecko.${image_layers_field}.${field_name}Count; } pub fn set_${shorthand}_${name}(&mut self, v: longhands::${shorthand}_${name}::computed_value::T) { use gecko_bindings::structs::nsStyleImageLayers_LayerType as LayerType; unsafe { Gecko_EnsureImageLayersLength(&mut self.gecko.${image_layers_field}, v.0.len(), LayerType::${shorthand.title()}); } self.gecko.${image_layers_field}.${field_name}Count = v.0.len() as u32; for (servo, geckolayer) in v.0.into_iter() .zip(self.gecko.${image_layers_field}.mLayers.iter_mut()) { geckolayer.${field_name} = { ${caller.body()} }; } } </%def> <%def name="impl_common_image_layer_properties(shorthand)"> <% image_layers_field = "mImage" if shorthand == "background" else "mMask" %> <%self:simple_image_array_property name="repeat" shorthand="${shorthand}" field_name="mRepeat"> use properties::longhands::${shorthand}_repeat::single_value::computed_value::T; use gecko_bindings::structs::nsStyleImageLayers_Repeat; use gecko_bindings::structs::NS_STYLE_IMAGELAYER_REPEAT_REPEAT; use gecko_bindings::structs::NS_STYLE_IMAGELAYER_REPEAT_NO_REPEAT; use gecko_bindings::structs::NS_STYLE_IMAGELAYER_REPEAT_SPACE; use gecko_bindings::structs::NS_STYLE_IMAGELAYER_REPEAT_ROUND; let (repeat_x, repeat_y) = match servo { T::repeat_x => (NS_STYLE_IMAGELAYER_REPEAT_REPEAT, NS_STYLE_IMAGELAYER_REPEAT_NO_REPEAT), T::repeat_y => (NS_STYLE_IMAGELAYER_REPEAT_NO_REPEAT, NS_STYLE_IMAGELAYER_REPEAT_REPEAT), T::repeat => (NS_STYLE_IMAGELAYER_REPEAT_REPEAT, NS_STYLE_IMAGELAYER_REPEAT_REPEAT), T::space => (NS_STYLE_IMAGELAYER_REPEAT_SPACE, NS_STYLE_IMAGELAYER_REPEAT_SPACE), T::round => (NS_STYLE_IMAGELAYER_REPEAT_ROUND, NS_STYLE_IMAGELAYER_REPEAT_ROUND), T::no_repeat => (NS_STYLE_IMAGELAYER_REPEAT_NO_REPEAT, NS_STYLE_IMAGELAYER_REPEAT_NO_REPEAT), }; nsStyleImageLayers_Repeat { mXRepeat: repeat_x as u8, mYRepeat: repeat_y as u8, } </%self:simple_image_array_property> <%self:simple_image_array_property name="clip" shorthand="${shorthand}" field_name="mClip"> use gecko_bindings::structs::StyleGeometryBox; use properties::longhands::${shorthand}_clip::single_value::computed_value::T; match servo { T::border_box => StyleGeometryBox::Border, T::padding_box => StyleGeometryBox::Padding, T::content_box => StyleGeometryBox::Content, % if shorthand == "mask": T::fill_box => StyleGeometryBox::Fill, T::stroke_box => StyleGeometryBox::Stroke, T::view_box => StyleGeometryBox::View, T::no_clip => StyleGeometryBox::NoClip, % elif shorthand == "background": T::text => StyleGeometryBox::Text, % endif } </%self:simple_image_array_property> <%self:simple_image_array_property name="origin" shorthand="${shorthand}" field_name="mOrigin"> use 
gecko_bindings::structs::StyleGeometryBox; use properties::longhands::${shorthand}_origin::single_value::computed_value::T; match servo { T::border_box => StyleGeometryBox::Border, T::padding_box => StyleGeometryBox::Padding, T::content_box => StyleGeometryBox::Content, % if shorthand == "mask": T::fill_box => StyleGeometryBox::Fill, T::stroke_box => StyleGeometryBox::Stroke, T::view_box => StyleGeometryBox::View, % endif } </%self:simple_image_array_property> % for orientation in [("x", "Horizontal"), ("y", "Vertical")]: pub fn copy_${shorthand}_position_${orientation[0]}_from(&mut self, other: &Self) { use gecko_bindings::structs::nsStyleImageLayers_LayerType as LayerType; self.gecko.${image_layers_field}.mPosition${orientation[0].upper()}Count = cmp::min(1, other.gecko.${image_layers_field}.mPosition${orientation[0].upper()}Count); self.gecko.${image_layers_field}.mLayers.mFirstElement.mPosition = other.gecko.${image_layers_field}.mLayers.mFirstElement.mPosition; unsafe { Gecko_EnsureImageLayersLength(&mut self.gecko.${image_layers_field}, other.gecko.${image_layers_field}.mLayers.len(), LayerType::${shorthand.capitalize()}); } for (layer, other) in self.gecko.${image_layers_field}.mLayers.iter_mut() .zip(other.gecko.${image_layers_field}.mLayers.iter()) { layer.mPosition.m${orientation[0].upper()}Position = other.mPosition.m${orientation[0].upper()}Position; } self.gecko.${image_layers_field}.mPosition${orientation[0].upper()}Count = other.gecko.${image_layers_field}.mPosition${orientation[0].upper()}Count; } pub fn clone_${shorthand}_position_${orientation[0]}(&self) -> longhands::${shorthand}_position_${orientation[0]}::computed_value::T { use values::computed::position::${orientation[1]}Position; longhands::${shorthand}_position_${orientation[0]}::computed_value::T( self.gecko.${image_layers_field}.mLayers.iter() .take(self.gecko.${image_layers_field}.mPosition${orientation[0].upper()}Count as usize) .map(|position| ${orientation[1]}Position(position.mPosition.m${orientation[0].upper()}Position.into())) .collect() ) } pub fn set_${shorthand}_position_${orientation[0]}(&mut self, v: longhands::${shorthand}_position_${orientation[0]}::computed_value::T) { use gecko_bindings::structs::nsStyleImageLayers_LayerType as LayerType; unsafe { Gecko_EnsureImageLayersLength(&mut self.gecko.${image_layers_field}, v.0.len(), LayerType::${shorthand.capitalize()}); } self.gecko.${image_layers_field}.mPosition${orientation[0].upper()}Count = v.0.len() as u32; for (servo, geckolayer) in v.0.into_iter().zip(self.gecko.${image_layers_field} .mLayers.iter_mut()) { geckolayer.mPosition.m${orientation[0].upper()}Position = servo.0.into(); } } % endfor <%self:simple_image_array_property name="size" shorthand="${shorthand}" field_name="mSize"> use gecko_bindings::structs::nsStyleImageLayers_Size_Dimension; use gecko_bindings::structs::nsStyleImageLayers_Size_DimensionType; use gecko_bindings::structs::{nsStyleCoord_CalcValue, nsStyleImageLayers_Size}; use properties::longhands::background_size::single_value::computed_value::T; let mut width = nsStyleCoord_CalcValue::new(); let mut height = nsStyleCoord_CalcValue::new(); let (w_type, h_type) = match servo { T::Explicit(size) => { let mut w_type = nsStyleImageLayers_Size_DimensionType::eAuto; let mut h_type = nsStyleImageLayers_Size_DimensionType::eAuto; if let Some(w) = size.width.to_calc_value() { width = w; w_type = nsStyleImageLayers_Size_DimensionType::eLengthPercentage; } if let Some(h) = size.height.to_calc_value() { height = h; h_type = 
nsStyleImageLayers_Size_DimensionType::eLengthPercentage; } (w_type, h_type) } T::Cover => (nsStyleImageLayers_Size_DimensionType::eCover, nsStyleImageLayers_Size_DimensionType::eCover), T::Contain => (nsStyleImageLayers_Size_DimensionType::eContain, nsStyleImageLayers_Size_DimensionType::eContain), }; nsStyleImageLayers_Size { mWidth: nsStyleImageLayers_Size_Dimension { _base: width }, mHeight: nsStyleImageLayers_Size_Dimension { _base: height }, mWidthType: w_type as u8, mHeightType: h_type as u8, } </%self:simple_image_array_property> pub fn clone_${shorthand}_size(&self) -> longhands::background_size::computed_value::T { use gecko_bindings::structs::nsStyleCoord_CalcValue as CalcValue; use gecko_bindings::structs::nsStyleImageLayers_Size_DimensionType as DimensionType; use properties::longhands::background_size::single_value::computed_value::{ExplicitSize, T}; use values::computed::LengthOrPercentageOrAuto; fn to_servo(value: CalcValue, ty: u8) -> LengthOrPercentageOrAuto { if ty == DimensionType::eAuto as u8 { LengthOrPercentageOrAuto::Auto } else { debug_assert!(ty == DimensionType::eLengthPercentage as u8); LengthOrPercentageOrAuto::Calc(value.into()) } } longhands::background_size::computed_value::T( self.gecko.${image_layers_field}.mLayers.iter().map(|ref layer| { if DimensionType::eCover as u8 == layer.mSize.mWidthType { debug_assert!(layer.mSize.mHeightType == DimensionType::eCover as u8); return T::Cover } if DimensionType::eContain as u8 == layer.mSize.mWidthType { debug_assert!(layer.mSize.mHeightType == DimensionType::eContain as u8); return T::Contain } T::Explicit(ExplicitSize { width: to_servo(layer.mSize.mWidth._base, layer.mSize.mWidthType), height: to_servo(layer.mSize.mHeight._base, layer.mSize.mHeightType), }) }).collect() ) } pub fn copy_${shorthand}_image_from(&mut self, other: &Self) { unsafe { Gecko_CopyImageValueFrom(&mut self.gecko.${image_layers_field}.mLayers.mFirstElement.mImage, &other.gecko.${image_layers_field}.mLayers.mFirstElement.mImage); } } #[allow(unused_variables)] pub fn set_${shorthand}_image(&mut self, images: longhands::${shorthand}_image::computed_value::T, cacheable: &mut bool) { use gecko_bindings::structs::nsStyleImageLayers_LayerType as LayerType; unsafe { // Prevent leaking of the last elements we did set for image in &mut self.gecko.${image_layers_field}.mLayers { Gecko_SetNullImageValue(&mut image.mImage) } // XXXManishearth clear mSourceURI for masks Gecko_EnsureImageLayersLength(&mut self.gecko.${image_layers_field}, images.0.len(), LayerType::${shorthand.title()}); } self.gecko.${image_layers_field}.mImageCount = images.0.len() as u32; for (image, geckoimage) in images.0.into_iter().zip(self.gecko.${image_layers_field} .mLayers.iter_mut()) { % if shorthand == "background": if let Some(image) = image.0 { geckoimage.mImage.set(image, true, cacheable) } % else: use properties::longhands::mask_image::single_value::computed_value::T; match image { T::Image(image) => geckoimage.mImage.set(image, false, cacheable), _ => () // we need to support url valeus } % endif } } <% fill_fields = "mRepeat mClip mOrigin mPositionX mPositionY mImage mSize" if shorthand == "background": fill_fields += " mAttachment mBlendMode" else: # mSourceURI uses mImageCount fill_fields += " mMaskMode mComposite" %> pub fn fill_arrays(&mut self) { use gecko_bindings::bindings::Gecko_FillAll${shorthand.title()}Lists; use std::cmp; let mut max_len = 1; % for member in fill_fields.split(): max_len = cmp::max(max_len, self.gecko.${image_layers_field}.${member}Count); % 
endfor // XXXManishearth Gecko does an optimization here where it only // fills things in if any of the properties have been set unsafe { // While we could do this manually, we'd need to also manually // run all the copy constructors, so we just delegate to gecko Gecko_FillAll${shorthand.title()}Lists(&mut self.gecko.${image_layers_field}, max_len); } } </%def> // TODO: Gecko accepts lists in most background-related properties. We just use // the first element (which is the common case), but at some point we want to // add support for parsing these lists in servo and pushing to nsTArray's. <% skip_background_longhands = """background-repeat background-image background-clip background-origin background-attachment background-size background-position background-blend-mode background-position-x background-position-y""" %> <%self:impl_trait style_struct_name="Background" skip_longhands="${skip_background_longhands}" skip_additionals="*"> <% impl_common_image_layer_properties("background") %> <%self:simple_image_array_property name="attachment" shorthand="background" field_name="mAttachment"> use properties::longhands::background_attachment::single_value::computed_value::T; match servo { T::scroll => structs::NS_STYLE_IMAGELAYER_ATTACHMENT_SCROLL as u8, T::fixed => structs::NS_STYLE_IMAGELAYER_ATTACHMENT_FIXED as u8, T::local => structs::NS_STYLE_IMAGELAYER_ATTACHMENT_LOCAL as u8, } </%self:simple_image_array_property> <%self:simple_image_array_property name="blend_mode" shorthand="background" field_name="mBlendMode"> use properties::longhands::background_blend_mode::single_value::computed_value::T; match servo { T::normal => structs::NS_STYLE_BLEND_NORMAL as u8, T::multiply => structs::NS_STYLE_BLEND_MULTIPLY as u8, T::screen => structs::NS_STYLE_BLEND_SCREEN as u8, T::overlay => structs::NS_STYLE_BLEND_OVERLAY as u8, T::darken => structs::NS_STYLE_BLEND_DARKEN as u8, T::lighten => structs::NS_STYLE_BLEND_LIGHTEN as u8, T::color_dodge => structs::NS_STYLE_BLEND_COLOR_DODGE as u8, T::color_burn => structs::NS_STYLE_BLEND_COLOR_BURN as u8, T::hard_light => structs::NS_STYLE_BLEND_HARD_LIGHT as u8, T::soft_light => structs::NS_STYLE_BLEND_SOFT_LIGHT as u8, T::difference => structs::NS_STYLE_BLEND_DIFFERENCE as u8, T::exclusion => structs::NS_STYLE_BLEND_EXCLUSION as u8, T::hue => structs::NS_STYLE_BLEND_HUE as u8, T::saturation => structs::NS_STYLE_BLEND_SATURATION as u8, T::color => structs::NS_STYLE_BLEND_COLOR as u8, T::luminosity => structs::NS_STYLE_BLEND_LUMINOSITY as u8, } </%self:simple_image_array_property> </%self:impl_trait> <%self:impl_trait style_struct_name="List" skip_longhands="list-style-image list-style-type quotes -moz-image-region" skip_additionals="*"> pub fn set_list_style_image(&mut self, image: longhands::list_style_image::computed_value::T) { use values::Either; match image { Either::Second(_none) => { unsafe { Gecko_SetListStyleImageNone(&mut self.gecko); } } Either::First(ref url) => { unsafe { Gecko_SetListStyleImage(&mut self.gecko, url.for_ffi()); } // We don't need to record this struct as uncacheable, like when setting // background-image to a url() value, since only properties in reset structs // are re-used from the applicable declaration cache, and the List struct // is an inherited struct. 
} } } pub fn copy_list_style_image_from(&mut self, other: &Self) { unsafe { Gecko_CopyListStyleImageFrom(&mut self.gecko, &other.gecko); } } pub fn set_list_style_type(&mut self, v: longhands::list_style_type::computed_value::T) { use properties::longhands::list_style_type::computed_value::T as Keyword; <% keyword = data.longhands_by_name["list-style-type"].keyword # The first four are @counter-styles # The rest have special fallback behavior special = """upper-roman lower-roman upper-alpha lower-alpha japanese-informal japanese-formal korean-hangul-formal korean-hanja-informal korean-hanja-formal simp-chinese-informal simp-chinese-formal trad-chinese-informal trad-chinese-formal""".split() %> let result = match v { % for value in keyword.values_for('gecko'): % if value in special: // Special keywords are implemented as @counter-styles // and need to be manually set as strings Keyword::${to_rust_ident(value)} => structs::${keyword.gecko_constant("none")}, % else: Keyword::${to_rust_ident(value)} => structs::${keyword.gecko_constant(value)}, % endif % endfor }; unsafe { Gecko_SetListStyleType(&mut self.gecko, result as u32); } } pub fn copy_list_style_type_from(&mut self, other: &Self) { unsafe { Gecko_CopyListStyleTypeFrom(&mut self.gecko, &other.gecko); } } pub fn set_quotes(&mut self, other: longhands::quotes::computed_value::T) { use gecko_bindings::bindings::Gecko_NewStyleQuoteValues; use gecko_bindings::sugar::refptr::UniqueRefPtr; use nsstring::nsCString; let mut refptr = unsafe { UniqueRefPtr::from_addrefed(Gecko_NewStyleQuoteValues(other.0.len() as u32)) }; for (servo, gecko) in other.0.into_iter().zip(refptr.mQuotePairs.iter_mut()) { gecko.first.assign_utf8(&nsCString::from(&*servo.0)); gecko.second.assign_utf8(&nsCString::from(&*servo.1)); } unsafe { self.gecko.mQuotes.set_move(refptr.get()) } } pub fn copy_quotes_from(&mut self, other: &Self) { unsafe { self.gecko.mQuotes.set(&other.gecko.mQuotes); } } #[allow(non_snake_case)] pub fn set__moz_image_region(&mut self, v: longhands::_moz_image_region::computed_value::T) { use values::Either; match v { Either::Second(_auto) => { self.gecko.mImageRegion.x = 0; self.gecko.mImageRegion.y = 0; self.gecko.mImageRegion.width = 0; self.gecko.mImageRegion.height = 0; } Either::First(rect) => { self.gecko.mImageRegion.x = rect.left.unwrap_or(Au(0)).0; self.gecko.mImageRegion.y = rect.top.unwrap_or(Au(0)).0; self.gecko.mImageRegion.height = rect.bottom.unwrap_or(Au(0)).0 - self.gecko.mImageRegion.y; self.gecko.mImageRegion.width = rect.right.unwrap_or(Au(0)).0 - self.gecko.mImageRegion.x; } } } ${impl_simple_copy('_moz_image_region', 'mImageRegion')} </%self:impl_trait> <%self:impl_trait style_struct_name="Table" skip_longhands="-x-span"> #[allow(non_snake_case)] pub fn set__x_span(&mut self, v: longhands::_x_span::computed_value::T) { self.gecko.mSpan = v.0 } ${impl_simple_copy('_x_span', 'mSpan')} </%self:impl_trait> <%self:impl_trait style_struct_name="Effects" skip_longhands="box-shadow clip filter"> pub fn set_box_shadow(&mut self, v: longhands::box_shadow::computed_value::T) { self.gecko.mBoxShadow.replace_with_new(v.0.len() as u32); for (servo, gecko_shadow) in v.0.into_iter() .zip(self.gecko.mBoxShadow.iter_mut()) { gecko_shadow.mXOffset = servo.offset_x.0; gecko_shadow.mYOffset = servo.offset_y.0; gecko_shadow.mRadius = servo.blur_radius.0; gecko_shadow.mSpread = servo.spread_radius.0; gecko_shadow.mSpread = servo.spread_radius.0; gecko_shadow.mInset = servo.inset; gecko_shadow.mColor = match servo.color { Color::RGBA(rgba) => { 
gecko_shadow.mHasColor = true; convert_rgba_to_nscolor(&rgba) }, // TODO handle currentColor // https://bugzilla.mozilla.org/show_bug.cgi?id=760345 Color::CurrentColor => 0, } } } pub fn copy_box_shadow_from(&mut self, other: &Self) { self.gecko.mBoxShadow.copy_from(&other.gecko.mBoxShadow); } pub fn clone_box_shadow(&self) -> longhands::box_shadow::computed_value::T { let buf = self.gecko.mBoxShadow.iter().map(|shadow| { longhands::box_shadow::single_value::computed_value::T { offset_x: Au(shadow.mXOffset), offset_y: Au(shadow.mYOffset), blur_radius: Au(shadow.mRadius), spread_radius: Au(shadow.mSpread), inset: shadow.mInset, color: Color::RGBA(convert_nscolor_to_rgba(shadow.mColor)), } }).collect(); longhands::box_shadow::computed_value::T(buf) } pub fn set_clip(&mut self, v: longhands::clip::computed_value::T) { use gecko_bindings::structs::NS_STYLE_CLIP_AUTO; use gecko_bindings::structs::NS_STYLE_CLIP_RECT; use gecko_bindings::structs::NS_STYLE_CLIP_LEFT_AUTO; use gecko_bindings::structs::NS_STYLE_CLIP_TOP_AUTO; use gecko_bindings::structs::NS_STYLE_CLIP_RIGHT_AUTO; use gecko_bindings::structs::NS_STYLE_CLIP_BOTTOM_AUTO; use values::Either; match v { Either::First(rect) => { self.gecko.mClipFlags = NS_STYLE_CLIP_RECT as u8; if let Some(left) = rect.left { self.gecko.mClip.x = left.0; } else { self.gecko.mClip.x = 0; self.gecko.mClipFlags |= NS_STYLE_CLIP_LEFT_AUTO as u8; } if let Some(top) = rect.top { self.gecko.mClip.y = top.0; } else { self.gecko.mClip.y = 0; self.gecko.mClipFlags |= NS_STYLE_CLIP_TOP_AUTO as u8; } if let Some(bottom) = rect.bottom { self.gecko.mClip.height = bottom.0 - self.gecko.mClip.y; } else { self.gecko.mClip.height = 1 << 30; // NS_MAXSIZE self.gecko.mClipFlags |= NS_STYLE_CLIP_BOTTOM_AUTO as u8; } if let Some(right) = rect.right { self.gecko.mClip.width = right.0 - self.gecko.mClip.x; } else { self.gecko.mClip.width = 1 << 30; // NS_MAXSIZE self.gecko.mClipFlags |= NS_STYLE_CLIP_RIGHT_AUTO as u8; } }, Either::Second(_auto) => { self.gecko.mClipFlags = NS_STYLE_CLIP_AUTO as u8; self.gecko.mClip.x = 0; self.gecko.mClip.y = 0; self.gecko.mClip.width = 0; self.gecko.mClip.height = 0; } } } pub fn copy_clip_from(&mut self, other: &Self) { self.gecko.mClip = other.gecko.mClip; self.gecko.mClipFlags = other.gecko.mClipFlags; } pub fn set_filter(&mut self, v: longhands::filter::computed_value::T) { use properties::longhands::filter::computed_value::Filter::*; use gecko_bindings::structs::nsCSSShadowArray; use gecko_bindings::structs::nsStyleFilter; use gecko_bindings::structs::NS_STYLE_FILTER_BLUR; use gecko_bindings::structs::NS_STYLE_FILTER_BRIGHTNESS; use gecko_bindings::structs::NS_STYLE_FILTER_CONTRAST; use gecko_bindings::structs::NS_STYLE_FILTER_GRAYSCALE; use gecko_bindings::structs::NS_STYLE_FILTER_INVERT; use gecko_bindings::structs::NS_STYLE_FILTER_OPACITY; use gecko_bindings::structs::NS_STYLE_FILTER_SATURATE; use gecko_bindings::structs::NS_STYLE_FILTER_SEPIA; use gecko_bindings::structs::NS_STYLE_FILTER_HUE_ROTATE; use gecko_bindings::structs::NS_STYLE_FILTER_DROP_SHADOW; fn fill_filter(m_type: u32, value: CoordDataValue, gecko_filter: &mut nsStyleFilter){ gecko_filter.mType = m_type; gecko_filter.mFilterParameter.set_value(value); } unsafe { Gecko_ResetFilters(&mut self.gecko, v.filters.len()); } debug_assert!(v.filters.len() == self.gecko.mFilters.len()); for (servo, gecko_filter) in v.filters.into_iter().zip(self.gecko.mFilters.iter_mut()) { //TODO: URL, drop-shadow match servo { Blur(len) => fill_filter(NS_STYLE_FILTER_BLUR, 
CoordDataValue::Coord(len.0), gecko_filter), Brightness(factor) => fill_filter(NS_STYLE_FILTER_BRIGHTNESS, CoordDataValue::Factor(factor), gecko_filter), Contrast(factor) => fill_filter(NS_STYLE_FILTER_CONTRAST, CoordDataValue::Factor(factor), gecko_filter), Grayscale(factor) => fill_filter(NS_STYLE_FILTER_GRAYSCALE, CoordDataValue::Factor(factor), gecko_filter), HueRotate(angle) => fill_filter(NS_STYLE_FILTER_HUE_ROTATE, CoordDataValue::Radian(angle.radians()), gecko_filter), Invert(factor) => fill_filter(NS_STYLE_FILTER_INVERT, CoordDataValue::Factor(factor), gecko_filter), Opacity(factor) => fill_filter(NS_STYLE_FILTER_OPACITY, CoordDataValue::Factor(factor), gecko_filter), Saturate(factor) => fill_filter(NS_STYLE_FILTER_SATURATE, CoordDataValue::Factor(factor), gecko_filter), Sepia(factor) => fill_filter(NS_STYLE_FILTER_SEPIA, CoordDataValue::Factor(factor), gecko_filter), DropShadow(shadow) => { gecko_filter.mType = NS_STYLE_FILTER_DROP_SHADOW; fn init_shadow(filter: &mut nsStyleFilter) -> &mut nsCSSShadowArray { unsafe { let ref mut union = filter.__bindgen_anon_1; let mut shadow_array: &mut *mut nsCSSShadowArray = union.mDropShadow.as_mut(); *shadow_array = Gecko_NewCSSShadowArray(1); &mut **shadow_array } } let mut gecko_shadow = init_shadow(gecko_filter); gecko_shadow.mArray[0].mXOffset = shadow.offset_x.0; gecko_shadow.mArray[0].mYOffset = shadow.offset_y.0; gecko_shadow.mArray[0].mRadius = shadow.blur_radius.0; // mSpread is not supported in the spec, so we leave it as 0 gecko_shadow.mArray[0].mInset = false; // Not supported in spec level 1 gecko_shadow.mArray[0].mColor = match shadow.color { Color::RGBA(rgba) => { gecko_shadow.mArray[0].mHasColor = true; convert_rgba_to_nscolor(&rgba) }, // TODO handle currentColor // https://bugzilla.mozilla.org/show_bug.cgi?id=760345 Color::CurrentColor => 0, }; } Url(ref url) => { unsafe { bindings::Gecko_nsStyleFilter_SetURLValue(gecko_filter, url.for_ffi()); } } } } } pub fn copy_filter_from(&mut self, other: &Self) { unsafe { Gecko_CopyFiltersFrom(&other.gecko as *const _ as *mut _, &mut self.gecko); } } </%self:impl_trait> <%self:impl_trait style_struct_name="InheritedTable" skip_longhands="border-spacing"> pub fn set_border_spacing(&mut self, v: longhands::border_spacing::computed_value::T) { self.gecko.mBorderSpacingCol = v.horizontal.0; self.gecko.mBorderSpacingRow = v.vertical.0; } pub fn copy_border_spacing_from(&mut self, other: &Self) { self.gecko.mBorderSpacingCol = other.gecko.mBorderSpacingCol; self.gecko.mBorderSpacingRow = other.gecko.mBorderSpacingRow; } </%self:impl_trait> <%self:impl_trait style_struct_name="InheritedText" skip_longhands="text-align text-emphasis-style text-shadow line-height letter-spacing word-spacing -webkit-text-stroke-width text-emphasis-position -moz-tab-size"> <% text_align_keyword = Keyword("text-align", "start end left right center justify -moz-center -moz-left " + "-moz-right match-parent char") %> ${impl_keyword('text_align', 'mTextAlign', text_align_keyword, need_clone=False)} pub fn set_text_shadow(&mut self, v: longhands::text_shadow::computed_value::T) { self.gecko.mTextShadow.replace_with_new(v.0.len() as u32); for (servo, gecko_shadow) in v.0.into_iter() .zip(self.gecko.mTextShadow.iter_mut()) { gecko_shadow.mXOffset = servo.offset_x.0; gecko_shadow.mYOffset = servo.offset_y.0; gecko_shadow.mRadius = servo.blur_radius.0; gecko_shadow.mHasColor = false; gecko_shadow.mColor = match servo.color { Color::RGBA(rgba) => { gecko_shadow.mHasColor = true; convert_rgba_to_nscolor(&rgba) }, // TODO 
handle currentColor // https://bugzilla.mozilla.org/show_bug.cgi?id=760345 Color::CurrentColor => 0, } } } pub fn copy_text_shadow_from(&mut self, other: &Self) { self.gecko.mTextShadow.copy_from(&other.gecko.mTextShadow); } pub fn clone_text_shadow(&self) -> longhands::text_shadow::computed_value::T { let buf = self.gecko.mTextShadow.iter().map(|shadow| { longhands::text_shadow::computed_value::TextShadow { offset_x: Au(shadow.mXOffset), offset_y: Au(shadow.mYOffset), blur_radius: Au(shadow.mRadius), color: Color::RGBA(convert_nscolor_to_rgba(shadow.mColor)), } }).collect(); longhands::text_shadow::computed_value::T(buf) } pub fn set_line_height(&mut self, v: longhands::line_height::computed_value::T) { use properties::longhands::line_height::computed_value::T; // FIXME: Align binary representations and ditch |match| for cast + static_asserts let en = match v { T::Normal => CoordDataValue::Normal, T::Length(val) => CoordDataValue::Coord(val.0), T::Number(val) => CoordDataValue::Factor(val), T::MozBlockHeight => CoordDataValue::Enumerated(structs::NS_STYLE_LINE_HEIGHT_BLOCK_HEIGHT), }; self.gecko.mLineHeight.set_value(en); } pub fn clone_line_height(&self) -> longhands::line_height::computed_value::T { use properties::longhands::line_height::computed_value::T; return match self.gecko.mLineHeight.as_value() { CoordDataValue::Normal => T::Normal, CoordDataValue::Coord(coord) => T::Length(Au(coord)), CoordDataValue::Factor(n) => T::Number(n), CoordDataValue::Enumerated(val) if val == structs::NS_STYLE_LINE_HEIGHT_BLOCK_HEIGHT => T::MozBlockHeight, _ => { debug_assert!(false); T::MozBlockHeight } } } <%call expr="impl_coord_copy('line_height', 'mLineHeight')"></%call> pub fn set_letter_spacing(&mut self, v: longhands::letter_spacing::computed_value::T) { match v.0 { Some(au) => self.gecko.mLetterSpacing.set(au), None => self.gecko.mLetterSpacing.set_value(CoordDataValue::Normal) } } <%call expr="impl_coord_copy('letter_spacing', 'mLetterSpacing')"></%call> pub fn set_word_spacing(&mut self, v: longhands::word_spacing::computed_value::T) { match v.0 { Some(lop) => self.gecko.mWordSpacing.set(lop), // https://drafts.csswg.org/css-text-3/#valdef-word-spacing-normal None => self.gecko.mWordSpacing.set_value(CoordDataValue::Coord(0)), } } <%call expr="impl_coord_copy('word_spacing', 'mWordSpacing')"></%call> fn clear_text_emphasis_style_if_string(&mut self) { use nsstring::nsString; if self.gecko.mTextEmphasisStyle == structs::NS_STYLE_TEXT_EMPHASIS_STYLE_STRING as u8 { self.gecko.mTextEmphasisStyleString.assign(&nsString::new()); self.gecko.mTextEmphasisStyle = structs::NS_STYLE_TEXT_EMPHASIS_STYLE_NONE as u8; } } pub fn set_text_emphasis_position(&mut self, v: longhands::text_emphasis_position::computed_value::T) { use properties::longhands::text_emphasis_position::*; let mut result = match v.0 { HorizontalWritingModeValue::Over => structs::NS_STYLE_TEXT_EMPHASIS_POSITION_OVER as u8, HorizontalWritingModeValue::Under => structs::NS_STYLE_TEXT_EMPHASIS_POSITION_UNDER as u8, }; match v.1 { VerticalWritingModeValue::Right => { result |= structs::NS_STYLE_TEXT_EMPHASIS_POSITION_RIGHT as u8; } VerticalWritingModeValue::Left => { result |= structs::NS_STYLE_TEXT_EMPHASIS_POSITION_LEFT as u8; } } self.gecko.mTextEmphasisPosition = result; } <%call expr="impl_simple_copy('text_emphasis_position', 'mTextEmphasisPosition')"></%call> pub fn set_text_emphasis_style(&mut self, v: longhands::text_emphasis_style::computed_value::T) { use nsstring::nsCString; use 
properties::longhands::text_emphasis_style::computed_value::T; use properties::longhands::text_emphasis_style::ShapeKeyword; self.clear_text_emphasis_style_if_string(); let (te, s) = match v { T::None => (structs::NS_STYLE_TEXT_EMPHASIS_STYLE_NONE, ""), T::Keyword(ref keyword) => { let fill = if keyword.fill { structs::NS_STYLE_TEXT_EMPHASIS_STYLE_FILLED } else { structs::NS_STYLE_TEXT_EMPHASIS_STYLE_OPEN }; let shape = match keyword.shape { ShapeKeyword::Dot => structs::NS_STYLE_TEXT_EMPHASIS_STYLE_DOT, ShapeKeyword::Circle => structs::NS_STYLE_TEXT_EMPHASIS_STYLE_CIRCLE, ShapeKeyword::DoubleCircle => structs::NS_STYLE_TEXT_EMPHASIS_STYLE_DOUBLE_CIRCLE, ShapeKeyword::Triangle => structs::NS_STYLE_TEXT_EMPHASIS_STYLE_TRIANGLE, ShapeKeyword::Sesame => structs::NS_STYLE_TEXT_EMPHASIS_STYLE_SESAME, }; (shape | fill, keyword.shape.char(keyword.fill)) }, T::String(ref s) => { (structs::NS_STYLE_TEXT_EMPHASIS_STYLE_STRING, &**s) }, }; self.gecko.mTextEmphasisStyleString.assign_utf8(&nsCString::from(s)); self.gecko.mTextEmphasisStyle = te as u8; } pub fn copy_text_emphasis_style_from(&mut self, other: &Self) { self.clear_text_emphasis_style_if_string(); if other.gecko.mTextEmphasisStyle == structs::NS_STYLE_TEXT_EMPHASIS_STYLE_STRING as u8 { self.gecko.mTextEmphasisStyleString .assign(&*other.gecko.mTextEmphasisStyleString) } self.gecko.mTextEmphasisStyle = other.gecko.mTextEmphasisStyle; } <%call expr="impl_app_units('_webkit_text_stroke_width', 'mWebkitTextStrokeWidth', need_clone=False)"></%call> #[allow(non_snake_case)] pub fn set__moz_tab_size(&mut self, v: longhands::_moz_tab_size::computed_value::T) { use values::Either; match v { Either::Second(number) => { self.gecko.mTabSize.set_value(CoordDataValue::Factor(number)); } Either::First(au) => { self.gecko.mTabSize.set(au); } } } <%call expr="impl_coord_copy('_moz_tab_size', 'mTabSize')"></%call> </%self:impl_trait> <%self:impl_trait style_struct_name="Text" skip_longhands="text-decoration-line text-overflow" skip_additionals="*"> pub fn set_text_decoration_line(&mut self, v: longhands::text_decoration_line::computed_value::T) { let mut bits: u8 = 0; if v.contains(longhands::text_decoration_line::UNDERLINE) { bits |= structs::NS_STYLE_TEXT_DECORATION_LINE_UNDERLINE as u8; } if v.contains(longhands::text_decoration_line::OVERLINE) { bits |= structs::NS_STYLE_TEXT_DECORATION_LINE_OVERLINE as u8; } if v.contains(longhands::text_decoration_line::LINE_THROUGH) { bits |= structs::NS_STYLE_TEXT_DECORATION_LINE_LINE_THROUGH as u8; } if v.contains(longhands::text_decoration_line::BLINK) { bits |= structs::NS_STYLE_TEXT_DECORATION_LINE_BLINK as u8; } if v.contains(longhands::text_decoration_line::COLOR_OVERRIDE) { bits |= structs::NS_STYLE_TEXT_DECORATION_LINE_OVERRIDE_ALL as u8; } self.gecko.mTextDecorationLine = bits; } ${impl_simple_copy('text_decoration_line', 'mTextDecorationLine')} fn clear_overflow_sides_if_string(&mut self) { use gecko_bindings::structs::nsStyleTextOverflowSide; use nsstring::nsString; fn clear_if_string(side: &mut nsStyleTextOverflowSide) { if side.mType == structs::NS_STYLE_TEXT_OVERFLOW_STRING as u8 { side.mString.assign(&nsString::new()); side.mType = structs::NS_STYLE_TEXT_OVERFLOW_CLIP as u8; } } clear_if_string(&mut self.gecko.mTextOverflow.mLeft); clear_if_string(&mut self.gecko.mTextOverflow.mRight); } pub fn set_text_overflow(&mut self, v: longhands::text_overflow::computed_value::T) { use gecko_bindings::structs::nsStyleTextOverflowSide; use properties::longhands::text_overflow::{SpecifiedValue, Side}; fn set(side: 
&mut nsStyleTextOverflowSide, value: &Side) { use nsstring::nsCString; let ty = match *value { Side::Clip => structs::NS_STYLE_TEXT_OVERFLOW_CLIP, Side::Ellipsis => structs::NS_STYLE_TEXT_OVERFLOW_ELLIPSIS, Side::String(ref s) => { side.mString.assign_utf8(&nsCString::from(&**s)); structs::NS_STYLE_TEXT_OVERFLOW_STRING } }; side.mType = ty as u8; } self.clear_overflow_sides_if_string(); if v.second.is_none() { self.gecko.mTextOverflow.mLogicalDirections = true; } let SpecifiedValue { ref first, ref second } = v; let second = second.as_ref().unwrap_or(&first); set(&mut self.gecko.mTextOverflow.mLeft, first); set(&mut self.gecko.mTextOverflow.mRight, second); } pub fn copy_text_overflow_from(&mut self, other: &Self) { use gecko_bindings::structs::nsStyleTextOverflowSide; fn set(side: &mut nsStyleTextOverflowSide, other: &nsStyleTextOverflowSide) { if other.mType == structs::NS_STYLE_TEXT_OVERFLOW_STRING as u8 { side.mString.assign(&*other.mString) } side.mType = other.mType } self.clear_overflow_sides_if_string(); set(&mut self.gecko.mTextOverflow.mLeft, &other.gecko.mTextOverflow.mLeft); set(&mut self.gecko.mTextOverflow.mRight, &other.gecko.mTextOverflow.mRight); self.gecko.mTextOverflow.mLogicalDirections = other.gecko.mTextOverflow.mLogicalDirections; } #[inline] pub fn has_underline(&self) -> bool { (self.gecko.mTextDecorationLine & (structs::NS_STYLE_TEXT_DECORATION_LINE_UNDERLINE as u8)) != 0 } #[inline] pub fn has_overline(&self) -> bool { (self.gecko.mTextDecorationLine & (structs::NS_STYLE_TEXT_DECORATION_LINE_OVERLINE as u8)) != 0 } #[inline] pub fn has_line_through(&self) -> bool { (self.gecko.mTextDecorationLine & (structs::NS_STYLE_TEXT_DECORATION_LINE_LINE_THROUGH as u8)) != 0 } </%self:impl_trait> <% skip_svg_longhands = """ mask-mode mask-repeat mask-clip mask-origin mask-composite mask-position-x mask-position-y mask-size mask-image clip-path """ %> <%self:impl_trait style_struct_name="SVG" skip_longhands="${skip_svg_longhands}" skip_additionals="*"> <% impl_common_image_layer_properties("mask") %> <%self:simple_image_array_property name="mode" shorthand="mask" field_name="mMaskMode"> use properties::longhands::mask_mode::single_value::computed_value::T; match servo { T::alpha => structs::NS_STYLE_MASK_MODE_ALPHA as u8, T::luminance => structs::NS_STYLE_MASK_MODE_LUMINANCE as u8, T::match_source => structs::NS_STYLE_MASK_MODE_MATCH_SOURCE as u8, } </%self:simple_image_array_property> <%self:simple_image_array_property name="composite" shorthand="mask" field_name="mComposite"> use properties::longhands::mask_composite::single_value::computed_value::T; match servo { T::add => structs::NS_STYLE_MASK_COMPOSITE_ADD as u8, T::subtract => structs::NS_STYLE_MASK_COMPOSITE_SUBTRACT as u8, T::intersect => structs::NS_STYLE_MASK_COMPOSITE_INTERSECT as u8, T::exclude => structs::NS_STYLE_MASK_COMPOSITE_EXCLUDE as u8, } </%self:simple_image_array_property> pub fn set_clip_path(&mut self, v: longhands::clip_path::computed_value::T) { use gecko_bindings::bindings::{Gecko_NewBasicShape, Gecko_DestroyClipPath}; use gecko_bindings::structs::StyleGeometryBox; use gecko_bindings::structs::{StyleBasicShape, StyleBasicShapeType, StyleShapeSourceType}; use gecko_bindings::structs::{StyleFillRule, StyleShapeSource}; use gecko::conversions::basic_shape::set_corners_from_radius; use gecko::values::GeckoStyleCoordConvertible; use values::computed::basic_shape::*; let ref mut clip_path = self.gecko.mClipPath; // clean up existing struct unsafe { Gecko_DestroyClipPath(clip_path) }; clip_path.mType = 
StyleShapeSourceType::None; match v { ShapeSource::Url(ref url) => { unsafe { bindings::Gecko_StyleClipPath_SetURLValue(clip_path, url.for_ffi()); } } ShapeSource::None => {} // don't change the type ShapeSource::Box(reference) => { clip_path.mReferenceBox = reference.into(); clip_path.mType = StyleShapeSourceType::Box; } ShapeSource::Shape(servo_shape, maybe_box) => { clip_path.mReferenceBox = maybe_box.map(Into::into) .unwrap_or(StyleGeometryBox::NoBox); clip_path.mType = StyleShapeSourceType::Shape; fn init_shape(clip_path: &mut StyleShapeSource, ty: StyleBasicShapeType) -> &mut StyleBasicShape { unsafe { // We have to be very careful to avoid a copy here! let ref mut union = clip_path.__bindgen_anon_1; let mut shape: &mut *mut StyleBasicShape = union.mBasicShape.as_mut(); *shape = Gecko_NewBasicShape(ty); &mut **shape } } match servo_shape { BasicShape::Inset(rect) => { let mut shape = init_shape(clip_path, StyleBasicShapeType::Inset); unsafe { shape.mCoordinates.set_len(4) }; // set_len() can't call constructors, so the coordinates // can contain any value. set_value() attempts to free // allocated coordinates, so we don't want to feed it // garbage values which it may misinterpret. // Instead, we use leaky_set_value to blindly overwrite // the garbage data without // attempting to clean up. shape.mCoordinates[0].leaky_set_null(); rect.top.to_gecko_style_coord(&mut shape.mCoordinates[0]); shape.mCoordinates[1].leaky_set_null(); rect.right.to_gecko_style_coord(&mut shape.mCoordinates[1]); shape.mCoordinates[2].leaky_set_null(); rect.bottom.to_gecko_style_coord(&mut shape.mCoordinates[2]); shape.mCoordinates[3].leaky_set_null(); rect.left.to_gecko_style_coord(&mut shape.mCoordinates[3]); set_corners_from_radius(rect.round, &mut shape.mRadius); } BasicShape::Circle(circ) => { let mut shape = init_shape(clip_path, StyleBasicShapeType::Circle); unsafe { shape.mCoordinates.set_len(1) }; shape.mCoordinates[0].leaky_set_null(); circ.radius.to_gecko_style_coord(&mut shape.mCoordinates[0]); shape.mPosition = circ.position.into(); } BasicShape::Ellipse(el) => { let mut shape = init_shape(clip_path, StyleBasicShapeType::Ellipse); unsafe { shape.mCoordinates.set_len(2) }; shape.mCoordinates[0].leaky_set_null(); el.semiaxis_x.to_gecko_style_coord(&mut shape.mCoordinates[0]); shape.mCoordinates[1].leaky_set_null(); el.semiaxis_y.to_gecko_style_coord(&mut shape.mCoordinates[1]); shape.mPosition = el.position.into(); } BasicShape::Polygon(poly) => { let mut shape = init_shape(clip_path, StyleBasicShapeType::Polygon); unsafe { shape.mCoordinates.set_len(poly.coordinates.len() as u32 * 2); } for (i, coord) in poly.coordinates.iter().enumerate() { shape.mCoordinates[2 * i].leaky_set_null(); shape.mCoordinates[2 * i + 1].leaky_set_null(); coord.0.to_gecko_style_coord(&mut shape.mCoordinates[2 * i]); coord.1.to_gecko_style_coord(&mut shape.mCoordinates[2 * i + 1]); } shape.mFillRule = if poly.fill == FillRule::EvenOdd { StyleFillRule::Evenodd } else { StyleFillRule::Nonzero }; } } } } } pub fn copy_clip_path_from(&mut self, other: &Self) { use gecko_bindings::bindings::Gecko_CopyClipPathValueFrom; unsafe { Gecko_CopyClipPathValueFrom(&mut self.gecko.mClipPath, &other.gecko.mClipPath); } } </%self:impl_trait> <%self:impl_trait style_struct_name="InheritedSVG" skip_longhands="paint-order stroke-dasharray" skip_additionals="*"> pub fn set_paint_order(&mut self, v: longhands::paint_order::computed_value::T) { use self::longhands::paint_order; if v.0 == 0 { self.gecko.mPaintOrder = 
structs::NS_STYLE_PAINT_ORDER_NORMAL as u8; } else { let mut order = 0; for pos in 0..3 { let geckoval = match v.bits_at(pos) { paint_order::FILL => structs::NS_STYLE_PAINT_ORDER_FILL as u8, paint_order::STROKE => structs::NS_STYLE_PAINT_ORDER_STROKE as u8, paint_order::MARKERS => structs::NS_STYLE_PAINT_ORDER_MARKERS as u8, _ => unreachable!(), }; order |= geckoval << (pos * structs::NS_STYLE_PAINT_ORDER_BITWIDTH as u8); } self.gecko.mPaintOrder = order; } } ${impl_simple_copy('paint_order', 'mPaintOrder')} pub fn set_stroke_dasharray(&mut self, v: longhands::stroke_dasharray::computed_value::T) { unsafe { bindings::Gecko_nsStyleSVG_SetDashArrayLength(&mut self.gecko, v.0.len() as u32); } for (mut gecko, servo) in self.gecko.mStrokeDasharray.iter_mut().zip(v.0.into_iter()) { match servo { Either::First(lop) => gecko.set(lop), Either::Second(number) => gecko.set_value(CoordDataValue::Factor(number)), } } } pub fn copy_stroke_dasharray_from(&mut self, other: &Self) { unsafe { bindings::Gecko_nsStyleSVG_CopyDashArray(&mut self.gecko, &other.gecko); } } </%self:impl_trait> <%self:impl_trait style_struct_name="Color" skip_longhands="*"> pub fn set_color(&mut self, v: longhands::color::computed_value::T) { let result = convert_rgba_to_nscolor(&v); ${set_gecko_property("mColor", "result")} } <%call expr="impl_simple_copy('color', 'mColor')"></%call> pub fn clone_color(&self) -> longhands::color::computed_value::T { let color = ${get_gecko_property("mColor")} as u32; convert_nscolor_to_rgba(color) } </%self:impl_trait> <%self:impl_trait style_struct_name="Pointing" skip_longhands="cursor caret-color"> pub fn set_cursor(&mut self, v: longhands::cursor::computed_value::T) { use properties::longhands::cursor::computed_value::Keyword; use style_traits::cursor::Cursor; self.gecko.mCursor = match v.keyword { Keyword::AutoCursor => structs::NS_STYLE_CURSOR_AUTO, Keyword::SpecifiedCursor(cursor) => match cursor { Cursor::None => structs::NS_STYLE_CURSOR_NONE, Cursor::Default => structs::NS_STYLE_CURSOR_DEFAULT, Cursor::Pointer => structs::NS_STYLE_CURSOR_POINTER, Cursor::ContextMenu => structs::NS_STYLE_CURSOR_CONTEXT_MENU, Cursor::Help => structs::NS_STYLE_CURSOR_HELP, Cursor::Progress => structs::NS_STYLE_CURSOR_DEFAULT, // Gecko doesn't support "progress" yet Cursor::Wait => structs::NS_STYLE_CURSOR_WAIT, Cursor::Cell => structs::NS_STYLE_CURSOR_CELL, Cursor::Crosshair => structs::NS_STYLE_CURSOR_CROSSHAIR, Cursor::Text => structs::NS_STYLE_CURSOR_TEXT, Cursor::VerticalText => structs::NS_STYLE_CURSOR_VERTICAL_TEXT, Cursor::Alias => structs::NS_STYLE_CURSOR_ALIAS, Cursor::Copy => structs::NS_STYLE_CURSOR_COPY, Cursor::Move => structs::NS_STYLE_CURSOR_MOVE, Cursor::NoDrop => structs::NS_STYLE_CURSOR_NO_DROP, Cursor::NotAllowed => structs::NS_STYLE_CURSOR_NOT_ALLOWED, Cursor::Grab => structs::NS_STYLE_CURSOR_GRAB, Cursor::Grabbing => structs::NS_STYLE_CURSOR_GRABBING, Cursor::EResize => structs::NS_STYLE_CURSOR_E_RESIZE, Cursor::NResize => structs::NS_STYLE_CURSOR_N_RESIZE, Cursor::NeResize => structs::NS_STYLE_CURSOR_NE_RESIZE, Cursor::NwResize => structs::NS_STYLE_CURSOR_NW_RESIZE, Cursor::SResize => structs::NS_STYLE_CURSOR_S_RESIZE, Cursor::SeResize => structs::NS_STYLE_CURSOR_SE_RESIZE, Cursor::SwResize => structs::NS_STYLE_CURSOR_SW_RESIZE, Cursor::WResize => structs::NS_STYLE_CURSOR_W_RESIZE, Cursor::EwResize => structs::NS_STYLE_CURSOR_EW_RESIZE, Cursor::NsResize => structs::NS_STYLE_CURSOR_NS_RESIZE, Cursor::NeswResize => structs::NS_STYLE_CURSOR_NESW_RESIZE, Cursor::NwseResize => 
structs::NS_STYLE_CURSOR_NWSE_RESIZE, Cursor::ColResize => structs::NS_STYLE_CURSOR_COL_RESIZE, Cursor::RowResize => structs::NS_STYLE_CURSOR_ROW_RESIZE, Cursor::AllScroll => structs::NS_STYLE_CURSOR_ALL_SCROLL, Cursor::ZoomIn => structs::NS_STYLE_CURSOR_ZOOM_IN, Cursor::ZoomOut => structs::NS_STYLE_CURSOR_ZOOM_OUT, } } as u8; unsafe { Gecko_SetCursorArrayLength(&mut self.gecko, v.images.len()); } for i in 0..v.images.len() { let image = &v.images[i]; unsafe { Gecko_SetCursorImage(&mut self.gecko.mCursorImages[i], image.url.for_ffi()); } // We don't need to record this struct as uncacheable, like when setting // background-image to a url() value, since only properties in reset structs // are re-used from the applicable declaration cache, and the Pointing struct // is an inherited struct. } } pub fn copy_cursor_from(&mut self, other: &Self) { self.gecko.mCursor = other.gecko.mCursor; unsafe { Gecko_CopyCursorArrayFrom(&mut self.gecko, &other.gecko); } } pub fn set_caret_color(&mut self, v: longhands::caret_color::computed_value::T){ use values::Either; match v { Either::First(color) => { self.gecko.mCaretColor = StyleComplexColor::from(color); } Either::Second(_auto) => { self.gecko.mCaretColor = StyleComplexColor::auto(); } } } pub fn copy_caret_color_from(&mut self, other: &Self){ self.gecko.mCaretColor = other.gecko.mCaretColor; } <%call expr="impl_color_clone('caret_color', 'mCaretColor')"></%call> </%self:impl_trait> <%self:impl_trait style_struct_name="Column" skip_longhands="column-count column-rule-width"> #[allow(unused_unsafe)] pub fn set_column_count(&mut self, v: longhands::column_count::computed_value::T) { use gecko_bindings::structs::{NS_STYLE_COLUMN_COUNT_AUTO, nsStyleColumn_kMaxColumnCount}; self.gecko.mColumnCount = match v.0 { Some(number) => unsafe { cmp::min(number, nsStyleColumn_kMaxColumnCount) }, None => NS_STYLE_COLUMN_COUNT_AUTO }; } ${impl_simple_copy('column_count', 'mColumnCount')} <% impl_app_units("column_rule_width", "mColumnRuleWidth", need_clone=True, round_to_pixels=True) %> </%self:impl_trait> <%self:impl_trait style_struct_name="Counters" skip_longhands="content counter-increment counter-reset"> pub fn set_content(&mut self, v: longhands::content::computed_value::T) { use properties::longhands::content::computed_value::T; use properties::longhands::content::computed_value::ContentItem; use style_traits::ToCss; use gecko_bindings::structs::nsStyleContentType::*; use gecko_bindings::bindings::Gecko_ClearAndResizeStyleContents; // Converts a string as utf16, and returns an owned, zero-terminated raw buffer. fn as_utf16_and_forget(s: &str) -> *mut u16 { use std::mem; let mut vec = s.encode_utf16().collect::<Vec<_>>(); vec.push(0u16); let ptr = vec.as_mut_ptr(); mem::forget(vec); ptr } match v { T::none | T::normal => { // Ensure destructors run, otherwise we could leak. if !self.gecko.mContents.is_empty() { unsafe { Gecko_ClearAndResizeStyleContents(&mut self.gecko, 0); } } }, T::Content(items) => { unsafe { Gecko_ClearAndResizeStyleContents(&mut self.gecko, items.len() as u32); } for (i, item) in items.into_iter().enumerate() { // NB: Gecko compares the mString value if type is not image // or URI independently of whatever gets there. In the quote // cases, they set it to null, so do the same here. unsafe { *self.gecko.mContents[i].mContent.mString.as_mut() = ptr::null_mut(); } match item { ContentItem::String(value) => { self.gecko.mContents[i].mType = eStyleContentType_String; unsafe { // NB: we share allocators, so doing this is fine. 
*self.gecko.mContents[i].mContent.mString.as_mut() = as_utf16_and_forget(&value); } } ContentItem::Attr(ns, val) => { self.gecko.mContents[i].mType = eStyleContentType_Attr; let s = if let Some(ns) = ns { format!("{}|{}", ns, val) } else { val }; unsafe { // NB: we share allocators, so doing this is fine. *self.gecko.mContents[i].mContent.mString.as_mut() = as_utf16_and_forget(&s); } } ContentItem::OpenQuote => self.gecko.mContents[i].mType = eStyleContentType_OpenQuote, ContentItem::CloseQuote => self.gecko.mContents[i].mType = eStyleContentType_CloseQuote, ContentItem::NoOpenQuote => self.gecko.mContents[i].mType = eStyleContentType_NoOpenQuote, ContentItem::NoCloseQuote => self.gecko.mContents[i].mType = eStyleContentType_NoCloseQuote, ContentItem::MozAltContent => self.gecko.mContents[i].mType = eStyleContentType_AltContent, ContentItem::Counter(name, style) => { unsafe { bindings::Gecko_SetContentDataArray(&mut self.gecko.mContents[i], eStyleContentType_Counter, 2) }<|fim▁hole|> // When we support <custom-ident> values for list-style-type this will need to be updated array[1].set_ident(&style.to_css_string()); } ContentItem::Counters(name, sep, style) => { unsafe { bindings::Gecko_SetContentDataArray(&mut self.gecko.mContents[i], eStyleContentType_Counters, 3) } let mut array = unsafe { &mut **self.gecko.mContents[i].mContent.mCounters.as_mut() }; array[0].set_string(&name); array[1].set_string(&sep); // When we support <custom-ident> values for list-style-type this will need to be updated array[2].set_ident(&style.to_css_string()); } ContentItem::Url(url) => { unsafe { bindings::Gecko_SetContentDataImage(&mut self.gecko.mContents[i], url.for_ffi()) } } } } } } } pub fn copy_content_from(&mut self, other: &Self) { use gecko_bindings::bindings::Gecko_CopyStyleContentsFrom; unsafe { Gecko_CopyStyleContentsFrom(&mut self.gecko, &other.gecko) } } % for counter_property in ["Increment", "Reset"]: pub fn set_counter_${counter_property.lower()}(&mut self, v: longhands::counter_increment::computed_value::T) { unsafe { bindings::Gecko_ClearAndResizeCounter${counter_property}s(&mut self.gecko, v.0.len() as u32); for (i, item) in v.0.into_iter().enumerate() { self.gecko.m${counter_property}s[i].mCounter.assign_utf8(&item.0); self.gecko.m${counter_property}s[i].mValue = item.1; } } } pub fn copy_counter_${counter_property.lower()}_from(&mut self, other: &Self) { unsafe { bindings::Gecko_CopyCounter${counter_property}sFrom(&mut self.gecko, &other.gecko) } } % endfor </%self:impl_trait> <%self:impl_trait style_struct_name="XUL" skip_longhands="-moz-stack-sizing"> #[allow(non_snake_case)] pub fn set__moz_stack_sizing(&mut self, v: longhands::_moz_stack_sizing::computed_value::T) { use properties::longhands::_moz_stack_sizing::computed_value::T; self.gecko.mStretchStack = v == T::stretch_to_fit; } ${impl_simple_copy('_moz_stack_sizing', 'mStretchStack')} </%self:impl_trait> <%def name="define_ffi_struct_accessor(style_struct)"> #[no_mangle] #[allow(non_snake_case, unused_variables)] pub unsafe extern "C" fn Servo_GetStyle${style_struct.gecko_name}(computed_values: ServoComputedValuesBorrowedOrNull) -> *const ${style_struct.gecko_ffi_name} { ComputedValues::arc_from_borrowed(&computed_values).unwrap().get_${style_struct.name_lower}().get_gecko() as *const ${style_struct.gecko_ffi_name} } </%def> % for style_struct in data.style_structs: ${declare_style_struct(style_struct)} ${impl_style_struct(style_struct)} % if not style_struct.name in data.manual_style_structs: <%self:raw_impl_trait 
style_struct="${style_struct}"></%self:raw_impl_trait> % endif ${define_ffi_struct_accessor(style_struct)} % endfor // This is only accessed from the Gecko main thread. static mut EMPTY_VARIABLES_STRUCT: Option<nsStyleVariables> = None; #[no_mangle] #[allow(non_snake_case)] pub unsafe extern "C" fn Servo_GetStyleVariables(_cv: ServoComputedValuesBorrowedOrNull) -> *const nsStyleVariables { EMPTY_VARIABLES_STRUCT.as_ref().unwrap() } pub fn initialize() { unsafe { EMPTY_VARIABLES_STRUCT = Some(zeroed()); Gecko_Construct_nsStyleVariables(EMPTY_VARIABLES_STRUCT.as_mut().unwrap()); } } pub fn shutdown() { unsafe { EMPTY_VARIABLES_STRUCT.take().as_mut().map(|v| Gecko_Destroy_nsStyleVariables(v)); } }<|fim▁end|>
let mut array = unsafe { &mut **self.gecko.mContents[i].mContent.mCounters.as_mut() }; array[0].set_string(&name);
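Editor's note (not part of the dataset records): the set_paint_order method near the end of the gecko style-glue record above packs up to three paint-order components into a single byte, NS_STYLE_PAINT_ORDER_BITWIDTH bits per component. A minimal Python sketch of that packing scheme follows; the constant values used here (FILL=1, STROKE=2, MARKERS=3, a 2-bit field width) are assumptions for illustration only, since the real values come from Gecko's generated bindings.

# Hypothetical constants; Gecko defines the real ones in its generated structs.
PAINT_ORDER_FILL, PAINT_ORDER_STROKE, PAINT_ORDER_MARKERS = 1, 2, 3
PAINT_ORDER_BITWIDTH = 2  # assumed width of each packed component

def pack_paint_order(components):
    """Pack paint-order components into one integer, lowest bits first."""
    order = 0
    for pos, value in enumerate(components):
        order |= value << (pos * PAINT_ORDER_BITWIDTH)
    return order

# fill, stroke, markers -> binary 11 10 01
assert pack_paint_order([PAINT_ORDER_FILL, PAINT_ORDER_STROKE, PAINT_ORDER_MARKERS]) == 0b111001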
<|file_name|>exec.go<|end_file_name|><|fim▁begin|>package exec import ( "bytes" "fmt" "os/exec" "path/filepath" "runtime" "strings" "sync" "syscall" "time" "github.com/kballard/go-shellquote" "github.com/influxdata/telegraf" "github.com/influxdata/telegraf/internal" "github.com/influxdata/telegraf/plugins/inputs" "github.com/influxdata/telegraf/plugins/parsers" "github.com/influxdata/telegraf/plugins/parsers/nagios" ) const sampleConfig = ` ## Commands array commands = [ "/tmp/test.sh", "/usr/bin/mycollector --foo=bar", "/tmp/collect_*.sh" ] ## Timeout for each command to complete. timeout = "5s" ## measurement name suffix (for separating different commands) name_suffix = "_mycollector" ## Data format to consume. ## Each data format has its own unique set of configuration options, read ## more about them here: ## https://github.com/influxdata/telegraf/blob/master/docs/DATA_FORMATS_INPUT.md data_format = "influx" ` type Exec struct { Commands []string Command string Timeout internal.Duration parser parsers.Parser runner Runner } func NewExec() *Exec { return &Exec{ runner: CommandRunner{}, Timeout: internal.Duration{Duration: time.Second * 5}, } } type Runner interface { Run(*Exec, string, telegraf.Accumulator) ([]byte, error) } type CommandRunner struct{} func AddNagiosState(exitCode error, acc telegraf.Accumulator) error { nagiosState := 0 if exitCode != nil { exiterr, ok := exitCode.(*exec.ExitError) if ok { status, ok := exiterr.Sys().(syscall.WaitStatus) if ok { nagiosState = status.ExitStatus() } else { return fmt.Errorf("exec: unable to get nagios plugin exit code") } } else { return fmt.Errorf("exec: unable to get nagios plugin exit code") } } fields := map[string]interface{}{"state": nagiosState} acc.AddFields("nagios_state", fields, nil) return nil } func (c CommandRunner) Run( e *Exec, command string, acc telegraf.Accumulator, ) ([]byte, error) { split_cmd, err := shellquote.Split(command) if err != nil || len(split_cmd) == 0 { return nil, fmt.Errorf("exec: unable to parse command, %s", err) } cmd := exec.Command(split_cmd[0], split_cmd[1:]...) var out bytes.Buffer cmd.Stdout = &out if err := internal.RunTimeout(cmd, e.Timeout.Duration); err != nil { switch e.parser.(type) { case *nagios.NagiosParser: AddNagiosState(err, acc) default: return nil, fmt.Errorf("exec: %s for command '%s'", err, command) } } else { switch e.parser.(type) { case *nagios.NagiosParser: AddNagiosState(nil, acc) } } out = removeCarriageReturns(out) return out.Bytes(), nil } // removeCarriageReturns removes all carriage returns from the input if the // OS is Windows. It does not return any errors. 
func removeCarriageReturns(b bytes.Buffer) bytes.Buffer { if runtime.GOOS == "windows" { var buf bytes.Buffer for { byt, er := b.ReadBytes(0x0D) end := len(byt) if nil == er { end -= 1 } if nil != byt { buf.Write(byt[:end]) } else { break } if nil != er { break } } b = buf } return b } func (e *Exec) ProcessCommand(command string, acc telegraf.Accumulator, wg *sync.WaitGroup) { defer wg.Done() out, err := e.runner.Run(e, command, acc) if err != nil { acc.AddError(err) return } metrics, err := e.parser.Parse(out) if err != nil { acc.AddError(err) } else { for _, metric := range metrics {<|fim▁hole|> func (e *Exec) SampleConfig() string { return sampleConfig } func (e *Exec) Description() string { return "Read metrics from one or more commands that can output to stdout" } func (e *Exec) SetParser(parser parsers.Parser) { e.parser = parser } func (e *Exec) Gather(acc telegraf.Accumulator) error { var wg sync.WaitGroup // Legacy single command support if e.Command != "" { e.Commands = append(e.Commands, e.Command) e.Command = "" } commands := make([]string, 0, len(e.Commands)) for _, pattern := range e.Commands { cmdAndArgs := strings.SplitN(pattern, " ", 2) if len(cmdAndArgs) == 0 { continue } matches, err := filepath.Glob(cmdAndArgs[0]) if err != nil { acc.AddError(err) continue } if len(matches) == 0 { // There were no matches with the glob pattern, so let's assume // that the command is in PATH and just run it as it is commands = append(commands, pattern) } else { // There were matches, so we'll append each match together with // the arguments to the commands slice for _, match := range matches { if len(cmdAndArgs) == 1 { commands = append(commands, match) } else { commands = append(commands, strings.Join([]string{match, cmdAndArgs[1]}, " ")) } } } } wg.Add(len(commands)) for _, command := range commands { go e.ProcessCommand(command, acc, &wg) } wg.Wait() return nil } func init() { inputs.Add("exec", func() telegraf.Input { return NewExec() }) }<|fim▁end|>
acc.AddFields(metric.Name(), metric.Fields(), metric.Tags(), metric.Time()) } } }
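Editor's note (not part of the dataset records): the Gather method in the Go record above expands each configured command by globbing its first token and, when nothing matches, falls back to running the pattern unchanged on the assumption that the command is on PATH. A small Python sketch of that same fallback logic, with illustrative names only:

import glob

def expand_commands(patterns):
    # Mirror Exec.Gather: glob the command part, keep the arguments untouched.
    commands = []
    for pattern in patterns:
        cmd_and_args = pattern.split(" ", 1)
        matches = glob.glob(cmd_and_args[0])
        if not matches:
            # No glob match: assume the command is on PATH and run it as-is.
            commands.append(pattern)
        else:
            for match in matches:
                rest = " " + cmd_and_args[1] if len(cmd_and_args) > 1 else ""
                commands.append(match + rest)
    return commands

print(expand_commands(["/tmp/collect_*.sh --foo=bar", "uptime"]))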
<|file_name|>hDBSessionMaker.py<|end_file_name|><|fim▁begin|># create a Session object by sessionmaker import os import ConfigParser import sqlalchemy.orm # get path to taskmanager. it is assumed that this script is in the lib directory of # the taskmanager package. tmpath = os.path.normpath( os.path.join( os.path.dirname( os.path.realpath(__file__) ) + '/..' ) ) etcpath = '%s/etc' % tmpath # for configuration files # library is in the same folder from hDatabase import Base class hDBSessionMaker( object ): def __init__( self, configFileName=None, createTables=False, echo=False ): if not configFileName: # use default config file etcpath = os.path.normpath( os.path.join( os.path.dirname( os.path.realpath(__file__) ) + '/../etc' ) ) # default config file for database connection configFileName = "{etcPath}/serversettings.cfg".format(etcPath=etcpath) # read config file if os.path.exists( configFileName ): config = ConfigParser.ConfigParser() config.read( configFileName ) else: sys.stderr.write( "ERROR: Could not find Config file {c}!".format( c=configFileName) ) sys.exit( -1 ) databaseDialect = config.get( 'DATABASE', 'database_dialect' ) databaseHost = config.get( 'DATABASE', 'database_host' ) databasePort = config.get( 'DATABASE', 'database_port' ) databaseName = config.get( 'DATABASE', 'database_name' ) databaseUsername = config.get( 'DATABASE', 'database_username' ) databasePassword = config.get( 'DATABASE', 'database_password' ) ## @var engine #The engine that is connected to the database #use "echo=True" for SQL printing statements to stdout self.engine = sqlalchemy.create_engine( "{dialect}://{user}:{password}@{host}:{port}/{name}".format( dialect=databaseDialect, user=databaseUsername, password=databasePassword, host=databaseHost, port=databasePort, name=databaseName), pool_size=50, # number of connections to keep open inside the connection pool max_overflow=100, # number of connections to allow in connection pool "overflow", that is connections that can be opened above and beyond the pool_size setting, which defaults to five. pool_recycle=3600, # this setting causes the pool to recycle connections after the given number of seconds has passed. echo=False ) # Create all tables in the engine. This is equivalent to "Create Table" # statements in raw SQL. Base.metadata.create_all( self.engine ) ## @var DBsession # define a Session class which will serve as a factory for new Session objects # # http://docs.sqlalchemy.org/en/rel_0_9/orm/session.html:<|fim▁hole|> # sessionmaker() is a Session factory. A factory is just something that produces a new object when called. # # Thread local factory for sessions. See http://docs.sqlalchemy.org/en/rel_0_9/orm/session.html#contextual-thread-local-sessions # SessionFactory = sqlalchemy.orm.sessionmaker( bind = self.engine ) self.DBSession = sqlalchemy.orm.scoped_session( SessionFactory )<|fim▁end|>
# Session is a regular Python class which can be directly instantiated. However, to standardize how sessions are # configured and acquired, the sessionmaker class is normally used to create a top level Session configuration # which can then be used throughout an application without the need to repeat the configurational arguments.
<|file_name|>SnappingFader.java<|end_file_name|><|fim▁begin|>package model.protocol.osc.touchosc; public class SnappingFader { static final float SNAP_DELTA = 0.1f; private double currentRealValue; public SnappingFader(final double currentRealValue) { this.currentRealValue = currentRealValue; }<|fim▁hole|> currentRealValue = value; handler.snapSucceeded(); } else { handler.snapFailed(); } } public void forceUpdate(final double value) { currentRealValue = value; } public boolean isInBoundary(final double receivedValue) { return Math.abs(receivedValue - currentRealValue) <= SNAP_DELTA; } }<|fim▁end|>
public void tryUpdate(final double value, final ISnappingFaderEventHandler handler) { if (isInBoundary(value)) {
<|file_name|>test_insertion_sort.py<|end_file_name|><|fim▁begin|># -*- coding utf-8 -*- from __future__ import unicode_literals import pytest from structures.insertion_sort import insertion_sort @pytest.fixture def sorted_list(): return [i for i in xrange(10)] @pytest.fixture def reverse_list(): return [i for i in xrange(9, -1, -1)] @pytest.fixture def average_list(): return [5, 9, 2, 4, 1, 6, 8, 7, 0, 3] def test_sorted(sorted_list): insertion_sort(sorted_list) assert sorted_list == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] def test_worst(reverse_list): insertion_sort(reverse_list) assert reverse_list == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9] <|fim▁hole|> def test_repeats(): l = [3, 6, 7, 3, 9, 5, 2, 7] insertion_sort(l) assert l == [2, 3, 3, 5, 6, 7, 7, 9] def test_multiple_types(): l = [3, 'foo', 2.8, True, []] # python 2 sorting is crazy insertion_sort(l) assert l == [True, 2.8, 3, [], 'foo']<|fim▁end|>
def test_average(average_list): insertion_sort(average_list) assert average_list == [0, 1, 2, 3, 4, 5, 6, 7, 8, 9]
<|file_name|>RA250C.H<|end_file_name|><|fim▁begin|>/* File: RA250.H 'C' structures for RemoteAccess 2.50 Gamma Version: 0.3 (22/04/96) Author: Damien Guard Copyright: Envy Technologies, 1996. Changes: 0.1 - Initial release 0.2 - Fixed some compile problems Sorted enumeration types 0.3 - Added some examples All new documentation (+Write format) CRC routines Notes: These are neither official structures nor authorised by Wantree Development. Always make sure "Allocate enums as int's" is OFF! They should be allocated as CHAR. This document formatted with tab's as three(3) characters WARNING: These structures have not been fully tested. I have only tested CONFIG.RA (CONFIGrecord) extensively. Info: Please e-mail suggestions to : [email protected] Latest version available at : http://www.guernsey.net/~envy If you use this structure in your software please include something like 'This product uses the RA 'C' Developer Kit from Envy Technologies' - it's not much to ask and using any part of the RACDK in your software is free (in compiled form - see RACDK.DOC for more information) */ /* C <> Pascal string conversion macros from SNIPPETS 'C' library C <> Pascal string copy macros by Damien Guard Convert a string: Pas2C(string) to convert string from Pascal to C (ie. after READING) C2Pas(string) to convert string to Pascal from C (ie. before WRITING) Copy a string: cpyPas2C(destination,source) to copy from a Pascal string to a C string cpyC2Pas(destination,source) to copy from a C string to a Pascal string */ typedef unsigned char UCHAR; #define Pas2C(s) {UCHAR n = *(s); memmove((s), &(s)[1], n); s[n] = '\0';} #define C2Pas(s) {int n = strlen(s); memmove(&(s)[1], (s), n); *(s) = n;} #define cpyPas2C(d,s) {UCHAR n = *(s); memmove((d), &(s)[1], n); d[n] = '\0';} #define cpyC2Pas(d,s) {int n = strlen(s); memmove(&(d)[1], (s), n); *(d) = n;} /* Fake Pascal boolean type for clarity */ typedef unsigned char booleanf; /* enums here for clarity - not sure if they work */ enum AskType {Yes, No, Ask, Only}; enum VideoType {Auto, Short, Long}; enum MsgType {LocalMail, NetMail, EchoMail, Internet, Newsgroup}; enum MsgKindsType {Both, Private, Public, ROnly, NoReply}; enum OrphanType {Ignore, Create, Kill }; enum ResetType {Never, Week, Month, Year}; /* Typedef the RemoteAccess 'types' */ typedef unsigned char FlagType[4]; typedef unsigned char TimeF[6]; /* Time format */ typedef unsigned char DateF[9]; /* Date format */ typedef unsigned char LongDate[10]; /* Long date format */ typedef unsigned char ByteArray32[32]; typedef unsigned char MSGTOIDXrecord[36]; typedef unsigned char MSGTXTrecord[256]; typedef unsigned int LASTREADrecord[200]; typedef unsigned int COMBINEDrecord[200]; /* Mail network address (internal) */ struct NetAddress { unsigned int Zone, Net, Node, Point; }; /* Security limits (LIMITS.RA) */ struct LIMITSrecord { unsigned int Security, Ltime, L300, L1200, L2400, L4800, L7200, L9600, L12000, L14400, L16800, L19200, L38400, Llocal, RatioNum, RatioK; float PerMinCost; unsigned int L21600, L24000, L26400, L28800, L57600, L64000; float FlexiTime; unsigned int LsessionTime, ResetAmt; enum ResetType ResetPeriod; unsigned int ResetOffset, L31200, L33600; unsigned char FreeSpace[14]; }; /* Languages (LANGUAGE.RA) */ struct LANGUAGErecord { unsigned char Name[21], Attribute, DefName[61], MenuPath[61], TextPath[61], QuesPath[61]; unsigned int Security; FlagType Flags, NotFlagsMask; unsigned char FreeSpace[191]; }; /* Hudson message information (MSGINFO.BBS) */ struct MSGINFOrecord { unsigned int LowMsg, 
HighMsg, TotalMsgs; unsigned char TotalOnBoard[200]; }; /* Hudson message index (MSGIDX.BBS) */ struct HMBMSGIDXrecord { unsigned int MsgNum; unsigned char Board; }; /* Hudson message headers (MSGHDR.BBS) */ struct HMBMSGHDRrecord { unsigned int MsgNum, PrevReply, NextReply, TimesRead, StartBlock, NumBlocks, DestNet, DestNode, OrigNet, OrigNode, DestZone; unsigned char OrigZone; unsigned int Cost; unsigned char MsgAttr, NetAttr, Board; TimeF PostTime; DateF PostDate; MSGTOIDXrecord WhoTo, WhoFrom; unsigned char Subject[73]; }; /* Current online status (USERON.BBS) */ struct USERONrecord { MSGTOIDXrecord Name, Handle; unsigned char Line; unsigned int Baud; unsigned char City[26], Status, /* 0 : Browsing/menu 1 : File transfer 2 : Messaging 3 : Door 4 : Sysop chat 5 : Questionnaire 6 : Real-time-conferencing 7 : New user logon 255 : User-defined - display StatDesc */ Attribute, /* Bit 0 : Hidden 1 : Wants chat 2 : Reserved for RANETMGR (RA/Pro) 3 : Do not disturb flag 6 : Ready */ StatDesc[11], FreeSpace[98]; unsigned int NoCalls; }; /* Todays callers list (LASTCALL.BBS) */ struct LASTCALLrecord { unsigned char Line; MSGTOIDXrecord Name, Handle; unsigned char City[26]; unsigned int Baud; long Times; unsigned char LogOn[6], LogOff[6]; unsigned char Attribute; /* Bit 0 : Hidden */ }; /* File area header (FDBxxx.HDR) */ struct FILESHDRrecord { unsigned char Name[13]; long Size, CRC32; unsigned char Uploader[36]; long UploadDate, FileDate, LastDL; unsigned int TimesDL; unsigned char Attrib, /* Bit 0 : Deleted 1 : Unlisted 2 : Free - Does NOT affect "Cost" 3 : Not available (don't allow downloads) 4 : Locked (no kill) 5 : Missing/offline 6 : No time restrictions - always allow DL */ Password[16], KeyWord [5] [16]; unsigned int Cost; long LongDescPtr; unsigned char FreeSpace[20]; }; /* File area index (FDBxxx.IDX) */ struct FILESIDXrecord { unsigned char Name[13]; long UploadDate, KeyWordCRC[5], LongDescPtr; }; /* User base index (USERSIDX.BBS) */ struct USERSIDXrecord { long NameCRC32, HandleCRC32; }; /* User base (USERS.BBS) */ struct USERSrecord { MSGTOIDXrecord Name; unsigned char Location[26], Organisation[51], Address1[51], Address2[51], Address3[51], Handle[36], Comment[81]; long PasswordCRC; unsigned char DataPhone[16], VoicePhone[16]; TimeF LastTime; DateF LastDate; unsigned char Attribute, /* Bit 0 : Flagged for delete 1 : Clear screen 2 : More? 
prompt 3 : ANSI emulation 4 : No-kill 5 : Xfer priority 6 : Full screen msg editor 7 : Quiet mode */ Attribute2; /* Bit 0 : Hot-keys 1 : AVT/0 (Avatar) 2 : Full screen message viewer 3 : Hidden 4 : Page priority 5 : No echomail in mailbox scan 6 : Guest account 7 : Post bill enabled */ FlagType Flags; long Credit, Pending; unsigned int MsgsPosted, Security; long LastRead, NoCalls, Uploads, Downloads, UploadsK, DownloadsK, TodayK; int Elapsed; unsigned int ScreenLength; unsigned char LastPwdChange; unsigned int Group; COMBINEDrecord CombinedInfo; DateF FirstDate, BirthDate, SubDate; unsigned char ScreenWidth, Language, DateFormat, ForwardTo[36]; unsigned int MsgArea, FileArea; unsigned char DefaultProtocol; unsigned int FileGroup; unsigned char LastDOBCheck, Sex; long XIrecord; unsigned int MsgGroup; unsigned char Attribute3, /* Bit 0 : Mailbox check: scan selected areas only */ Password[16], FreeSpace[31]; }; /* User base index (USERSXI.BBS) */ struct USERSXIrecord { unsigned char FreeSpace[200]; }; /* System information (SYSINFO.BBS) */ struct SYSINFOrecord { long TotalCalls; MSGTOIDXrecord LastCaller, LastHandle; unsigned char ExtraSpace[92]; }; /* Timelog stat (TIMELOG.BBS) for EACH node */ struct TIMELOGrecord { DateF StartDate; unsigned int BusyPerHour[24], BusyPerDay[7]; /* not implemented */ }; /* Menu (*.MNU) */ struct MNUrecord { unsigned char Typ; unsigned int Security, MaxSec; FlagType NotFlagsMask, Flags; unsigned int TimeLeft, TimeUsed; unsigned char Age, TermAttrib; /* Bit 0 : ANSI 1 : Avatar 2 : RIPscript */ long MinSpeed, MaxSpeed, Credit, OptionCost, PerMinCost; ByteArray32 Node, Group; unsigned int StartTime[7], StopTime[7]; unsigned char Display[136], HotKey[9], MiscData[136], Foreground, Background, FreeSpace[50]; }; /* System events (EVENTS.RA) */ struct EVENTrecord { unsigned char Status; /* 0=Deleted 1=Enabled 2=Disabled */ TimeF StartTime; unsigned char ErrorLevel, Days; booleanf Forced; DateF LastTimeRun; }; struct EVENTrecord EVENTrecordArray[20]; /* Message area configuration (MESSAGES.RA) */ struct MESSAGErecord { unsigned int AreaNum, Unused; unsigned char Name[41]; enum MsgType Typ; enum MsgKindsType MsgKinds; unsigned char Attribute, /* Bit 0 : Enable EchoInfo 1 : Combined access 2 : File attaches 3 : Allow aliases 4 : Use SoftCRs as characters 5 : Force handle 6 : Allow deletes 7 : Is a JAM area */ DaysKill, /* Kill older than 'x' days */ RecvKill; /* Kill recv msgs, recv for more than 'x' days */ unsigned int CountKill, ReadSecurity; FlagType ReadFlags, ReadNotFlags; unsigned int WriteSecurity; FlagType WriteFlags, WriteNotFlags; unsigned int SysopSecurity; FlagType SysopFlags, SysopNotFlags; unsigned char OriginLine[61], AkaAddress, Age, JAMbase[61]; unsigned int Group, AltGroup[3]; unsigned char Attribute2; /* Bit 0 : Include in all groups */ unsigned int NetmailArea; unsigned char FreeSpace2[7]; }; /* Groups (MGROUPS.RA & FGROUPS.RA) */ struct GROUPrecord { unsigned int AreaNum; unsigned char Name[41]; unsigned int Security; FlagType Flags, NotFlagsMask; unsigned char FreeSpace[100]; }; /* File area configuration (FILES.RA) */ struct FILESrecord { unsigned int AreaNum, Unused; unsigned char Name[41], Attrib, /* Bit 0 : Include in new files scan 1 : Include in upload dupe scan 2 : Permit long descriptions 3 : Area is on CD-ROM 4 : All files are FREE 5 : Allow DLs not in FDB 6 : Allow users to password uploads 7 : Scan uploads */ FilePath[41]; unsigned int KillDaysDL, KillDaysFD; unsigned char Password[16]; unsigned int MoveArea; unsigned char Age, 
ConvertExt; unsigned int Group; unsigned char Attrib2; /* Bit 0 : Include in all groups */ unsigned int DefCost, UploadArea, UploadSecurity; FlagType UploadFlags, UploadNotFlags; unsigned int Security; FlagType Flags, NotFlags; unsigned int ListSecurity; FlagType ListFlags, ListNotFlags; unsigned int AltGroup[3]; unsigned char Device, FreeSpace[13]; }; /* Multi-line conferencing (CONF.RA?) */ struct CONFrecord { unsigned char Name[9], Parent[9], Desc[71], Attr, /* Bit 0 : Private 1 : Unlisted 2 : Global 3 : Permanent 4 : Use handles */ Moderator[36], Language[21], Password[16]; unsigned int Security; FlagType Flags; unsigned char NumNodes, Active[250]; booleanf Child[250]; FlagType NotFlagsMask; unsigned char FreeSpace[96]; }; /* Modem configuration (MODEM.RA) */ struct MODEMrecord { unsigned char ComPort, InitTries; unsigned int BufferSize, ModemDelay; long MaxSpeed; booleanf SendBreak, LockModem, AnswerPhone, OffHook; unsigned char InitStr[71], InitStr2[71], BusyStr[71], InitResp[41], BusyResp[41], Connect300[41], Connect1200[41], Connect2400[41], Connect4800[41], Connect7200[41], Connect9600[41], Connect12k[41], Connect14k[41], Connect16k[41], Connect19k[41], Connect38k[41], ConnectFax[41], RingStr[21], AnswerStr[21], ErrorFreeString[16], Connect21k[41], Connect24k[41], Connect26k[41], Connect28k[41], Connect57k[41], Connect64k[41], Connect31k[41], Connect33k[41], FreeSpace[100]; }; /* Archiver control (internal) */ struct ARCrecord { unsigned char Extension[4], UnpackCmd[61], PackCmd[61]; }; /* Main configuration (CONFIG.RA) */ /* All fields prefixed with 'x' no longer in use */ struct CONFIGrecord { unsigned int VersionID; unsigned char xCommPort; /* unused - found in MODEM.RA */ <|fim▁hole|> xBusyStr[71], /* unused - found in MODEM.RA */ xInitResp[41], /* unused - found in MODEM.RA */ xBusyResp[41], /* unused - found in MODEM.RA */ xConnect300[41], /* unused - found in MODEM.RA */ xConnect1200[41],/* unused - found in MODEM.RA */ xConnect2400[41],/* unused - found in MODEM.RA */ xConnect4800[41],/* unused - found in MODEM.RA */ xConnect9600[41],/* unused - found in MODEM.RA */ xConnect19k[41], /* unused - found in MODEM.RA */ xConnect38k[41]; /* unused - found in MODEM.RA */ booleanf xAnswerPhone; /* unused - found in MODEM.RA */ unsigned char xRing[21], /* unused - found in MODEM.RA */ xAnswerStr[21]; /* unused - found in MODEM.RA */ booleanf xFlushBuffer; /* unused - found in MODEM.RA */ int xModemDelay; /* unused - found in MODEM.RA */ unsigned int MinimumBaud, GraphicsBaud, TransferBaud; TimeF SlowBaudTimeStart, SlowBaudTimeEnd, DownloadTimeStart, DownloadTimeEnd, PageStart[7], PageEnd[7]; unsigned char SeriNum[23], CustNum[23], FreeSpace1[24]; unsigned int PwdExpiry; unsigned char MenuPath[61], TextPath[61], AttachPath[61], NodelistPath[61], MsgBasePath[61], SysPath[61], ExternalEdCmd[61]; struct NetAddress Address[10]; /* 0 = Main address, 1 = AKA 1... 
*/ unsigned char SystemName[31]; unsigned int NewSecurity, NewCredit; FlagType NewFlags; unsigned char OriginLine[61], QuoteString[16], Sysop[36], LogFileName[61]; booleanf FastLogon, AllowSysRem, MonoMode, StrictPwdChecking, DirectWrite, SnowCheck; int CreditFactor; unsigned int UserTimeOut, LogonTime, PasswordTries, MaxPage, PageLength; booleanf CheckForMultiLogon, ExcludeSysopFromList, OneWordNames; enum AskType CheckMail; booleanf AskVoicePhone, AskDataPhone, DoFullMailCheck, AllowFileShells, FixUploadDates, FreezeChat; enum AskType ANSI, /* ANSI: Yes/no/ask new users */ ClearScreen, /* Clear: " " */ MorePrompt; /* More: " " */ booleanf UploadMsgs; enum AskType KillSent; /* Kill/Sent " */ unsigned int CrashAskSec; /* Min sec# to ask 'Crash Mail ?' */ FlagType CrashAskFlags; unsigned int CrashSec; /* Min sec# to always send crash mail */ FlagType CrashFlags; unsigned int FAttachSec; /* " ask 'File Attach ?' */ FlagType FAttachFlags; unsigned char NormFore, /* foreground & background colours */ NormBack, StatFore, StatBack, HiBack, HiFore, WindFore, WindBack, ExitLocal, /* exit error levels - Unused?*/ Exit300, Exit1200, Exit2400, Exit4800, Exit9600, Exit19k, Exit38k; booleanf MultiLine; unsigned char MinPwdLen; unsigned int MinUpSpace; enum AskType HotKeys; unsigned char BorderFore, BorderBack, BarFore, BarBack, LogStyle, MultiTasker, PwdBoard; unsigned int xBufferSize; /* unused - found in MODEM.RA */ unsigned char FKeys[10] [61]; booleanf WhyPage; unsigned char LeaveMsg; booleanf ShowMissingFiles, xLockModem; /* unused - found in MODEM.RA */ unsigned char FreeSpace2[10]; booleanf AllowNetmailReplies; unsigned char LogonPrompt[41]; enum AskType CheckNewFiles; unsigned char ReplyHeader[61]; unsigned char BlankSecs; unsigned char ProtocolAttrib[6]; unsigned char xErrorFreeString[16], /* unused - found in MODEM.RA */ xDefaultCombined[25]; /* replaced with DefaultCombined */ unsigned int RenumThreshold; unsigned char LeftBracket, RightBracket; booleanf AskForHandle, AskForBirthDate; unsigned int GroupMailSec; booleanf ConfirmMsgDeletes; unsigned char FreeSpace4[30], TempScanDir[61]; enum AskType ScanNow; unsigned char xUnknownArcAction, /* unused - found in ARCHIVE.RA ?*/ xFailedUnpackAction,/* unused - found in ARCHIVE.RA ?*/ FailedScanAction; /* Bit 0 : Mark deleted, 1 : Mark unlisted, 2 : Mark not available */ unsigned int xUnknownArcArea, /* no longer in use */ xFailedUnpackArea, /* no longer in use */ FailedScanArea; unsigned char ScanCmd[61]; booleanf xDeductIfUnknown; /* no longer in use */ unsigned char NewUserGroup; enum AskType AVATAR; unsigned char BadPwdArea, Location[41], DoAfterAction, /* 0 = wait for CR else wait for x seconds */ OldFileLine[41], /* unused - replaced with FileLine*/ CRfore, CRback, LangHdr[41]; booleanf xSendBreak; /* unused - found in MODEM.RA */ unsigned char ListPath[61]; /* unused ??*/ enum AskType FullMsgView, EMSI_Enable; booleanf EMSI_NewUser; unsigned char EchoChar[2], xConnect7200[41], /* unused - found in MODEM.RA */ xConnect12000[41], /* unused - found in MODEM.RA */ xConnect14400[41], /* unused - found in MODEM.RA */ Exit7200, Exit12000, Exit14400, ChatCommand[61]; enum AskType ExtEd; unsigned char NewuserLanguage, LanguagePrompt[41]; enum VideoType VideoMode; booleanf AutoDetectANSI, xOffHook; /* unused - found in MODEM.RA */ unsigned char NewUserDateFormat; unsigned char KeyboardPwd[16]; booleanf CapLocation; unsigned char NewuserSub, PrinterName[5], HilitePromptFore, /* note lowercase 'l' in hilite */ HiLitePromptBack, xInitStr2[71]; /* unused 
- found in MODEM.RA */ booleanf AltJSwap; unsigned char SemPath[61]; booleanf AutoChatCapture; unsigned char FileBasePath[61]; booleanf NewFileTag, IgnoreDupeExt; unsigned char TempCDFilePath[61], TagFore, TagBack, xConnect16k[41], /* unused - found in MODEM.RA */ Exit16k, FilePayback, FileLine[201], FileMissingLine[201], NewUserULCredit; unsigned int NewUserULCreditK; struct ARCrecord ArcInfo[10]; unsigned char RAMGRAltFKeys [5] [61], ArcViewCmd[61], xConnectFax[41], /* unused - found in MODEM.RA */ ExitFax; booleanf UseXMS, UseEMS; unsigned char CheckDOB; enum AskType EchoCheck; unsigned int ccSec, ReturnRecSec; booleanf HonourNetReq; COMBINEDrecord DefaultCombined; booleanf AskForSex, AskForAddress; enum AskType DLdesc; booleanf NewPhoneScan; unsigned char Exit21k, Exit24k, Exit26k, Exit28k, Exit57k, Exit64k; booleanf TagLogoffWarning, /* RA 2.5 - Warn if files are tagged at log off */ LimitLocal, /* RA 2.5 - Turn off sysop control keys for non-sysop local users*/ SavePasswords; /* RA 2.5 - Save user passwords */ unsigned char BlankLogins, /* RA 2.5 - Log off after x blank logins (returns)*/ ripiconpath[61], /* RA 2.5 - Path to RIPscript icons */ Exit31k, /* RA 2.5 - Exit level for 31kbps */ Exit33k; /* RA 2.5 - Exit level for 33kbps */ booleanf IncludeNewCDareas;/* RA 2.5 - Include CD areas in new files list */ unsigned char FutureExpansion[513]; }; /* Exit-info dropfile (EXITINFO.BBS) */ struct EXITINFOrecord { unsigned int Baud; struct SYSINFOrecord SysInfo; struct TIMELOGrecord TimeLogInfo; struct USERSrecord UserInfo; struct EVENTrecord EventInfo; booleanf NetMailEntered, EchoMailEntered; TimeF LoginTime; DateF LoginDate; unsigned int TimeLimit; long LoginSec; unsigned int UserRecord, ReadThru, NumberPages, DownloadLimit; TimeF TimeOfCreation; long LogonPasswordCRC; booleanf WantChat; int DeductedTime; unsigned char MenuStack [50] [9], MenuStackPointer; struct USERSXIrecord UserXIinfo; booleanf ErrorFreeConnect, SysopNext, EMSI_Session; unsigned char EMSI_Crtdef[41], EMSI_Protocols[41], EMSI_Capabilities[41], EMSI_Requests[41], EMSI_Software[41], Hold_Attr1, Hold_Attr2, Hold_Len, PageReason[81], StatusLine, LastCostMenu[9]; unsigned int MenuCostPerMin; booleanf DoesAVT, RIPmode, RIPVersion; unsigned char ExtraSpace[85]; }; /* File transfer protocols (PROTOCOL.RA) */ struct PROTOCOLrecord { unsigned char Name[16], ActiveKey; booleanf OpusTypeCtlFile, BatchAvailable; unsigned char Attribute, /* 0=Disabled 1=Enabled */ LogFileName[81], CtlFileName[81], DnCmdString[81], DnCtlString[81], UpCmdString[81], UpCtlString[81], UpLogKeyword[21], DnLogKeyword[21]; unsigned int XferDescWordNum, XferNameWordNum; };<|fim▁end|>
long xBaud; /* unused - found in MODEM.RA */ unsigned char xInitTries, /* unused - found in MODEM.RA */ xInitStr[71], /* unused - found in MODEM.RA */
<|file_name|>TaskWrongPasswordException.java<|end_file_name|><|fim▁begin|>/* * Created on 20/giu/2010 * * Copyright 2010 by Andrea Vacondio ([email protected]). * * This file is part of the Sejda source code * * This program is free software: you can redistribute it and/or modify * it under the terms of the GNU Affero General Public License as * published by the Free Software Foundation, either version 3 of the * License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU Affero General Public License for more details. * * You should have received a copy of the GNU Affero General Public License * along with this program. If not, see <http://www.gnu.org/licenses/>. */ package org.sejda.model.exception; /** * Exception thrown when a wrong password has been set and it's not possible to open the pdf document (and execute the task) * * @author Andrea Vacondio<|fim▁hole|> */ public class TaskWrongPasswordException extends TaskIOException { private static final long serialVersionUID = -5517166148313118559L; /** * @param message * @param cause */ public TaskWrongPasswordException(String message, Throwable cause) { super(message, cause); } /** * @param message */ public TaskWrongPasswordException(String message) { super(message); } /** * @param cause */ public TaskWrongPasswordException(Throwable cause) { super(cause); } }<|fim▁end|>
*
<|file_name|>docs.rs<|end_file_name|><|fim▁begin|>//! Protocol documentation #[doc(include = "../protocol-docs/websocket.md")]<|fim▁hole|><|fim▁end|>
pub mod websocket {}
<|file_name|>IOErrorEvent.js<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|>
export { default } from "./../../_gen/openfl/events/IOErrorEvent";
<|file_name|>state.py<|end_file_name|><|fim▁begin|>from .trace_decorator import getLog from .exception import StateError class State(object): def __init__(self): self._state = [] # can be "unknown", "success" or "fail" self.result = "unknown" self.state_log = getLog("mockbuild.Root.state") def state(self): if not len(self._state): raise StateError("state called on empty state stack") return self._state[-1] def start(self, state): if state is None: raise StateError("start called with None State") self._state.append(state) self.state_log.info("Start: %s" % state) def finish(self, state): if len(self._state) == 0: raise StateError("finish called on empty state list") current = self._state.pop() if state != current:<|fim▁hole|> raise StateError("state finish mismatch: current: %s, state: %s" % (current, state)) self.state_log.info("Finish: %s" % state) def alldone(self): if len(self._state) != 0: raise StateError("alldone called with pending states: %s" % ",".join(self._state))<|fim▁end|>
<|file_name|>actionable.rs<|end_file_name|><|fim▁begin|>// This file was generated by gir (5c017c9) from gir-files (71d73f0) // DO NOT EDIT <|fim▁hole|>use ffi; use glib; use glib::object::IsA; use glib::translate::*; glib_wrapper! { pub struct Actionable(Object<ffi::GtkActionable>): Widget; match fn { get_type => || ffi::gtk_actionable_get_type(), } } pub trait ActionableExt { fn get_action_name(&self) -> Option<String>; fn get_action_target_value(&self) -> Option<glib::Variant>; fn set_action_name<'a, P: Into<Option<&'a str>>>(&self, action_name: P); //fn set_action_target(&self, format_string: &str, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs); fn set_action_target_value(&self, target_value: &glib::Variant); fn set_detailed_action_name(&self, detailed_action_name: &str); } impl<O: IsA<Actionable>> ActionableExt for O { fn get_action_name(&self) -> Option<String> { unsafe { from_glib_none(ffi::gtk_actionable_get_action_name(self.to_glib_none().0)) } } fn get_action_target_value(&self) -> Option<glib::Variant> { unsafe { from_glib_none(ffi::gtk_actionable_get_action_target_value(self.to_glib_none().0)) } } fn set_action_name<'a, P: Into<Option<&'a str>>>(&self, action_name: P) { let action_name = action_name.into(); let action_name = action_name.to_glib_none(); unsafe { ffi::gtk_actionable_set_action_name(self.to_glib_none().0, action_name.0); } } //fn set_action_target(&self, format_string: &str, : /*Unknown conversion*//*Unimplemented*/Fundamental: VarArgs) { // unsafe { TODO: call ffi::gtk_actionable_set_action_target() } //} fn set_action_target_value(&self, target_value: &glib::Variant) { unsafe { ffi::gtk_actionable_set_action_target_value(self.to_glib_none().0, target_value.to_glib_none().0); } } fn set_detailed_action_name(&self, detailed_action_name: &str) { unsafe { ffi::gtk_actionable_set_detailed_action_name(self.to_glib_none().0, detailed_action_name.to_glib_none().0); } } }<|fim▁end|>
use Widget;
<|file_name|>630.js<|end_file_name|><|fim▁begin|>var __v=[ { "Id": 2046, "Chapter": 630,<|fim▁hole|> "Name": "c++", "Sort": 0 } ]<|fim▁end|>
<|file_name|>GetIntrinsic.d.ts<|end_file_name|><|fim▁begin|>/** * Returns the ECMAScript intrinsic for the name. * * @param name The ECMAScript intrinsic name * @param allowMissing Whether the intrinsic can be missing in this environment * * @throws {SyntaxError} If the ECMAScript intrinsic doesn't exist * @throws {TypeError} If the ECMAScript intrinsic exists, but not in this environment and `allowMissing` is `false`. */ declare function GetIntrinsic<K extends keyof GetIntrinsic.Intrinsics>( name: K, allowMissing?: false, ): GetIntrinsic.Intrinsics[K]; declare function GetIntrinsic<K extends keyof GetIntrinsic.Intrinsics>( name: K, allowMissing: true, ): GetIntrinsic.Intrinsics[K] | undefined; declare function GetIntrinsic<K extends keyof GetIntrinsic.Intrinsics>( name: K, allowMissing?: boolean, ): GetIntrinsic.Intrinsics[K] | undefined; declare function GetIntrinsic(name: string, allowMissing?: boolean): unknown; export = GetIntrinsic; type numeric = number | bigint; interface TypedArray<T extends numeric = numeric> extends Readonly<ArrayBufferView> { /** The length of the array. */ readonly length: number; [index: number]: T; } interface TypedArrayConstructor { readonly prototype: TypedArrayPrototype; new (...args: unknown[]): TypedArrayPrototype; /** * Returns a new typed array from a set of elements. * @param items A set of elements to include in the new typed array object. */ of(this: new (length: number) => Int8Array, ...items: number[]): Int8Array; of(this: new (length: number) => Uint8Array, ...items: number[]): Uint8Array; of(this: new (length: number) => Uint8ClampedArray, ...items: number[]): Uint8ClampedArray; of(this: new (length: number) => Int16Array, ...items: number[]): Int16Array; of(this: new (length: number) => Uint16Array, ...items: number[]): Uint16Array; of(this: new (length: number) => Int32Array, ...items: number[]): Int32Array; of(this: new (length: number) => Uint32Array, ...items: number[]): Uint32Array; // For whatever reason, `array-type` considers `bigint` a non-simple type: // tslint:disable: array-type of(this: new (length: number) => BigInt64Array, ...items: bigint[]): BigInt64Array; of(this: new (length: number) => BigUint64Array, ...items: bigint[]): BigUint64Array; // tslint:enable: array-type of(this: new (length: number) => Float32Array, ...items: number[]): Float32Array; of(this: new (length: number) => Float64Array, ...items: number[]): Float64Array; /** * Creates a new typed array from an array-like or iterable object. * @param source An array-like or iterable object to convert to a typed array. * @param mapfn A mapping function to call on every element of the source object. * @param thisArg Value of 'this' used to invoke the mapfn. 
*/ from(this: new (length: number) => Int8Array, source: Iterable<number> | ArrayLike<number>): Int8Array; from<U>( this: new (length: number) => Int8Array, source: Iterable<U> | ArrayLike<U>, mapfn: (v: U, k: number) => number, thisArg?: unknown, ): Int8Array; from(this: new (length: number) => Uint8Array, source: Iterable<number> | ArrayLike<number>): Uint8Array; from<U>( this: new (length: number) => Uint8Array, source: Iterable<U> | ArrayLike<U>, mapfn: (v: U, k: number) => number, thisArg?: unknown, ): Uint8Array; from( this: new (length: number) => Uint8ClampedArray, source: Iterable<number> | ArrayLike<number>, ): Uint8ClampedArray; from<U>( this: new (length: number) => Uint8ClampedArray, source: Iterable<U> | ArrayLike<U>, mapfn: (v: U, k: number) => number, thisArg?: unknown, ): Uint8ClampedArray; from(this: new (length: number) => Int16Array, source: Iterable<number> | ArrayLike<number>): Int16Array; from<U>( this: new (length: number) => Int16Array, source: Iterable<U> | ArrayLike<U>, mapfn: (v: U, k: number) => number, thisArg?: unknown, ): Int16Array; from(this: new (length: number) => Uint16Array, source: Iterable<number> | ArrayLike<number>): Uint16Array; from<U>( this: new (length: number) => Uint16Array, source: Iterable<U> | ArrayLike<U>, mapfn: (v: U, k: number) => number, thisArg?: unknown, ): Uint16Array; from(this: new (length: number) => Int32Array, source: Iterable<number> | ArrayLike<number>): Int32Array; from<U>( this: new (length: number) => Int32Array, source: Iterable<U> | ArrayLike<U>, mapfn: (v: U, k: number) => number, thisArg?: unknown, ): Int32Array; from(this: new (length: number) => Uint32Array, source: Iterable<number> | ArrayLike<number>): Uint32Array; from<U>( this: new (length: number) => Uint32Array, source: Iterable<U> | ArrayLike<U>, mapfn: (v: U, k: number) => number, thisArg?: unknown, ): Uint32Array; from(this: new (length: number) => BigInt64Array, source: Iterable<bigint> | ArrayLike<bigint>): BigInt64Array; from<U>( this: new (length: number) => BigInt64Array, source: Iterable<U> | ArrayLike<U>, mapfn: (v: U, k: number) => bigint, thisArg?: unknown, ): BigInt64Array; from(this: new (length: number) => BigUint64Array, source: Iterable<bigint> | ArrayLike<bigint>): BigUint64Array; from<U>( this: new (length: number) => BigUint64Array, source: Iterable<U> | ArrayLike<U>, mapfn: (v: U, k: number) => bigint, thisArg?: unknown, ): BigUint64Array; from(this: new (length: number) => Float32Array, source: Iterable<number> | ArrayLike<number>): Float32Array; from<U>( this: new (length: number) => Float32Array, source: Iterable<U> | ArrayLike<U>, mapfn: (v: U, k: number) => number, thisArg?: unknown, ): Float32Array; from(this: new (length: number) => Float64Array, source: Iterable<number> | ArrayLike<number>): Float64Array; from<U>( this: new (length: number) => Float64Array, source: Iterable<U> | ArrayLike<U>, mapfn: (v: U, k: number) => number, thisArg?: unknown, ): Float64Array; } interface TypedArrayPrototype { /** The ArrayBuffer instance referenced by the array. */ readonly buffer: ArrayBufferLike; /** The length in bytes of the array. */ readonly byteLength: number; /** The offset in bytes of the array. */ readonly byteOffset: number; /** * Returns the this object after copying a section of the array identified by start and end * to the same array starting at position target * @param target If target is negative, it is treated as length+target where length is the * length of the array. 
* @param start If start is negative, it is treated as length+start. If end is negative, it * is treated as length+end. * @param end If not specified, length of the this object is used as its default value. */ copyWithin<THIS extends TypedArray>(this: THIS, target: number, start: number, end?: number): THIS; /** Yields index, value pairs for every entry in the array. */ entries<T extends numeric>(this: TypedArray<T>): IterableIterator<[number, T]>; /** * Determines whether all the members of an array satisfy the specified test. * @param callbackfn A function that accepts up to three arguments. The every method calls * the callbackfn function for each element in the array until the callbackfn returns false, * or until the end of the array. * @param thisArg An object to which the this keyword can refer in the callbackfn function. * If thisArg is omitted, undefined is used as the this value. */ every<T extends numeric, THIS extends TypedArray<T>>( this: THIS, predicate: (value: T, index: number, array: THIS) => unknown, thisArg?: unknown, ): boolean; /** * Returns the this object after filling the section identified by start and end with value * @param value value to fill array section with * @param start index to start filling the array at. If start is negative, it is treated as * length+start where length is the length of the array. * @param end index to stop filling the array at. If end is negative, it is treated as * length+end. */ fill<T extends numeric, THIS extends TypedArray<T>>(this: THIS, value: T, start?: number, end?: number): THIS; /** * Returns the elements of an array that meet the condition specified in a callback function. * @param callbackfn A function that accepts up to three arguments. The filter method calls * the callbackfn function one time for each element in the array. * @param thisArg An object to which the this keyword can refer in the callbackfn function. * If thisArg is omitted, undefined is used as the this value. */ filter<T extends numeric, THIS extends TypedArray<T>>( this: THIS, predicate: (value: T, index: number, array: THIS) => unknown, thisArg?: unknown, ): THIS; /** * Returns the value of the first element in the array where predicate is true, and undefined * otherwise. * @param predicate find calls predicate once for each element of the array, in ascending * order, until it finds one where predicate returns true. If such an element is found, find * immediately returns that element value. Otherwise, find returns undefined. * @param thisArg If provided, it will be used as the this value for each invocation of * predicate. If it is not provided, undefined is used instead. */ find<T extends numeric, THIS extends TypedArray<T>>( this: THIS, predicate: (value: T, index: number, array: THIS) => unknown, thisArg?: unknown, ): T | undefined; /** * Returns the index of the first element in the array where predicate is true, and -1 * otherwise. * @param predicate find calls predicate once for each element of the array, in ascending * order, until it finds one where predicate returns true. If such an element is found, * findIndex immediately returns that element index. Otherwise, findIndex returns -1. * @param thisArg If provided, it will be used as the this value for each invocation of * predicate. If it is not provided, undefined is used instead. 
*/ findIndex<T extends numeric, THIS extends TypedArray<T>>( this: THIS, predicate: (value: T, index: number, array: THIS) => unknown, thisArg?: unknown, ): number; /** * Performs the specified action for each element in an array. * @param callbackfn A function that accepts up to three arguments. forEach calls the * callbackfn function one time for each element in the array. * @param thisArg An object to which the this keyword can refer in the callbackfn function. * If thisArg is omitted, undefined is used as the this value. */ forEach<T extends numeric, THIS extends TypedArray<T>>( this: THIS, callbackfn: (value: T, index: number, array: THIS) => void, thisArg?: unknown, ): void; /** * Determines whether an array includes a certain element, returning true or false as appropriate. * @param searchElement The element to search for. * @param fromIndex The position in this array at which to begin searching for searchElement. */ includes<T extends numeric>(this: TypedArray<T>, searchElement: T, fromIndex?: number): boolean; /** * Returns the index of the first occurrence of a value in an array. * @param searchElement The value to locate in the array. * @param fromIndex The array index at which to begin the search. If fromIndex is omitted, the * search starts at index 0. */ indexOf<T extends numeric>(this: TypedArray<T>, searchElement: T, fromIndex?: number): boolean; /** * Adds all the elements of an array separated by the specified separator string. * @param separator A string used to separate one element of an array from the next in the * resulting String. If omitted, the array elements are separated with a comma. */ join(this: TypedArray, separator?: string): string; /** Yields each index in the array. */ keys(this: TypedArray): IterableIterator<number>; /** * Returns the index of the last occurrence of a value in an array. * @param searchElement The value to locate in the array. * @param fromIndex The array index at which to begin the search. If fromIndex is omitted, the * search starts at index 0. */ lastIndexOf<T extends numeric>(this: TypedArray<T>, searchElement: T, fromIndex?: number): boolean; /** The length of the array. */ readonly length: number; /** * Calls a defined callback function on each element of an array, and returns an array that * contains the results. * @param callbackfn A function that accepts up to three arguments. The map method calls the * callbackfn function one time for each element in the array. * @param thisArg An object to which the this keyword can refer in the callbackfn function. * If thisArg is omitted, undefined is used as the this value. */ map<T extends numeric, THIS extends TypedArray>( this: THIS, mapper: (value: T, index: number, array: THIS) => T, thisArg?: unknown, ): THIS; /** * Calls the specified callback function for all the elements in an array. The return value of * the callback function is the accumulated result, and is provided as an argument in the next * call to the callback function. * @param callbackfn A function that accepts up to four arguments. The reduce method calls the * callbackfn function one time for each element in the array. * @param initialValue If initialValue is specified, it is used as the initial value to start * the accumulation. The first call to the callbackfn function provides this value as an argument * instead of an array value. 
*/ reduce<T extends numeric, THIS extends TypedArray<T>>( this: THIS, reducer: (previousValue: T, currentValue: T, currentIndex: number, array: THIS) => T, ): T; reduce<T extends numeric, U, THIS extends TypedArray<T>>( this: THIS, reducer: (previousValue: U, currentValue: T, currentIndex: number, array: THIS) => U, initialValue: U, ): U; /** * Calls the specified callback function for all the elements in an array, in descending order. * The return value of the callback function is the accumulated result, and is provided as an * argument in the next call to the callback function. * @param callbackfn A function that accepts up to four arguments. The reduceRight method calls * the callbackfn function one time for each element in the array. * @param initialValue If initialValue is specified, it is used as the initial value to start * the accumulation. The first call to the callbackfn function provides this value as an * argument instead of an array value. */ reduceRight<T extends numeric, THIS extends TypedArray<T>>( this: THIS, reducer: (previousValue: T, currentValue: T, currentIndex: number, array: THIS) => T, ): T; reduceRight<T extends numeric, U, THIS extends TypedArray<T>>( this: THIS, reducer: (previousValue: U, currentValue: T, currentIndex: number, array: THIS) => U, initialValue: U, ): U; /** Reverses the elements in the array. */ reverse<THIS extends TypedArray>(this: THIS): THIS; /** * Sets a value or an array of values. * @param array A typed or untyped array of values to set. * @param offset The index in the current array at which the values are to be written. */ set<T extends numeric>(this: TypedArray<T>, array: ArrayLike<T>, offset?: number): void; /** * Returns a section of an array. * @param start The beginning of the specified portion of the array. * @param end The end of the specified portion of the array. */ slice<THIS extends TypedArray>(this: THIS, start?: number, end?: number): THIS; /** * Determines whether the specified callback function returns true for any element of an array. * @param callbackfn A function that accepts up to three arguments. The some method calls the * callbackfn function for each element in the array until the callbackfn returns true, or until * the end of the array. * @param thisArg An object to which the this keyword can refer in the callbackfn function. * If thisArg is omitted, undefined is used as the this value. */ some<T extends numeric, THIS extends TypedArray<T>>( this: THIS, predicate: (value: T, index: number, array: THIS) => unknown, thisArg?: unknown, ): boolean; /** * Sorts the array. * @param compareFn The function used to determine the order of the elements. If omitted, the elements are sorted in ascending order. */ sort<T extends numeric, THIS extends TypedArray<T>>(this: THIS, comparator?: (a: T, b: T) => number): THIS; /** * Gets a new subview of the ArrayBuffer store for this array, referencing the elements * at begin, inclusive, up to end, exclusive. * @param begin The index of the beginning of the array. * @param end The index of the end of the array. */ subarray<THIS extends TypedArray>(this: THIS, begin?: number, end?: number): THIS; /** Converts the array to a string by using the current locale. */ toLocaleString(this: TypedArray, locales?: string | string[], options?: Intl.NumberFormatOptions): string; /** Returns a string representation of the array. */ toString(): string; /** Yields each value in the array. */ values<T extends numeric>(this: TypedArray<T>): IterableIterator<T>; /** Yields each value in the array. 
*/ [Symbol.iterator]<T extends numeric>(this: TypedArray<T>): IterableIterator<T>; readonly [Symbol.toStringTag]: string | undefined; } // ------------------------ >8 ------------------------ // autogenerated by scripts/collect-intrinsics.ts // do not edit! 2020-07-08T00:53:03.057Z // tslint:disable: ban-types // prettier-ignore declare namespace GetIntrinsic { interface Intrinsics { '%Array%': ArrayConstructor; '%ArrayBuffer%': ArrayBufferConstructor; '%ArrayBufferPrototype%': ArrayBuffer; '%ArrayIteratorPrototype%': IterableIterator<any>; '%ArrayPrototype%': typeof Array.prototype; '%ArrayProto_entries%': typeof Array.prototype.entries; '%ArrayProto_forEach%': typeof Array.prototype.forEach; '%ArrayProto_keys%': typeof Array.prototype.keys; '%ArrayProto_values%': typeof Array.prototype.values; '%AsyncFromSyncIteratorPrototype%': AsyncGenerator<any>; '%AsyncFunction%': FunctionConstructor; '%AsyncFunctionPrototype%': typeof Function.prototype; '%AsyncGenerator%': AsyncGeneratorFunction; '%AsyncGeneratorFunction%': AsyncGeneratorFunctionConstructor; '%AsyncGeneratorPrototype%': AsyncGenerator<any>; '%AsyncIteratorPrototype%': AsyncIterable<any>; '%Atomics%': Atomics; '%Boolean%': BooleanConstructor; '%BooleanPrototype%': typeof Boolean.prototype; '%DataView%': DataViewConstructor; '%DataViewPrototype%': DataView; '%Date%': DateConstructor; '%DatePrototype%': Date; '%decodeURI%': typeof decodeURI; '%decodeURIComponent%': typeof decodeURIComponent; '%encodeURI%': typeof encodeURI; '%encodeURIComponent%': typeof encodeURIComponent; '%Error%': ErrorConstructor; '%ErrorPrototype%': Error; '%eval%': typeof eval; '%EvalError%': EvalErrorConstructor; '%EvalErrorPrototype%': EvalError; '%Float32Array%': Float32ArrayConstructor; '%Float32ArrayPrototype%': Float32Array; '%Float64Array%': Float64ArrayConstructor; '%Float64ArrayPrototype%': Float64Array; '%Function%': FunctionConstructor; '%FunctionPrototype%': typeof Function.prototype; '%Generator%': GeneratorFunction; '%GeneratorFunction%': GeneratorFunctionConstructor; '%GeneratorPrototype%': Generator<any>; '%Int8Array%': Int8ArrayConstructor; '%Int8ArrayPrototype%': Int8Array; '%Int16Array%': Int16ArrayConstructor; '%Int16ArrayPrototype%': Int16Array; '%Int32Array%': Int32ArrayConstructor; '%Int32ArrayPrototype%': Int32Array; '%isFinite%': typeof isFinite; '%isNaN%': typeof isNaN; '%IteratorPrototype%': Iterable<any>; '%JSON%': JSON; '%JSONParse%': typeof JSON.parse; '%Map%': MapConstructor; '%MapIteratorPrototype%': IterableIterator<any>; '%MapPrototype%': typeof Map.prototype; '%Math%': Math; '%Number%': NumberConstructor; '%NumberPrototype%': typeof Number.prototype; '%Object%': ObjectConstructor; '%ObjectPrototype%': typeof Object.prototype; '%ObjProto_toString%': typeof Object.prototype.toString; '%ObjProto_valueOf%': typeof Object.prototype.valueOf; '%parseFloat%': typeof parseFloat; '%parseInt%': typeof parseInt; '%Promise%': PromiseConstructor; '%PromisePrototype%': typeof Promise.prototype; '%PromiseProto_then%': typeof Promise.prototype.then; '%Promise_all%': typeof Promise.all; '%Promise_reject%': typeof Promise.reject; '%Promise_resolve%': typeof Promise.resolve; '%Proxy%': ProxyConstructor; '%RangeError%': RangeErrorConstructor; '%RangeErrorPrototype%': RangeError; '%ReferenceError%': ReferenceErrorConstructor; '%ReferenceErrorPrototype%': ReferenceError; '%Reflect%': typeof Reflect; '%RegExp%': RegExpConstructor; '%RegExpPrototype%': RegExp; '%Set%': SetConstructor; '%SetIteratorPrototype%': IterableIterator<any>; '%SetPrototype%': 
typeof Set.prototype; '%SharedArrayBuffer%': SharedArrayBufferConstructor; '%SharedArrayBufferPrototype%': SharedArrayBuffer; '%String%': StringConstructor; '%StringIteratorPrototype%': IterableIterator<string>; '%StringPrototype%': typeof String.prototype; '%Symbol%': SymbolConstructor; '%SymbolPrototype%': typeof Symbol.prototype; '%SyntaxError%': SyntaxErrorConstructor; '%SyntaxErrorPrototype%': SyntaxError; '%ThrowTypeError%': () => never; '%TypedArray%': TypedArrayConstructor; '%TypedArrayPrototype%': TypedArrayPrototype; '%TypeError%': TypeErrorConstructor; '%TypeErrorPrototype%': TypeError; '%Uint8Array%': Uint8ArrayConstructor; '%Uint8ArrayPrototype%': Uint8Array; '%Uint8ClampedArray%': Uint8ClampedArrayConstructor; '%Uint8ClampedArrayPrototype%': Uint8ClampedArray; '%Uint16Array%': Uint16ArrayConstructor; '%Uint16ArrayPrototype%': Uint16Array; '%Uint32Array%': Uint32ArrayConstructor; '%Uint32ArrayPrototype%': Uint32Array; '%URIError%': URIErrorConstructor; '%URIErrorPrototype%': URIError; '%WeakMap%': WeakMapConstructor; '%WeakMapPrototype%': typeof WeakMap.prototype; '%WeakSet%': WeakSetConstructor; '%WeakSetPrototype%': typeof WeakSet.prototype; } interface Intrinsics { '%Array.prototype%': typeof Array.prototype; '%Array.prototype.length%': typeof Array.prototype.length; '%Array.prototype.concat%': typeof Array.prototype.concat; '%Array.prototype.copyWithin%': typeof Array.prototype.copyWithin; '%Array.prototype.fill%': typeof Array.prototype.fill; '%Array.prototype.find%': typeof Array.prototype.find; '%Array.prototype.findIndex%': typeof Array.prototype.findIndex; '%Array.prototype.lastIndexOf%': typeof Array.prototype.lastIndexOf; '%Array.prototype.pop%': typeof Array.prototype.pop; '%Array.prototype.push%': typeof Array.prototype.push; '%Array.prototype.reverse%': typeof Array.prototype.reverse; '%Array.prototype.shift%': typeof Array.prototype.shift; '%Array.prototype.unshift%': typeof Array.prototype.unshift; '%Array.prototype.slice%': typeof Array.prototype.slice; '%Array.prototype.sort%': typeof Array.prototype.sort; '%Array.prototype.splice%': typeof Array.prototype.splice; '%Array.prototype.includes%': typeof Array.prototype.includes; '%Array.prototype.indexOf%': typeof Array.prototype.indexOf; '%Array.prototype.join%': typeof Array.prototype.join; '%Array.prototype.keys%': typeof Array.prototype.keys; '%Array.prototype.entries%': typeof Array.prototype.entries; '%Array.prototype.values%': typeof Array.prototype.values; '%Array.prototype.forEach%': typeof Array.prototype.forEach; '%Array.prototype.filter%': typeof Array.prototype.filter; '%Array.prototype.flat%': typeof Array.prototype.flat; '%Array.prototype.flatMap%': typeof Array.prototype.flatMap; '%Array.prototype.map%': typeof Array.prototype.map; '%Array.prototype.every%': typeof Array.prototype.every; '%Array.prototype.some%': typeof Array.prototype.some; '%Array.prototype.reduce%': typeof Array.prototype.reduce; '%Array.prototype.reduceRight%': typeof Array.prototype.reduceRight; '%Array.prototype.toLocaleString%': typeof Array.prototype.toLocaleString; '%Array.prototype.toString%': typeof Array.prototype.toString; '%Array.isArray%': typeof Array.isArray; '%Array.from%': typeof Array.from; '%Array.of%': typeof Array.of; '%ArrayBuffer.prototype%': ArrayBuffer; '%ArrayBuffer.prototype.byteLength%': (this: ArrayBuffer) => typeof ArrayBuffer.prototype.byteLength; '%ArrayBuffer.prototype.slice%': typeof ArrayBuffer.prototype.slice; '%ArrayBuffer.isView%': typeof ArrayBuffer.isView; 
'%ArrayBufferPrototype.byteLength%': (this: ArrayBuffer) => typeof ArrayBuffer.prototype.byteLength; '%ArrayBufferPrototype.slice%': typeof ArrayBuffer.prototype.slice; '%ArrayIteratorPrototype.next%': IterableIterator<any>['next']; '%ArrayPrototype.length%': typeof Array.prototype.length; '%ArrayPrototype.concat%': typeof Array.prototype.concat; '%ArrayPrototype.copyWithin%': typeof Array.prototype.copyWithin; '%ArrayPrototype.fill%': typeof Array.prototype.fill; '%ArrayPrototype.find%': typeof Array.prototype.find; '%ArrayPrototype.findIndex%': typeof Array.prototype.findIndex; '%ArrayPrototype.lastIndexOf%': typeof Array.prototype.lastIndexOf; '%ArrayPrototype.pop%': typeof Array.prototype.pop; '%ArrayPrototype.push%': typeof Array.prototype.push; '%ArrayPrototype.reverse%': typeof Array.prototype.reverse; '%ArrayPrototype.shift%': typeof Array.prototype.shift; '%ArrayPrototype.unshift%': typeof Array.prototype.unshift; '%ArrayPrototype.slice%': typeof Array.prototype.slice; '%ArrayPrototype.sort%': typeof Array.prototype.sort; '%ArrayPrototype.splice%': typeof Array.prototype.splice; '%ArrayPrototype.includes%': typeof Array.prototype.includes; '%ArrayPrototype.indexOf%': typeof Array.prototype.indexOf; '%ArrayPrototype.join%': typeof Array.prototype.join; '%ArrayPrototype.keys%': typeof Array.prototype.keys; '%ArrayPrototype.entries%': typeof Array.prototype.entries; '%ArrayPrototype.values%': typeof Array.prototype.values; '%ArrayPrototype.forEach%': typeof Array.prototype.forEach; '%ArrayPrototype.filter%': typeof Array.prototype.filter; '%ArrayPrototype.flat%': typeof Array.prototype.flat; '%ArrayPrototype.flatMap%': typeof Array.prototype.flatMap; '%ArrayPrototype.map%': typeof Array.prototype.map; '%ArrayPrototype.every%': typeof Array.prototype.every; '%ArrayPrototype.some%': typeof Array.prototype.some; '%ArrayPrototype.reduce%': typeof Array.prototype.reduce; '%ArrayPrototype.reduceRight%': typeof Array.prototype.reduceRight; '%ArrayPrototype.toLocaleString%': typeof Array.prototype.toLocaleString; '%ArrayPrototype.toString%': typeof Array.prototype.toString; '%AsyncFromSyncIteratorPrototype.next%': AsyncGenerator<any>['next']; '%AsyncFromSyncIteratorPrototype.return%': AsyncGenerator<any>['return']; '%AsyncFromSyncIteratorPrototype.throw%': AsyncGenerator<any>['throw']; '%AsyncFunction.prototype%': typeof Function.prototype; '%AsyncGenerator.prototype%': AsyncGenerator<any>; '%AsyncGenerator.prototype.next%': AsyncGenerator<any>['next']; '%AsyncGenerator.prototype.return%': AsyncGenerator<any>['return']; '%AsyncGenerator.prototype.throw%': AsyncGenerator<any>['throw']; '%AsyncGeneratorFunction.prototype%': AsyncGeneratorFunction; '%AsyncGeneratorFunction.prototype.prototype%': AsyncGenerator<any>; '%AsyncGeneratorFunction.prototype.prototype.next%': AsyncGenerator<any>['next']; '%AsyncGeneratorFunction.prototype.prototype.return%': AsyncGenerator<any>['return']; '%AsyncGeneratorFunction.prototype.prototype.throw%': AsyncGenerator<any>['throw']; '%AsyncGeneratorPrototype.next%': AsyncGenerator<any>['next']; '%AsyncGeneratorPrototype.return%': AsyncGenerator<any>['return']; '%AsyncGeneratorPrototype.throw%': AsyncGenerator<any>['throw']; '%Atomics.load%': typeof Atomics.load; '%Atomics.store%': typeof Atomics.store; '%Atomics.add%': typeof Atomics.add; '%Atomics.sub%': typeof Atomics.sub; '%Atomics.and%': typeof Atomics.and; '%Atomics.or%': typeof Atomics.or; '%Atomics.xor%': typeof Atomics.xor; '%Atomics.exchange%': typeof Atomics.exchange; '%Atomics.compareExchange%': typeof 
Atomics.compareExchange; '%Atomics.isLockFree%': typeof Atomics.isLockFree; '%Atomics.wait%': typeof Atomics.wait; '%Atomics.notify%': typeof Atomics.notify; '%Boolean.prototype%': typeof Boolean.prototype; '%Boolean.prototype.toString%': typeof Boolean.prototype.toString; '%Boolean.prototype.valueOf%': typeof Boolean.prototype.valueOf; '%BooleanPrototype.toString%': typeof Boolean.prototype.toString; '%BooleanPrototype.valueOf%': typeof Boolean.prototype.valueOf; '%DataView.prototype%': DataView; '%DataView.prototype.buffer%': (this: DataView) => typeof DataView.prototype.buffer; '%DataView.prototype.byteLength%': (this: DataView) => typeof DataView.prototype.byteLength; '%DataView.prototype.byteOffset%': (this: DataView) => typeof DataView.prototype.byteOffset; '%DataView.prototype.getInt8%': typeof DataView.prototype.getInt8; '%DataView.prototype.setInt8%': typeof DataView.prototype.setInt8; '%DataView.prototype.getUint8%': typeof DataView.prototype.getUint8; '%DataView.prototype.setUint8%': typeof DataView.prototype.setUint8; '%DataView.prototype.getInt16%': typeof DataView.prototype.getInt16; '%DataView.prototype.setInt16%': typeof DataView.prototype.setInt16; '%DataView.prototype.getUint16%': typeof DataView.prototype.getUint16; '%DataView.prototype.setUint16%': typeof DataView.prototype.setUint16; '%DataView.prototype.getInt32%': typeof DataView.prototype.getInt32; '%DataView.prototype.setInt32%': typeof DataView.prototype.setInt32; '%DataView.prototype.getUint32%': typeof DataView.prototype.getUint32; '%DataView.prototype.setUint32%': typeof DataView.prototype.setUint32; '%DataView.prototype.getFloat32%': typeof DataView.prototype.getFloat32; '%DataView.prototype.setFloat32%': typeof DataView.prototype.setFloat32; '%DataView.prototype.getFloat64%': typeof DataView.prototype.getFloat64; '%DataView.prototype.setFloat64%': typeof DataView.prototype.setFloat64; '%DataView.prototype.getBigInt64%': typeof DataView.prototype.getBigInt64; '%DataView.prototype.setBigInt64%': typeof DataView.prototype.setBigInt64; '%DataView.prototype.getBigUint64%': typeof DataView.prototype.getBigUint64; '%DataView.prototype.setBigUint64%': typeof DataView.prototype.setBigUint64; '%DataViewPrototype.buffer%': (this: DataView) => typeof DataView.prototype.buffer; '%DataViewPrototype.byteLength%': (this: DataView) => typeof DataView.prototype.byteLength; '%DataViewPrototype.byteOffset%': (this: DataView) => typeof DataView.prototype.byteOffset; '%DataViewPrototype.getInt8%': typeof DataView.prototype.getInt8; '%DataViewPrototype.setInt8%': typeof DataView.prototype.setInt8; '%DataViewPrototype.getUint8%': typeof DataView.prototype.getUint8; '%DataViewPrototype.setUint8%': typeof DataView.prototype.setUint8; '%DataViewPrototype.getInt16%': typeof DataView.prototype.getInt16; '%DataViewPrototype.setInt16%': typeof DataView.prototype.setInt16; '%DataViewPrototype.getUint16%': typeof DataView.prototype.getUint16; '%DataViewPrototype.setUint16%': typeof DataView.prototype.setUint16; '%DataViewPrototype.getInt32%': typeof DataView.prototype.getInt32; '%DataViewPrototype.setInt32%': typeof DataView.prototype.setInt32; '%DataViewPrototype.getUint32%': typeof DataView.prototype.getUint32; '%DataViewPrototype.setUint32%': typeof DataView.prototype.setUint32; '%DataViewPrototype.getFloat32%': typeof DataView.prototype.getFloat32; '%DataViewPrototype.setFloat32%': typeof DataView.prototype.setFloat32; '%DataViewPrototype.getFloat64%': typeof DataView.prototype.getFloat64; '%DataViewPrototype.setFloat64%': typeof 
DataView.prototype.setFloat64; '%DataViewPrototype.getBigInt64%': typeof DataView.prototype.getBigInt64; '%DataViewPrototype.setBigInt64%': typeof DataView.prototype.setBigInt64; '%DataViewPrototype.getBigUint64%': typeof DataView.prototype.getBigUint64; '%DataViewPrototype.setBigUint64%': typeof DataView.prototype.setBigUint64; '%Date.prototype%': Date; '%Date.prototype.toString%': typeof Date.prototype.toString; '%Date.prototype.toDateString%': typeof Date.prototype.toDateString; '%Date.prototype.toTimeString%': typeof Date.prototype.toTimeString; '%Date.prototype.toISOString%': typeof Date.prototype.toISOString; '%Date.prototype.toUTCString%': typeof Date.prototype.toUTCString; '%Date.prototype.getDate%': typeof Date.prototype.getDate; '%Date.prototype.setDate%': typeof Date.prototype.setDate; '%Date.prototype.getDay%': typeof Date.prototype.getDay; '%Date.prototype.getFullYear%': typeof Date.prototype.getFullYear; '%Date.prototype.setFullYear%': typeof Date.prototype.setFullYear; '%Date.prototype.getHours%': typeof Date.prototype.getHours; '%Date.prototype.setHours%': typeof Date.prototype.setHours; '%Date.prototype.getMilliseconds%': typeof Date.prototype.getMilliseconds; '%Date.prototype.setMilliseconds%': typeof Date.prototype.setMilliseconds; '%Date.prototype.getMinutes%': typeof Date.prototype.getMinutes; '%Date.prototype.setMinutes%': typeof Date.prototype.setMinutes;<|fim▁hole|> '%Date.prototype.setMonth%': typeof Date.prototype.setMonth; '%Date.prototype.getSeconds%': typeof Date.prototype.getSeconds; '%Date.prototype.setSeconds%': typeof Date.prototype.setSeconds; '%Date.prototype.getTime%': typeof Date.prototype.getTime; '%Date.prototype.setTime%': typeof Date.prototype.setTime; '%Date.prototype.getTimezoneOffset%': typeof Date.prototype.getTimezoneOffset; '%Date.prototype.getUTCDate%': typeof Date.prototype.getUTCDate; '%Date.prototype.setUTCDate%': typeof Date.prototype.setUTCDate; '%Date.prototype.getUTCDay%': typeof Date.prototype.getUTCDay; '%Date.prototype.getUTCFullYear%': typeof Date.prototype.getUTCFullYear; '%Date.prototype.setUTCFullYear%': typeof Date.prototype.setUTCFullYear; '%Date.prototype.getUTCHours%': typeof Date.prototype.getUTCHours; '%Date.prototype.setUTCHours%': typeof Date.prototype.setUTCHours; '%Date.prototype.getUTCMilliseconds%': typeof Date.prototype.getUTCMilliseconds; '%Date.prototype.setUTCMilliseconds%': typeof Date.prototype.setUTCMilliseconds; '%Date.prototype.getUTCMinutes%': typeof Date.prototype.getUTCMinutes; '%Date.prototype.setUTCMinutes%': typeof Date.prototype.setUTCMinutes; '%Date.prototype.getUTCMonth%': typeof Date.prototype.getUTCMonth; '%Date.prototype.setUTCMonth%': typeof Date.prototype.setUTCMonth; '%Date.prototype.getUTCSeconds%': typeof Date.prototype.getUTCSeconds; '%Date.prototype.setUTCSeconds%': typeof Date.prototype.setUTCSeconds; '%Date.prototype.valueOf%': typeof Date.prototype.valueOf; '%Date.prototype.toJSON%': typeof Date.prototype.toJSON; '%Date.prototype.toLocaleString%': typeof Date.prototype.toLocaleString; '%Date.prototype.toLocaleDateString%': typeof Date.prototype.toLocaleDateString; '%Date.prototype.toLocaleTimeString%': typeof Date.prototype.toLocaleTimeString; '%Date.now%': typeof Date.now; '%Date.parse%': typeof Date.parse; '%Date.UTC%': typeof Date.UTC; '%DatePrototype.toString%': typeof Date.prototype.toString; '%DatePrototype.toDateString%': typeof Date.prototype.toDateString; '%DatePrototype.toTimeString%': typeof Date.prototype.toTimeString; '%DatePrototype.toISOString%': typeof 
Date.prototype.toISOString; '%DatePrototype.toUTCString%': typeof Date.prototype.toUTCString; '%DatePrototype.getDate%': typeof Date.prototype.getDate; '%DatePrototype.setDate%': typeof Date.prototype.setDate; '%DatePrototype.getDay%': typeof Date.prototype.getDay; '%DatePrototype.getFullYear%': typeof Date.prototype.getFullYear; '%DatePrototype.setFullYear%': typeof Date.prototype.setFullYear; '%DatePrototype.getHours%': typeof Date.prototype.getHours; '%DatePrototype.setHours%': typeof Date.prototype.setHours; '%DatePrototype.getMilliseconds%': typeof Date.prototype.getMilliseconds; '%DatePrototype.setMilliseconds%': typeof Date.prototype.setMilliseconds; '%DatePrototype.getMinutes%': typeof Date.prototype.getMinutes; '%DatePrototype.setMinutes%': typeof Date.prototype.setMinutes; '%DatePrototype.getMonth%': typeof Date.prototype.getMonth; '%DatePrototype.setMonth%': typeof Date.prototype.setMonth; '%DatePrototype.getSeconds%': typeof Date.prototype.getSeconds; '%DatePrototype.setSeconds%': typeof Date.prototype.setSeconds; '%DatePrototype.getTime%': typeof Date.prototype.getTime; '%DatePrototype.setTime%': typeof Date.prototype.setTime; '%DatePrototype.getTimezoneOffset%': typeof Date.prototype.getTimezoneOffset; '%DatePrototype.getUTCDate%': typeof Date.prototype.getUTCDate; '%DatePrototype.setUTCDate%': typeof Date.prototype.setUTCDate; '%DatePrototype.getUTCDay%': typeof Date.prototype.getUTCDay; '%DatePrototype.getUTCFullYear%': typeof Date.prototype.getUTCFullYear; '%DatePrototype.setUTCFullYear%': typeof Date.prototype.setUTCFullYear; '%DatePrototype.getUTCHours%': typeof Date.prototype.getUTCHours; '%DatePrototype.setUTCHours%': typeof Date.prototype.setUTCHours; '%DatePrototype.getUTCMilliseconds%': typeof Date.prototype.getUTCMilliseconds; '%DatePrototype.setUTCMilliseconds%': typeof Date.prototype.setUTCMilliseconds; '%DatePrototype.getUTCMinutes%': typeof Date.prototype.getUTCMinutes; '%DatePrototype.setUTCMinutes%': typeof Date.prototype.setUTCMinutes; '%DatePrototype.getUTCMonth%': typeof Date.prototype.getUTCMonth; '%DatePrototype.setUTCMonth%': typeof Date.prototype.setUTCMonth; '%DatePrototype.getUTCSeconds%': typeof Date.prototype.getUTCSeconds; '%DatePrototype.setUTCSeconds%': typeof Date.prototype.setUTCSeconds; '%DatePrototype.valueOf%': typeof Date.prototype.valueOf; '%DatePrototype.toJSON%': typeof Date.prototype.toJSON; '%DatePrototype.toLocaleString%': typeof Date.prototype.toLocaleString; '%DatePrototype.toLocaleDateString%': typeof Date.prototype.toLocaleDateString; '%DatePrototype.toLocaleTimeString%': typeof Date.prototype.toLocaleTimeString; '%Error.prototype%': Error; '%Error.prototype.name%': typeof Error.prototype.name; '%Error.prototype.message%': typeof Error.prototype.message; '%Error.prototype.toString%': typeof Error.prototype.toString; '%ErrorPrototype.name%': typeof Error.prototype.name; '%ErrorPrototype.message%': typeof Error.prototype.message; '%ErrorPrototype.toString%': typeof Error.prototype.toString; '%EvalError.prototype%': EvalError; '%EvalError.prototype.name%': typeof EvalError.prototype.name; '%EvalError.prototype.message%': typeof EvalError.prototype.message; '%EvalErrorPrototype.name%': typeof EvalError.prototype.name; '%EvalErrorPrototype.message%': typeof EvalError.prototype.message; '%Float32Array.prototype%': Float32Array; '%Float32Array.prototype.BYTES_PER_ELEMENT%': typeof Float32Array.prototype.BYTES_PER_ELEMENT; '%Float32Array.BYTES_PER_ELEMENT%': typeof Float32Array.BYTES_PER_ELEMENT; 
'%Float32ArrayPrototype.BYTES_PER_ELEMENT%': typeof Float32Array.prototype.BYTES_PER_ELEMENT; '%Float64Array.prototype%': Float64Array; '%Float64Array.prototype.BYTES_PER_ELEMENT%': typeof Float64Array.prototype.BYTES_PER_ELEMENT; '%Float64Array.BYTES_PER_ELEMENT%': typeof Float64Array.BYTES_PER_ELEMENT; '%Float64ArrayPrototype.BYTES_PER_ELEMENT%': typeof Float64Array.prototype.BYTES_PER_ELEMENT; '%Function.prototype%': typeof Function.prototype; '%Function.prototype.apply%': typeof Function.prototype.apply; '%Function.prototype.bind%': typeof Function.prototype.bind; '%Function.prototype.call%': typeof Function.prototype.call; '%Function.prototype.toString%': typeof Function.prototype.toString; '%FunctionPrototype.apply%': typeof Function.prototype.apply; '%FunctionPrototype.bind%': typeof Function.prototype.bind; '%FunctionPrototype.call%': typeof Function.prototype.call; '%FunctionPrototype.toString%': typeof Function.prototype.toString; '%Generator.prototype%': Generator<any>; '%Generator.prototype.next%': Generator<any>['next']; '%Generator.prototype.return%': Generator<any>['return']; '%Generator.prototype.throw%': Generator<any>['throw']; '%GeneratorFunction.prototype%': GeneratorFunction; '%GeneratorFunction.prototype.prototype%': Generator<any>; '%GeneratorFunction.prototype.prototype.next%': Generator<any>['next']; '%GeneratorFunction.prototype.prototype.return%': Generator<any>['return']; '%GeneratorFunction.prototype.prototype.throw%': Generator<any>['throw']; '%GeneratorPrototype.next%': Generator<any>['next']; '%GeneratorPrototype.return%': Generator<any>['return']; '%GeneratorPrototype.throw%': Generator<any>['throw']; '%Int8Array.prototype%': Int8Array; '%Int8Array.prototype.BYTES_PER_ELEMENT%': typeof Int8Array.prototype.BYTES_PER_ELEMENT; '%Int8Array.BYTES_PER_ELEMENT%': typeof Int8Array.BYTES_PER_ELEMENT; '%Int8ArrayPrototype.BYTES_PER_ELEMENT%': typeof Int8Array.prototype.BYTES_PER_ELEMENT; '%Int16Array.prototype%': Int16Array; '%Int16Array.prototype.BYTES_PER_ELEMENT%': typeof Int16Array.prototype.BYTES_PER_ELEMENT; '%Int16Array.BYTES_PER_ELEMENT%': typeof Int16Array.BYTES_PER_ELEMENT; '%Int16ArrayPrototype.BYTES_PER_ELEMENT%': typeof Int16Array.prototype.BYTES_PER_ELEMENT; '%Int32Array.prototype%': Int32Array; '%Int32Array.prototype.BYTES_PER_ELEMENT%': typeof Int32Array.prototype.BYTES_PER_ELEMENT; '%Int32Array.BYTES_PER_ELEMENT%': typeof Int32Array.BYTES_PER_ELEMENT; '%Int32ArrayPrototype.BYTES_PER_ELEMENT%': typeof Int32Array.prototype.BYTES_PER_ELEMENT; '%JSON.parse%': typeof JSON.parse; '%JSON.stringify%': typeof JSON.stringify; '%Map.prototype%': typeof Map.prototype; '%Map.prototype.get%': typeof Map.prototype.get; '%Map.prototype.set%': typeof Map.prototype.set; '%Map.prototype.has%': typeof Map.prototype.has; '%Map.prototype.delete%': typeof Map.prototype.delete; '%Map.prototype.clear%': typeof Map.prototype.clear; '%Map.prototype.entries%': typeof Map.prototype.entries; '%Map.prototype.forEach%': typeof Map.prototype.forEach; '%Map.prototype.keys%': typeof Map.prototype.keys; '%Map.prototype.size%': (this: Map<any, any>) => typeof Map.prototype.size; '%Map.prototype.values%': typeof Map.prototype.values; '%MapIteratorPrototype.next%': IterableIterator<any>['next']; '%MapPrototype.get%': typeof Map.prototype.get; '%MapPrototype.set%': typeof Map.prototype.set; '%MapPrototype.has%': typeof Map.prototype.has; '%MapPrototype.delete%': typeof Map.prototype.delete; '%MapPrototype.clear%': typeof Map.prototype.clear; '%MapPrototype.entries%': typeof 
Map.prototype.entries; '%MapPrototype.forEach%': typeof Map.prototype.forEach; '%MapPrototype.keys%': typeof Map.prototype.keys; '%MapPrototype.size%': (this: Map<any, any>) => typeof Map.prototype.size; '%MapPrototype.values%': typeof Map.prototype.values; '%Math.abs%': typeof Math.abs; '%Math.acos%': typeof Math.acos; '%Math.acosh%': typeof Math.acosh; '%Math.asin%': typeof Math.asin; '%Math.asinh%': typeof Math.asinh; '%Math.atan%': typeof Math.atan; '%Math.atanh%': typeof Math.atanh; '%Math.atan2%': typeof Math.atan2; '%Math.ceil%': typeof Math.ceil; '%Math.cbrt%': typeof Math.cbrt; '%Math.expm1%': typeof Math.expm1; '%Math.clz32%': typeof Math.clz32; '%Math.cos%': typeof Math.cos; '%Math.cosh%': typeof Math.cosh; '%Math.exp%': typeof Math.exp; '%Math.floor%': typeof Math.floor; '%Math.fround%': typeof Math.fround; '%Math.hypot%': typeof Math.hypot; '%Math.imul%': typeof Math.imul; '%Math.log%': typeof Math.log; '%Math.log1p%': typeof Math.log1p; '%Math.log2%': typeof Math.log2; '%Math.log10%': typeof Math.log10; '%Math.max%': typeof Math.max; '%Math.min%': typeof Math.min; '%Math.pow%': typeof Math.pow; '%Math.random%': typeof Math.random; '%Math.round%': typeof Math.round; '%Math.sign%': typeof Math.sign; '%Math.sin%': typeof Math.sin; '%Math.sinh%': typeof Math.sinh; '%Math.sqrt%': typeof Math.sqrt; '%Math.tan%': typeof Math.tan; '%Math.tanh%': typeof Math.tanh; '%Math.trunc%': typeof Math.trunc; '%Math.E%': typeof Math.E; '%Math.LN10%': typeof Math.LN10; '%Math.LN2%': typeof Math.LN2; '%Math.LOG10E%': typeof Math.LOG10E; '%Math.LOG2E%': typeof Math.LOG2E; '%Math.PI%': typeof Math.PI; '%Math.SQRT1_2%': typeof Math.SQRT1_2; '%Math.SQRT2%': typeof Math.SQRT2; '%Number.prototype%': typeof Number.prototype; '%Number.prototype.toExponential%': typeof Number.prototype.toExponential; '%Number.prototype.toFixed%': typeof Number.prototype.toFixed; '%Number.prototype.toPrecision%': typeof Number.prototype.toPrecision; '%Number.prototype.toString%': typeof Number.prototype.toString; '%Number.prototype.valueOf%': typeof Number.prototype.valueOf; '%Number.prototype.toLocaleString%': typeof Number.prototype.toLocaleString; '%Number.isFinite%': typeof Number.isFinite; '%Number.isInteger%': typeof Number.isInteger; '%Number.isNaN%': typeof Number.isNaN; '%Number.isSafeInteger%': typeof Number.isSafeInteger; '%Number.parseFloat%': typeof Number.parseFloat; '%Number.parseInt%': typeof Number.parseInt; '%Number.MAX_VALUE%': typeof Number.MAX_VALUE; '%Number.MIN_VALUE%': typeof Number.MIN_VALUE; '%Number.NaN%': typeof Number.NaN; '%Number.NEGATIVE_INFINITY%': typeof Number.NEGATIVE_INFINITY; '%Number.POSITIVE_INFINITY%': typeof Number.POSITIVE_INFINITY; '%Number.MAX_SAFE_INTEGER%': typeof Number.MAX_SAFE_INTEGER; '%Number.MIN_SAFE_INTEGER%': typeof Number.MIN_SAFE_INTEGER; '%Number.EPSILON%': typeof Number.EPSILON; '%NumberPrototype.toExponential%': typeof Number.prototype.toExponential; '%NumberPrototype.toFixed%': typeof Number.prototype.toFixed; '%NumberPrototype.toPrecision%': typeof Number.prototype.toPrecision; '%NumberPrototype.toString%': typeof Number.prototype.toString; '%NumberPrototype.valueOf%': typeof Number.prototype.valueOf; '%NumberPrototype.toLocaleString%': typeof Number.prototype.toLocaleString; '%Object.prototype%': typeof Object.prototype; '%Object.prototype.hasOwnProperty%': typeof Object.prototype.hasOwnProperty; '%Object.prototype.isPrototypeOf%': typeof Object.prototype.isPrototypeOf; '%Object.prototype.propertyIsEnumerable%': typeof Object.prototype.propertyIsEnumerable; 
'%Object.prototype.toString%': typeof Object.prototype.toString; '%Object.prototype.valueOf%': typeof Object.prototype.valueOf; '%Object.prototype.toLocaleString%': typeof Object.prototype.toLocaleString; '%Object.assign%': typeof Object.assign; '%Object.getOwnPropertyDescriptor%': typeof Object.getOwnPropertyDescriptor; '%Object.getOwnPropertyDescriptors%': typeof Object.getOwnPropertyDescriptors; '%Object.getOwnPropertyNames%': typeof Object.getOwnPropertyNames; '%Object.getOwnPropertySymbols%': typeof Object.getOwnPropertySymbols; '%Object.is%': typeof Object.is; '%Object.preventExtensions%': typeof Object.preventExtensions; '%Object.seal%': typeof Object.seal; '%Object.create%': typeof Object.create; '%Object.defineProperties%': typeof Object.defineProperties; '%Object.defineProperty%': typeof Object.defineProperty; '%Object.freeze%': typeof Object.freeze; '%Object.getPrototypeOf%': typeof Object.getPrototypeOf; '%Object.setPrototypeOf%': typeof Object.setPrototypeOf; '%Object.isExtensible%': typeof Object.isExtensible; '%Object.isFrozen%': typeof Object.isFrozen; '%Object.isSealed%': typeof Object.isSealed; '%Object.keys%': typeof Object.keys; '%Object.entries%': typeof Object.entries; '%Object.fromEntries%': typeof Object.fromEntries; '%Object.values%': typeof Object.values; '%ObjectPrototype.hasOwnProperty%': typeof Object.prototype.hasOwnProperty; '%ObjectPrototype.isPrototypeOf%': typeof Object.prototype.isPrototypeOf; '%ObjectPrototype.propertyIsEnumerable%': typeof Object.prototype.propertyIsEnumerable; '%ObjectPrototype.toString%': typeof Object.prototype.toString; '%ObjectPrototype.valueOf%': typeof Object.prototype.valueOf; '%ObjectPrototype.toLocaleString%': typeof Object.prototype.toLocaleString; '%Promise.prototype%': typeof Promise.prototype; '%Promise.prototype.then%': typeof Promise.prototype.then; '%Promise.prototype.catch%': typeof Promise.prototype.catch; '%Promise.prototype.finally%': typeof Promise.prototype.finally; '%Promise.all%': typeof Promise.all; '%Promise.race%': typeof Promise.race; '%Promise.resolve%': typeof Promise.resolve; '%Promise.reject%': typeof Promise.reject; '%Promise.allSettled%': typeof Promise.allSettled; '%PromisePrototype.then%': typeof Promise.prototype.then; '%PromisePrototype.catch%': typeof Promise.prototype.catch; '%PromisePrototype.finally%': typeof Promise.prototype.finally; '%Proxy.revocable%': typeof Proxy.revocable; '%RangeError.prototype%': RangeError; '%RangeError.prototype.name%': typeof RangeError.prototype.name; '%RangeError.prototype.message%': typeof RangeError.prototype.message; '%RangeErrorPrototype.name%': typeof RangeError.prototype.name; '%RangeErrorPrototype.message%': typeof RangeError.prototype.message; '%ReferenceError.prototype%': ReferenceError; '%ReferenceError.prototype.name%': typeof ReferenceError.prototype.name; '%ReferenceError.prototype.message%': typeof ReferenceError.prototype.message; '%ReferenceErrorPrototype.name%': typeof ReferenceError.prototype.name; '%ReferenceErrorPrototype.message%': typeof ReferenceError.prototype.message; '%Reflect.defineProperty%': typeof Reflect.defineProperty; '%Reflect.deleteProperty%': typeof Reflect.deleteProperty; '%Reflect.apply%': typeof Reflect.apply; '%Reflect.construct%': typeof Reflect.construct; '%Reflect.get%': typeof Reflect.get; '%Reflect.getOwnPropertyDescriptor%': typeof Reflect.getOwnPropertyDescriptor; '%Reflect.getPrototypeOf%': typeof Reflect.getPrototypeOf; '%Reflect.has%': typeof Reflect.has; '%Reflect.isExtensible%': typeof Reflect.isExtensible; 
'%Reflect.ownKeys%': typeof Reflect.ownKeys; '%Reflect.preventExtensions%': typeof Reflect.preventExtensions; '%Reflect.set%': typeof Reflect.set; '%Reflect.setPrototypeOf%': typeof Reflect.setPrototypeOf; '%RegExp.prototype%': RegExp; '%RegExp.prototype.exec%': typeof RegExp.prototype.exec; '%RegExp.prototype.dotAll%': (this: RegExp) => typeof RegExp.prototype.dotAll; '%RegExp.prototype.flags%': (this: RegExp) => typeof RegExp.prototype.flags; '%RegExp.prototype.global%': (this: RegExp) => typeof RegExp.prototype.global; '%RegExp.prototype.ignoreCase%': (this: RegExp) => typeof RegExp.prototype.ignoreCase; '%RegExp.prototype.multiline%': (this: RegExp) => typeof RegExp.prototype.multiline; '%RegExp.prototype.source%': (this: RegExp) => typeof RegExp.prototype.source; '%RegExp.prototype.sticky%': (this: RegExp) => typeof RegExp.prototype.sticky; '%RegExp.prototype.unicode%': (this: RegExp) => typeof RegExp.prototype.unicode; '%RegExp.prototype.compile%': typeof RegExp.prototype.compile; '%RegExp.prototype.toString%': typeof RegExp.prototype.toString; '%RegExp.prototype.test%': typeof RegExp.prototype.test; '%RegExpPrototype.exec%': typeof RegExp.prototype.exec; '%RegExpPrototype.dotAll%': (this: RegExp) => typeof RegExp.prototype.dotAll; '%RegExpPrototype.flags%': (this: RegExp) => typeof RegExp.prototype.flags; '%RegExpPrototype.global%': (this: RegExp) => typeof RegExp.prototype.global; '%RegExpPrototype.ignoreCase%': (this: RegExp) => typeof RegExp.prototype.ignoreCase; '%RegExpPrototype.multiline%': (this: RegExp) => typeof RegExp.prototype.multiline; '%RegExpPrototype.source%': (this: RegExp) => typeof RegExp.prototype.source; '%RegExpPrototype.sticky%': (this: RegExp) => typeof RegExp.prototype.sticky; '%RegExpPrototype.unicode%': (this: RegExp) => typeof RegExp.prototype.unicode; '%RegExpPrototype.compile%': typeof RegExp.prototype.compile; '%RegExpPrototype.toString%': typeof RegExp.prototype.toString; '%RegExpPrototype.test%': typeof RegExp.prototype.test; '%Set.prototype%': typeof Set.prototype; '%Set.prototype.has%': typeof Set.prototype.has; '%Set.prototype.add%': typeof Set.prototype.add; '%Set.prototype.delete%': typeof Set.prototype.delete; '%Set.prototype.clear%': typeof Set.prototype.clear; '%Set.prototype.entries%': typeof Set.prototype.entries; '%Set.prototype.forEach%': typeof Set.prototype.forEach; '%Set.prototype.size%': (this: Set<any>) => typeof Set.prototype.size; '%Set.prototype.values%': typeof Set.prototype.values; '%Set.prototype.keys%': typeof Set.prototype.keys; '%SetIteratorPrototype.next%': IterableIterator<any>['next']; '%SetPrototype.has%': typeof Set.prototype.has; '%SetPrototype.add%': typeof Set.prototype.add; '%SetPrototype.delete%': typeof Set.prototype.delete; '%SetPrototype.clear%': typeof Set.prototype.clear; '%SetPrototype.entries%': typeof Set.prototype.entries; '%SetPrototype.forEach%': typeof Set.prototype.forEach; '%SetPrototype.size%': (this: Set<any>) => typeof Set.prototype.size; '%SetPrototype.values%': typeof Set.prototype.values; '%SetPrototype.keys%': typeof Set.prototype.keys; '%SharedArrayBuffer.prototype%': SharedArrayBuffer; '%SharedArrayBuffer.prototype.byteLength%': (this: SharedArrayBuffer) => typeof SharedArrayBuffer.prototype.byteLength; '%SharedArrayBuffer.prototype.slice%': typeof SharedArrayBuffer.prototype.slice; '%SharedArrayBufferPrototype.byteLength%': (this: SharedArrayBuffer) => typeof SharedArrayBuffer.prototype.byteLength; '%SharedArrayBufferPrototype.slice%': typeof SharedArrayBuffer.prototype.slice; 
'%String.prototype%': typeof String.prototype; '%String.prototype.length%': typeof String.prototype.length; '%String.prototype.anchor%': typeof String.prototype.anchor; '%String.prototype.big%': typeof String.prototype.big; '%String.prototype.blink%': typeof String.prototype.blink; '%String.prototype.bold%': typeof String.prototype.bold; '%String.prototype.charAt%': typeof String.prototype.charAt; '%String.prototype.charCodeAt%': typeof String.prototype.charCodeAt; '%String.prototype.codePointAt%': typeof String.prototype.codePointAt; '%String.prototype.concat%': typeof String.prototype.concat; '%String.prototype.endsWith%': typeof String.prototype.endsWith; '%String.prototype.fontcolor%': typeof String.prototype.fontcolor; '%String.prototype.fontsize%': typeof String.prototype.fontsize; '%String.prototype.fixed%': typeof String.prototype.fixed; '%String.prototype.includes%': typeof String.prototype.includes; '%String.prototype.indexOf%': typeof String.prototype.indexOf; '%String.prototype.italics%': typeof String.prototype.italics; '%String.prototype.lastIndexOf%': typeof String.prototype.lastIndexOf; '%String.prototype.link%': typeof String.prototype.link; '%String.prototype.localeCompare%': typeof String.prototype.localeCompare; '%String.prototype.match%': typeof String.prototype.match; '%String.prototype.matchAll%': typeof String.prototype.matchAll; '%String.prototype.normalize%': typeof String.prototype.normalize; '%String.prototype.padEnd%': typeof String.prototype.padEnd; '%String.prototype.padStart%': typeof String.prototype.padStart; '%String.prototype.repeat%': typeof String.prototype.repeat; '%String.prototype.replace%': typeof String.prototype.replace; '%String.prototype.search%': typeof String.prototype.search; '%String.prototype.slice%': typeof String.prototype.slice; '%String.prototype.small%': typeof String.prototype.small; '%String.prototype.split%': typeof String.prototype.split; '%String.prototype.strike%': typeof String.prototype.strike; '%String.prototype.sub%': typeof String.prototype.sub; '%String.prototype.substr%': typeof String.prototype.substr; '%String.prototype.substring%': typeof String.prototype.substring; '%String.prototype.sup%': typeof String.prototype.sup; '%String.prototype.startsWith%': typeof String.prototype.startsWith; '%String.prototype.toString%': typeof String.prototype.toString; '%String.prototype.trim%': typeof String.prototype.trim; '%String.prototype.trimStart%': typeof String.prototype.trimStart; '%String.prototype.trimLeft%': typeof String.prototype.trimLeft; '%String.prototype.trimEnd%': typeof String.prototype.trimEnd; '%String.prototype.trimRight%': typeof String.prototype.trimRight; '%String.prototype.toLocaleLowerCase%': typeof String.prototype.toLocaleLowerCase; '%String.prototype.toLocaleUpperCase%': typeof String.prototype.toLocaleUpperCase; '%String.prototype.toLowerCase%': typeof String.prototype.toLowerCase; '%String.prototype.toUpperCase%': typeof String.prototype.toUpperCase; '%String.prototype.valueOf%': typeof String.prototype.valueOf; '%String.fromCharCode%': typeof String.fromCharCode; '%String.fromCodePoint%': typeof String.fromCodePoint; '%String.raw%': typeof String.raw; '%StringIteratorPrototype.next%': IterableIterator<string>['next']; '%StringPrototype.length%': typeof String.prototype.length; '%StringPrototype.anchor%': typeof String.prototype.anchor; '%StringPrototype.big%': typeof String.prototype.big; '%StringPrototype.blink%': typeof String.prototype.blink; '%StringPrototype.bold%': typeof String.prototype.bold; 
'%StringPrototype.charAt%': typeof String.prototype.charAt; '%StringPrototype.charCodeAt%': typeof String.prototype.charCodeAt; '%StringPrototype.codePointAt%': typeof String.prototype.codePointAt; '%StringPrototype.concat%': typeof String.prototype.concat; '%StringPrototype.endsWith%': typeof String.prototype.endsWith; '%StringPrototype.fontcolor%': typeof String.prototype.fontcolor; '%StringPrototype.fontsize%': typeof String.prototype.fontsize; '%StringPrototype.fixed%': typeof String.prototype.fixed; '%StringPrototype.includes%': typeof String.prototype.includes; '%StringPrototype.indexOf%': typeof String.prototype.indexOf; '%StringPrototype.italics%': typeof String.prototype.italics; '%StringPrototype.lastIndexOf%': typeof String.prototype.lastIndexOf; '%StringPrototype.link%': typeof String.prototype.link; '%StringPrototype.localeCompare%': typeof String.prototype.localeCompare; '%StringPrototype.match%': typeof String.prototype.match; '%StringPrototype.matchAll%': typeof String.prototype.matchAll; '%StringPrototype.normalize%': typeof String.prototype.normalize; '%StringPrototype.padEnd%': typeof String.prototype.padEnd; '%StringPrototype.padStart%': typeof String.prototype.padStart; '%StringPrototype.repeat%': typeof String.prototype.repeat; '%StringPrototype.replace%': typeof String.prototype.replace; '%StringPrototype.search%': typeof String.prototype.search; '%StringPrototype.slice%': typeof String.prototype.slice; '%StringPrototype.small%': typeof String.prototype.small; '%StringPrototype.split%': typeof String.prototype.split; '%StringPrototype.strike%': typeof String.prototype.strike; '%StringPrototype.sub%': typeof String.prototype.sub; '%StringPrototype.substr%': typeof String.prototype.substr; '%StringPrototype.substring%': typeof String.prototype.substring; '%StringPrototype.sup%': typeof String.prototype.sup; '%StringPrototype.startsWith%': typeof String.prototype.startsWith; '%StringPrototype.toString%': typeof String.prototype.toString; '%StringPrototype.trim%': typeof String.prototype.trim; '%StringPrototype.trimStart%': typeof String.prototype.trimStart; '%StringPrototype.trimLeft%': typeof String.prototype.trimLeft; '%StringPrototype.trimEnd%': typeof String.prototype.trimEnd; '%StringPrototype.trimRight%': typeof String.prototype.trimRight; '%StringPrototype.toLocaleLowerCase%': typeof String.prototype.toLocaleLowerCase; '%StringPrototype.toLocaleUpperCase%': typeof String.prototype.toLocaleUpperCase; '%StringPrototype.toLowerCase%': typeof String.prototype.toLowerCase; '%StringPrototype.toUpperCase%': typeof String.prototype.toUpperCase; '%StringPrototype.valueOf%': typeof String.prototype.valueOf; '%Symbol.prototype%': typeof Symbol.prototype; '%Symbol.prototype.toString%': typeof Symbol.prototype.toString; '%Symbol.prototype.valueOf%': typeof Symbol.prototype.valueOf; '%Symbol.prototype.description%': (this: symbol | Symbol) => typeof Symbol.prototype.description; '%Symbol.for%': typeof Symbol.for; '%Symbol.keyFor%': typeof Symbol.keyFor; '%Symbol.asyncIterator%': typeof Symbol.asyncIterator; '%Symbol.hasInstance%': typeof Symbol.hasInstance; '%Symbol.isConcatSpreadable%': typeof Symbol.isConcatSpreadable; '%Symbol.iterator%': typeof Symbol.iterator; '%Symbol.match%': typeof Symbol.match; '%Symbol.matchAll%': typeof Symbol.matchAll; '%Symbol.replace%': typeof Symbol.replace; '%Symbol.search%': typeof Symbol.search; '%Symbol.species%': typeof Symbol.species; '%Symbol.split%': typeof Symbol.split; '%Symbol.toPrimitive%': typeof Symbol.toPrimitive; 
'%Symbol.toStringTag%': typeof Symbol.toStringTag; '%Symbol.unscopables%': typeof Symbol.unscopables; '%SymbolPrototype.toString%': typeof Symbol.prototype.toString; '%SymbolPrototype.valueOf%': typeof Symbol.prototype.valueOf; '%SymbolPrototype.description%': (this: symbol | Symbol) => typeof Symbol.prototype.description; '%SyntaxError.prototype%': SyntaxError; '%SyntaxError.prototype.name%': typeof SyntaxError.prototype.name; '%SyntaxError.prototype.message%': typeof SyntaxError.prototype.message; '%SyntaxErrorPrototype.name%': typeof SyntaxError.prototype.name; '%SyntaxErrorPrototype.message%': typeof SyntaxError.prototype.message; '%TypedArray.prototype%': TypedArrayPrototype; '%TypedArray.prototype.buffer%': (this: TypedArray) => TypedArrayPrototype['buffer']; '%TypedArray.prototype.byteLength%': (this: TypedArray) => TypedArrayPrototype['byteLength']; '%TypedArray.prototype.byteOffset%': (this: TypedArray) => TypedArrayPrototype['byteOffset']; '%TypedArray.prototype.length%': (this: TypedArray) => TypedArrayPrototype['length']; '%TypedArray.prototype.entries%': TypedArrayPrototype['entries']; '%TypedArray.prototype.keys%': TypedArrayPrototype['keys']; '%TypedArray.prototype.values%': TypedArrayPrototype['values']; '%TypedArray.prototype.copyWithin%': TypedArrayPrototype['copyWithin']; '%TypedArray.prototype.every%': TypedArrayPrototype['every']; '%TypedArray.prototype.fill%': TypedArrayPrototype['fill']; '%TypedArray.prototype.filter%': TypedArrayPrototype['filter']; '%TypedArray.prototype.find%': TypedArrayPrototype['find']; '%TypedArray.prototype.findIndex%': TypedArrayPrototype['findIndex']; '%TypedArray.prototype.forEach%': TypedArrayPrototype['forEach']; '%TypedArray.prototype.includes%': TypedArrayPrototype['includes']; '%TypedArray.prototype.indexOf%': TypedArrayPrototype['indexOf']; '%TypedArray.prototype.join%': TypedArrayPrototype['join']; '%TypedArray.prototype.lastIndexOf%': TypedArrayPrototype['lastIndexOf']; '%TypedArray.prototype.map%': TypedArrayPrototype['map']; '%TypedArray.prototype.reverse%': TypedArrayPrototype['reverse']; '%TypedArray.prototype.reduce%': TypedArrayPrototype['reduce']; '%TypedArray.prototype.reduceRight%': TypedArrayPrototype['reduceRight']; '%TypedArray.prototype.set%': TypedArrayPrototype['set']; '%TypedArray.prototype.slice%': TypedArrayPrototype['slice']; '%TypedArray.prototype.some%': TypedArrayPrototype['some']; '%TypedArray.prototype.sort%': TypedArrayPrototype['sort']; '%TypedArray.prototype.subarray%': TypedArrayPrototype['subarray']; '%TypedArray.prototype.toLocaleString%': TypedArrayPrototype['toLocaleString']; '%TypedArray.prototype.toString%': TypedArrayPrototype['toString']; '%TypedArray.of%': TypedArrayConstructor['of']; '%TypedArray.from%': TypedArrayConstructor['from']; '%TypedArrayPrototype.buffer%': (this: TypedArray) => TypedArrayPrototype['buffer']; '%TypedArrayPrototype.byteLength%': (this: TypedArray) => TypedArrayPrototype['byteLength']; '%TypedArrayPrototype.byteOffset%': (this: TypedArray) => TypedArrayPrototype['byteOffset']; '%TypedArrayPrototype.length%': (this: TypedArray) => TypedArrayPrototype['length']; '%TypedArrayPrototype.entries%': TypedArrayPrototype['entries']; '%TypedArrayPrototype.keys%': TypedArrayPrototype['keys']; '%TypedArrayPrototype.values%': TypedArrayPrototype['values']; '%TypedArrayPrototype.copyWithin%': TypedArrayPrototype['copyWithin']; '%TypedArrayPrototype.every%': TypedArrayPrototype['every']; '%TypedArrayPrototype.fill%': TypedArrayPrototype['fill']; '%TypedArrayPrototype.filter%': 
TypedArrayPrototype['filter']; '%TypedArrayPrototype.find%': TypedArrayPrototype['find']; '%TypedArrayPrototype.findIndex%': TypedArrayPrototype['findIndex']; '%TypedArrayPrototype.forEach%': TypedArrayPrototype['forEach']; '%TypedArrayPrototype.includes%': TypedArrayPrototype['includes']; '%TypedArrayPrototype.indexOf%': TypedArrayPrototype['indexOf']; '%TypedArrayPrototype.join%': TypedArrayPrototype['join']; '%TypedArrayPrototype.lastIndexOf%': TypedArrayPrototype['lastIndexOf']; '%TypedArrayPrototype.map%': TypedArrayPrototype['map']; '%TypedArrayPrototype.reverse%': TypedArrayPrototype['reverse']; '%TypedArrayPrototype.reduce%': TypedArrayPrototype['reduce']; '%TypedArrayPrototype.reduceRight%': TypedArrayPrototype['reduceRight']; '%TypedArrayPrototype.set%': TypedArrayPrototype['set']; '%TypedArrayPrototype.slice%': TypedArrayPrototype['slice']; '%TypedArrayPrototype.some%': TypedArrayPrototype['some']; '%TypedArrayPrototype.sort%': TypedArrayPrototype['sort']; '%TypedArrayPrototype.subarray%': TypedArrayPrototype['subarray']; '%TypedArrayPrototype.toLocaleString%': TypedArrayPrototype['toLocaleString']; '%TypedArrayPrototype.toString%': TypedArrayPrototype['toString']; '%TypeError.prototype%': TypeError; '%TypeError.prototype.name%': typeof TypeError.prototype.name; '%TypeError.prototype.message%': typeof TypeError.prototype.message; '%TypeErrorPrototype.name%': typeof TypeError.prototype.name; '%TypeErrorPrototype.message%': typeof TypeError.prototype.message; '%Uint8Array.prototype%': Uint8Array; '%Uint8Array.prototype.BYTES_PER_ELEMENT%': typeof Uint8Array.prototype.BYTES_PER_ELEMENT; '%Uint8Array.BYTES_PER_ELEMENT%': typeof Uint8Array.BYTES_PER_ELEMENT; '%Uint8ArrayPrototype.BYTES_PER_ELEMENT%': typeof Uint8Array.prototype.BYTES_PER_ELEMENT; '%Uint8ClampedArray.prototype%': Uint8ClampedArray; '%Uint8ClampedArray.prototype.BYTES_PER_ELEMENT%': typeof Uint8ClampedArray.prototype.BYTES_PER_ELEMENT; '%Uint8ClampedArray.BYTES_PER_ELEMENT%': typeof Uint8ClampedArray.BYTES_PER_ELEMENT; '%Uint8ClampedArrayPrototype.BYTES_PER_ELEMENT%': typeof Uint8ClampedArray.prototype.BYTES_PER_ELEMENT; '%Uint16Array.prototype%': Uint16Array; '%Uint16Array.prototype.BYTES_PER_ELEMENT%': typeof Uint16Array.prototype.BYTES_PER_ELEMENT; '%Uint16Array.BYTES_PER_ELEMENT%': typeof Uint16Array.BYTES_PER_ELEMENT; '%Uint16ArrayPrototype.BYTES_PER_ELEMENT%': typeof Uint16Array.prototype.BYTES_PER_ELEMENT; '%Uint32Array.prototype%': Uint32Array; '%Uint32Array.prototype.BYTES_PER_ELEMENT%': typeof Uint32Array.prototype.BYTES_PER_ELEMENT; '%Uint32Array.BYTES_PER_ELEMENT%': typeof Uint32Array.BYTES_PER_ELEMENT; '%Uint32ArrayPrototype.BYTES_PER_ELEMENT%': typeof Uint32Array.prototype.BYTES_PER_ELEMENT; '%URIError.prototype%': URIError; '%URIError.prototype.name%': typeof URIError.prototype.name; '%URIError.prototype.message%': typeof URIError.prototype.message; '%URIErrorPrototype.name%': typeof URIError.prototype.name; '%URIErrorPrototype.message%': typeof URIError.prototype.message; '%WeakMap.prototype%': typeof WeakMap.prototype; '%WeakMap.prototype.delete%': typeof WeakMap.prototype.delete; '%WeakMap.prototype.get%': typeof WeakMap.prototype.get; '%WeakMap.prototype.set%': typeof WeakMap.prototype.set; '%WeakMap.prototype.has%': typeof WeakMap.prototype.has; '%WeakMapPrototype.delete%': typeof WeakMap.prototype.delete; '%WeakMapPrototype.get%': typeof WeakMap.prototype.get; '%WeakMapPrototype.set%': typeof WeakMap.prototype.set; '%WeakMapPrototype.has%': typeof WeakMap.prototype.has; '%WeakSet.prototype%': 
typeof WeakSet.prototype; '%WeakSet.prototype.delete%': typeof WeakSet.prototype.delete; '%WeakSet.prototype.has%': typeof WeakSet.prototype.has; '%WeakSet.prototype.add%': typeof WeakSet.prototype.add; '%WeakSetPrototype.delete%': typeof WeakSet.prototype.delete; '%WeakSetPrototype.has%': typeof WeakSet.prototype.has; '%WeakSetPrototype.add%': typeof WeakSet.prototype.add; } }<|fim▁end|>
'%Date.prototype.getMonth%': typeof Date.prototype.getMonth;
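
Each row in this dump follows the same fill-in-the-middle layout: a <|file_name|> header, a prompt wrapped in <|fim▁begin|> ... <|fim▁end|> containing a single <|fim▁hole|> marker, and then the completion text that belongs at the hole (for the row above, the '%Date.prototype.getMonth%' line). The short Python sketch below shows one way such a pair can be spliced back into the original source. It is an illustrative helper written for this note, assuming the prompt string starts at <|fim▁begin|>, ends at <|fim▁end|>, and holds exactly one hole; the tiny sample pair at the bottom is made up, not taken from a row.

# Illustrative only: splice a fill-in-the-middle (FIM) prompt/completion pair
# back into the original file text. Marker strings match the rows in this dump.
BEGIN, HOLE, END = "<|fim▁begin|>", "<|fim▁hole|>", "<|fim▁end|>"

def reassemble(prompt, completion):
    # Assumes `prompt` starts with BEGIN, ends with END, and holds one HOLE.
    body = prompt[len(BEGIN):-len(END)]
    prefix, marker, suffix = body.partition(HOLE)
    if marker != HOLE:
        raise ValueError("prompt does not contain a hole marker")
    return prefix + completion + suffix

# Tiny synthetic pair (not a dataset row), just to show the splice:
prompt = BEGIN + "const INITIAL = {\n" + HOLE + "  error: ''\n};" + END
print(reassemble(prompt, "  author: {},\n"))
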
<|file_name|>authors.js<|end_file_name|><|fim▁begin|>import { FETCH_AUTHOR, UPDATE_AUTHOR } from 'shared/constants/actions'; const INITIAL_STATE = { <|fim▁hole|> errorMessage: '' }; export default function (state = INITIAL_STATE, action) { switch (action.type) { case FETCH_AUTHOR.SUCCESS: // author -> { id, email, name, image, description, introduction } return { author: action.payload.author, errorMessage: '' }; case UPDATE_AUTHOR.SUCCESS: return { ...state, author: {}, errorMessage: '' }; case UPDATE_AUTHOR.FAILURE: return { ...state, errorMessage: action.payload.errorMessage }; default: return state; } }<|fim▁end|>
author: {},
<|file_name|>ewf_path_spec.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python # -*- coding: utf-8 -*- """Tests for the EWF image path specification implementation.""" import unittest from dfvfs.path import ewf_path_spec from tests.path import test_lib class EwfPathSpecTest(test_lib.PathSpecTestCase): """Tests for the EWF image path specification implementation.""" def testInitialize(self): """Tests the path specification initialization.""" path_spec = ewf_path_spec.EwfPathSpec(parent=self._path_spec) self.assertNotEqual(path_spec, None) with self.assertRaises(ValueError): _ = ewf_path_spec.EwfPathSpec(parent=None) with self.assertRaises(ValueError): _ = ewf_path_spec.EwfPathSpec(parent=self._path_spec, bogus=u'BOGUS') def testComparable(self): """Tests the path specification comparable property.""" path_spec = ewf_path_spec.EwfPathSpec(parent=self._path_spec)<|fim▁hole|> expected_comparable = u'\n'.join([ u'type: TEST', u'type: EWF', u'']) self.assertEqual(path_spec.comparable, expected_comparable) if __name__ == '__main__': unittest.main()<|fim▁end|>
self.assertNotEqual(path_spec, None)
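
The dfvfs test row above builds an EwfPathSpec on top of a test parent and expects its comparable property to render the chain as one 'type: ...' line per level, root first, with a trailing newline. To make that expected string easier to see, here is a miniature re-creation of the parent-chaining pattern in Python; the Toy* classes and their comparable property are invented for illustration only and are not dfvfs's actual classes or API.

# Toy re-creation of the parent-chained "comparable" pattern the test checks.
# These classes are illustrative stand-ins, not dfvfs's real implementation.
class ToyPathSpec(object):
    TYPE_INDICATOR = 'TEST'

    def __init__(self, parent=None):
        self.parent = parent

    @property
    def comparable(self):
        # Walk up to the root, then emit one "type: X" line per level, root first.
        chain = []
        spec = self
        while spec is not None:
            chain.append(spec)
            spec = spec.parent
        lines = ['type: {0:s}'.format(s.TYPE_INDICATOR) for s in reversed(chain)]
        return '\n'.join(lines + [''])


class ToyEwfPathSpec(ToyPathSpec):
    TYPE_INDICATOR = 'EWF'


print(ToyEwfPathSpec(parent=ToyPathSpec()).comparable)
# -> "type: TEST\ntype: EWF\n", matching expected_comparable in the test above.
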
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright © 2015, Peter Atashian // Licensed under the MIT License <LICENSE.md> //! FFI bindings to winsatapi.<|fim▁hole|>extern crate winapi; use winapi::*; extern "system" { }<|fim▁end|>
#![no_std] #![experimental]
<|file_name|>spamcan1.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python ############################################################################### # Copyright (C) 1994 - 2013, Performance Dynamics Company # # # # This software is licensed as described in the file COPYING, which # # you should have received as part of this distribution. The terms # # are also available at http://www.perfdynamics.com/Tools/copyright.html. # # # # You may opt to use, copy, modify, merge, publish, distribute and/or sell #<|fim▁hole|># KIND, either express or implied. # ############################################################################### # $Id: spamcan1.py,v 1.3 2012/11/13 03:12:04 earl-lang Exp $ # Created by NJG on Wed, Apr 18, 2007 # # Queueing model of an email-spam analyzer system comprising a # battery of SMP servers essentially running in batch mode. # Each node was a 4-way SMP server. # The performance metric of interest was the mean queue length. # # This simple M/M/4 model gave results that were in surprisingly # good agreement with monitored queue lengths. import pdq # Measured performance parameters cpusPerServer = 4 emailThruput = 2376 # emails per hour scannerTime = 6.0 # seconds per email pdq.Init("Spam Farm Model") # Timebase is SECONDS ... nstreams = pdq.CreateOpen("Email", float(emailThruput)/3600) nnodes = pdq.CreateNode("spamCan", int(cpusPerServer), pdq.MSQ) pdq.SetDemand("spamCan", "Email", scannerTime) pdq.Solve(pdq.CANON) pdq.Report()<|fim▁end|>
# copies of the Software, and permit persons to whom the Software is # # furnished to do so, under the terms of the COPYING file. # # # # This software is distributed on an "AS IS" basis, WITHOUT WARRANTY OF ANY #
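
The spamcan1.py row models the spam-scanning farm as an open M/M/4 queue: emails arrive at 2376 per hour, each needs about 6 seconds of scanner time, and each node has 4 CPUs, with the mean queue length as the metric of interest. As a rough cross-check that does not depend on PDQ, the same numbers can be run through the textbook Erlang-C formula; the sketch below is an independent back-of-the-envelope estimate, not part of the original script, and the variable names are chosen here for clarity.

import math

# Parameters copied from the spamcan1.py row above.
arrival_rate = 2376 / 3600.0   # emails per second
service_time = 6.0             # seconds of scanner time per email
servers = 4                    # CPUs per SMP server

rho = arrival_rate * service_time / servers   # per-CPU utilization (~0.99)
offered_load = servers * rho                  # a = lambda * S (busy CPUs)

def erlang_c(m, a):
    # Probability that an arriving email finds all m CPUs busy and must wait.
    top = a**m / (math.factorial(m) * (1 - a / m))
    bottom = sum(a**k / math.factorial(k) for k in range(m)) + top
    return top / bottom

wait_prob = erlang_c(servers, offered_load)
queue_len = wait_prob * rho / (1 - rho)       # mean number waiting (Lq)
in_system = queue_len + offered_load          # waiting plus in service

print(f"utilization per CPU: {rho:.3f}")
print(f"mean queue length:   {queue_len:.1f}")
print(f"mean in system:      {in_system:.1f}")
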
<|file_name|>machine.rs<|end_file_name|><|fim▁begin|>// Copyright (C) 2014 The 6502-rs Developers // All rights reserved. // // Redistribution and use in source and binary forms, with or without // modification, are permitted provided that the following conditions // are met: // 1. Redistributions of source code must retain the above copyright // notice, this list of conditions and the following disclaimer. // 2. Redistributions in binary form must reproduce the above copyright // notice, this list of conditions and the following disclaimer in the // documentation and/or other materials provided with the distribution. // 3. Neither the names of the copyright holders nor the names of any // contributors may be used to endorse or promote products derived from this // software without specific prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" // AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE // IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE // ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE // LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR // CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF // SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN // CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) // ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE // POSSIBILITY OF SUCH DAMAGE. use std; use address::{Address, AddressDiff}; use instruction; use instruction::{DecodedInstr, Instruction, OpInput}; use memory::Memory; use range_incl::range_incl; use registers::{ Registers, StackPointer, Status, StatusArgs }; use registers::{ PS_NEGATIVE, PS_DECIMAL_MODE, PS_OVERFLOW, PS_ZERO, PS_CARRY, PS_DISABLE_INTERRUPTS }; #[derive(Copy)] pub struct Machine { pub registers: Registers, pub memory: Memory } impl Machine { pub fn new() -> Machine { Machine{ registers: Registers::new(), memory: Memory::new() } } pub fn reset(&mut self) { *self = Machine::new(); } pub fn fetch_next_and_decode(&mut self) -> Option<DecodedInstr> { let x: u8 = self.memory.get_byte(self.registers.program_counter); match instruction::OPCODES[x as usize] { Some((instr, am)) => { let extra_bytes = am.extra_bytes(); let num_bytes = AddressDiff(1) + extra_bytes; let data_start = self.registers.program_counter + AddressDiff(1); let slice = self.memory.get_slice(data_start, extra_bytes); let am_out = am.process(self, slice); // Increment program counter self.registers.program_counter = self.registers.program_counter + num_bytes; Some((instr, am_out)) } _ => None } } pub fn execute_instruction(&mut self, decoded_instr: DecodedInstr) { match decoded_instr { (Instruction::ADC, OpInput::UseImmediate(val)) => { debug!("add with carry immediate: {}", val); self.add_with_carry(val as i8); } (Instruction::ADC, OpInput::UseAddress(addr)) => { let val = self.memory.get_byte(addr) as i8; debug!("add with carry. address: {:?}. 
value: {}", addr, val); self.add_with_carry(val); } (Instruction::AND, OpInput::UseImmediate(val)) => { self.and(val as i8); } (Instruction::AND, OpInput::UseAddress(addr)) => { let val = self.memory.get_byte(addr) as i8; self.and(val as i8); } (Instruction::ASL, OpInput::UseImplied) => { // Accumulator mode let mut val = self.registers.accumulator as u8; Machine::shift_left_with_flags(&mut val, &mut self.registers.status); self.registers.accumulator = val as i8; } (Instruction::ASL, OpInput::UseAddress(addr)) => { Machine::shift_left_with_flags( self.memory.get_byte_mut_ref(addr), &mut self.registers.status); } (Instruction::BCC, OpInput::UseRelative(rel)) => { let addr = self.registers.program_counter + AddressDiff(rel as i32); self.branch_if_carry_clear(addr); } (Instruction::BCS, OpInput::UseRelative(rel)) => { let addr = self.registers.program_counter + AddressDiff(rel as i32); self.branch_if_carry_set(addr); } (Instruction::BEQ, OpInput::UseRelative(rel)) => { let addr = self.registers.program_counter + AddressDiff(rel as i32); self.branch_if_equal(addr); } (Instruction::BIT, OpInput::UseAddress(addr)) => { let a: u8 = self.registers.accumulator as u8; let m: u8 = self.memory.get_byte(addr); let res = a & m; // The zero flag is set based on the result of the 'and'. let is_zero = 0 == res; // The N flag is set to bit 7 of the byte from memory. let bit7 = 0 != (0x80 & res); // The V flag is set to bit 6 of the byte from memory. let bit6 = 0 != (0x40 & res); self.registers.status.set_with_mask( PS_ZERO | PS_NEGATIVE | PS_OVERFLOW, Status::new(StatusArgs { zero: is_zero, negative: bit7, overflow: bit6, ..StatusArgs::none() } )); } (Instruction::BMI, OpInput::UseRelative(rel)) => { let addr = self.registers.program_counter + AddressDiff(rel as i32); debug!("branch if minus relative. 
address: {:?}", addr); self.branch_if_minus(addr); } (Instruction::BPL, OpInput::UseRelative(rel)) => { let addr = self.registers.program_counter + AddressDiff(rel as i32); self.branch_if_positive(addr); } (Instruction::BVC, OpInput::UseRelative(rel)) => { let addr = self.registers.program_counter + AddressDiff(rel as i32); self.branch_if_overflow_clear(addr); } (Instruction::BVS, OpInput::UseRelative(rel)) => { let addr = self.registers.program_counter + AddressDiff(rel as i32); self.branch_if_overflow_set(addr); } (Instruction::CLC, OpInput::UseImplied) => { self.registers.status.and(!PS_CARRY); } (Instruction::CLD, OpInput::UseImplied) => { self.registers.status.and(!PS_DECIMAL_MODE); } (Instruction::CLI, OpInput::UseImplied) => { self.registers.status.and(!PS_DISABLE_INTERRUPTS); } (Instruction::CLV, OpInput::UseImplied) => { self.registers.status.and(!PS_OVERFLOW); } (Instruction::CMP, OpInput::UseImmediate(val)) => { self.compare_with_a_register(val); } (Instruction::CMP, OpInput::UseAddress(addr)) => { let val = self.memory.get_byte(addr); self.compare_with_a_register(val); } (Instruction::CPX, OpInput::UseImmediate(val)) => { self.compare_with_x_register(val); } (Instruction::CPX, OpInput::UseAddress(addr)) => { let val = self.memory.get_byte(addr); self.compare_with_x_register(val); } (Instruction::CPY, OpInput::UseImmediate(val)) => { self.compare_with_y_register(val); } (Instruction::CPY, OpInput::UseAddress(addr)) => { let val = self.memory.get_byte(addr); self.compare_with_y_register(val); } (Instruction::DEC, OpInput::UseAddress(addr)) => { self.decrement_memory(addr) } (Instruction::DEX, OpInput::UseImplied) => { self.dec_x(); } (Instruction::EOR, OpInput::UseImmediate(val)) => { self.exclusive_or(val); } (Instruction::EOR, OpInput::UseAddress(addr)) => { let val = self.memory.get_byte(addr); self.exclusive_or(val); } (Instruction::INC, OpInput::UseAddress(addr)) => { let m = self.memory.get_byte(addr); let m = m + 1; self.memory.set_byte(addr, m); let i = m as i8; Machine::set_flags_from_i8(&mut self.registers.status, i); } (Instruction::INX, OpInput::UseImplied) => { let x = self.registers.index_x + 1; self.load_x_register(x); } (Instruction::INY, OpInput::UseImplied) => { let y = self.registers.index_y + 1; self.load_y_register(y); } (Instruction::JMP, OpInput::UseAddress(addr)) => { self.jump(addr) } (Instruction::LDA, OpInput::UseImmediate(val)) => { debug!("load A immediate: {}", val); self.load_accumulator(val as i8); } (Instruction::LDA, OpInput::UseAddress(addr)) => { let val = self.memory.get_byte(addr); debug!("load A. address: {:?}. value: {}", addr, val); self.load_accumulator(val as i8); } (Instruction::LDX, OpInput::UseImmediate(val)) => { debug!("load X immediate: {}", val); self.load_x_register(val as i8); } (Instruction::LDX, OpInput::UseAddress(addr)) => { let val = self.memory.get_byte(addr); debug!("load X. address: {:?}. value: {}", addr, val); self.load_x_register(val as i8); } (Instruction::LDY, OpInput::UseImmediate(val)) => { debug!("load Y immediate: {}", val); self.load_y_register(val as i8); } (Instruction::LDY, OpInput::UseAddress(addr)) => { let val = self.memory.get_byte(addr); debug!("load Y. address: {:?}. 
value: {}", addr, val); self.load_y_register(val as i8); } (Instruction::LSR, OpInput::UseImplied) => { // Accumulator mode let mut val = self.registers.accumulator as u8; Machine::shift_right_with_flags(&mut val, &mut self.registers.status); self.registers.accumulator = val as i8; } (Instruction::LSR, OpInput::UseAddress(addr)) => { Machine::shift_right_with_flags( self.memory.get_byte_mut_ref(addr), &mut self.registers.status); } (Instruction::ORA, OpInput::UseImmediate(val)) => { self.inclusive_or(val); } (Instruction::ORA, OpInput::UseAddress(addr)) => { let val = self.memory.get_byte(addr); self.inclusive_or(val); } (Instruction::PHA, OpInput::UseImplied) => { // Push accumulator let val = self.registers.accumulator as u8; self.push_on_stack(val); } (Instruction::PHP, OpInput::UseImplied) => { // Push status let val = self.registers.status.bits(); self.push_on_stack(val); } (Instruction::PLA, OpInput::UseImplied) => { // Pull accumulator let val: u8 = self.pull_from_stack(); self.registers.accumulator = val as i8; } (Instruction::PLP, OpInput::UseImplied) => { // Pull status let val: u8 = self.pull_from_stack(); // The `truncate` here won't do anything because we have a // constant for the single unused flags bit. This probably // corresponds to the behavior of the 6502...? FIXME: verify self.registers.status = Status::from_bits_truncate(val); } (Instruction::ROL, OpInput::UseImplied) => { // Accumulator mode let mut val = self.registers.accumulator as u8; Machine::rotate_left_with_flags(&mut val, &mut self.registers.status); self.registers.accumulator = val as i8; } (Instruction::ROL, OpInput::UseAddress(addr)) => { Machine::rotate_left_with_flags( self.memory.get_byte_mut_ref(addr), &mut self.registers.status); } (Instruction::ROR, OpInput::UseImplied) => { // Accumulator mode let mut val = self.registers.accumulator as u8; Machine::rotate_right_with_flags(&mut val, &mut self.registers.status); self.registers.accumulator = val as i8; } (Instruction::ROR, OpInput::UseAddress(addr)) => { Machine::rotate_right_with_flags( self.memory.get_byte_mut_ref(addr), &mut self.registers.status); } (Instruction::SBC, OpInput::UseImmediate(val)) => { debug!("subtract with carry immediate: {}", val); self.subtract_with_carry(val as i8); } (Instruction::SBC, OpInput::UseAddress(addr)) => { let val = self.memory.get_byte(addr) as i8; debug!("subtract with carry. address: {:?}. 
value: {}", addr, val); self.subtract_with_carry(val); } (Instruction::SEC, OpInput::UseImplied) => { self.registers.status.or(PS_CARRY); } (Instruction::SED, OpInput::UseImplied) => { self.registers.status.or(PS_DECIMAL_MODE); } (Instruction::SEI, OpInput::UseImplied) => { self.registers.status.or(PS_DISABLE_INTERRUPTS); } (Instruction::STA, OpInput::UseAddress(addr)) => { self.memory.set_byte(addr, self.registers.accumulator as u8); } (Instruction::STX, OpInput::UseAddress(addr)) => { self.memory.set_byte(addr, self.registers.index_x as u8); } (Instruction::STY, OpInput::UseAddress(addr)) => { self.memory.set_byte(addr, self.registers.index_y as u8); } (Instruction::TAX, OpInput::UseImplied) => { let val = self.registers.accumulator; self.load_x_register(val); } (Instruction::TAY, OpInput::UseImplied) => { let val = self.registers.accumulator; self.load_y_register(val); } (Instruction::TSX, OpInput::UseImplied) => { let StackPointer(val) = self.registers.stack_pointer; let val = val as i8; self.load_x_register(val); } (Instruction::TXA, OpInput::UseImplied) => { let val = self.registers.index_x; self.load_accumulator(val); } (Instruction::TXS, OpInput::UseImplied) => { // Note that this is the only 'transfer' instruction that does // NOT set the zero and negative flags. (Because the target // is the stack pointer) let val = self.registers.index_x; self.registers.stack_pointer = StackPointer(val as u8); } (Instruction::TYA, OpInput::UseImplied) => { let val = self.registers.index_y; self.load_accumulator(val); } (Instruction::NOP, OpInput::UseImplied) => { debug!("NOP instruction"); } (_, _) => { debug!("attempting to execute unimplemented or invalid \ instruction"); } }; } pub fn run(&mut self) { loop { if let Some(decoded_instr) = self.fetch_next_and_decode() { self.execute_instruction(decoded_instr); } else { break } } } fn set_flags_from_i8(status: &mut Status, value: i8) { let is_zero = value == 0; let is_negative = value < 0; status.set_with_mask( PS_ZERO | PS_NEGATIVE, Status::new(StatusArgs { zero: is_zero, negative: is_negative, ..StatusArgs::none() } )); } fn shift_left_with_flags(p_val: &mut u8, status: &mut Status) { let mask = 1 << 7; let is_bit_7_set = (*p_val & mask) == mask; let shifted = (*p_val & !(1 << 7)) << 1; *p_val = shifted; status.set_with_mask( PS_CARRY, Status::new(StatusArgs { carry: is_bit_7_set, ..StatusArgs::none() } )); Machine::set_flags_from_i8(status, *p_val as i8); } fn shift_right_with_flags(p_val: &mut u8, status: &mut Status) { let mask = 1; let is_bit_0_set = (*p_val & mask) == mask; *p_val = *p_val >> 1; status.set_with_mask( PS_CARRY, Status::new(StatusArgs { carry: is_bit_0_set, ..StatusArgs::none() } )); Machine::set_flags_from_i8(status, *p_val as i8); } fn rotate_left_with_flags(p_val: &mut u8, status: &mut Status) { let is_carry_set = status.contains(PS_CARRY); let mask = 1 << 7; let is_bit_7_set = (*p_val & mask) == mask; let shifted = (*p_val & !(1 << 7)) << 1; *p_val = shifted + if is_carry_set { 1 } else { 0 }; status.set_with_mask( PS_CARRY, Status::new(StatusArgs { carry: is_bit_7_set, ..StatusArgs::none() } )); Machine::set_flags_from_i8(status, *p_val as i8); } fn rotate_right_with_flags(p_val: &mut u8, status: &mut Status) { let is_carry_set = status.contains(PS_CARRY); let mask = 1; let is_bit_0_set = (*p_val & mask) == mask; let shifted = *p_val >> 1; *p_val = shifted + if is_carry_set { 1 << 7 } else { 0 }; status.set_with_mask( PS_CARRY, Status::new(StatusArgs { carry: is_bit_0_set, ..StatusArgs::none() } )); 
Machine::set_flags_from_i8(status, *p_val as i8); } fn set_i8_with_flags(mem: &mut i8, status: &mut Status, value: i8) { *mem = value; Machine::set_flags_from_i8(status, value); } fn load_x_register(&mut self, value: i8) { Machine::set_i8_with_flags(&mut self.registers.index_x, &mut self.registers.status, value); } fn load_y_register(&mut self, value: i8) { Machine::set_i8_with_flags(&mut self.registers.index_y, &mut self.registers.status, value); } fn load_accumulator(&mut self, value: i8) { Machine::set_i8_with_flags(&mut self.registers.accumulator, &mut self.registers.status, value); } fn add_with_carry(&mut self, value: i8) { if self.registers.status.contains(PS_DECIMAL_MODE) { // TODO akeeton: Implement binary-coded decimal. debug!("binary-coded decimal not implemented for add_with_carry"); } else { let a_before: i8 = self.registers.accumulator; let c_before: i8 = if self.registers.status.contains(PS_CARRY) { 1 } else { 0 }; let a_after: i8 = a_before + c_before + value; debug_assert_eq!(a_after as u8, a_before as u8 + c_before as u8 + value as u8); let did_carry = (a_after as u8) < (a_before as u8); let did_overflow = (a_before < 0 && value < 0 && a_after >= 0) || (a_before > 0 && value > 0 && a_after <= 0); let mask = PS_CARRY | PS_OVERFLOW; self.registers.status.set_with_mask(mask, Status::new(StatusArgs { carry: did_carry, overflow: did_overflow, ..StatusArgs::none() } )); self.load_accumulator(a_after); debug!("accumulator: {}", self.registers.accumulator); } } fn and(&mut self, value: i8) { let a_after = self.registers.accumulator & value; self.load_accumulator(a_after); } // TODO: Implement binary-coded decimal fn subtract_with_carry(&mut self, value: i8) { if self.registers.status.contains(PS_DECIMAL_MODE) { debug!("binary-coded decimal not implemented for \ subtract_with_carry"); } else { // A - M - (1 - C) // nc -- 'not carry' let nc: i8 = if self.registers.status.contains(PS_CARRY) { 0 } else { 1 }; let a_before: i8 = self.registers.accumulator; let a_after = a_before - value - nc; // The carry flag is set on unsigned overflow. let did_carry = (a_after as u8) > (a_before as u8); // The overflow flag is set on two's-complement overflow. 
// // range of A is -128 to 127 // range of - M - (1 - C) is -128 to 128 // -(127 + 1) to -(-128 + 0) // let over = ((nc == 0 && value < 0) || (nc == 1 && value < -1)) && a_before >= 0 && a_after < 0; let under = (a_before < 0) && (-value - nc < 0) && a_after >= 0; let did_overflow = over || under; let mask = PS_CARRY | PS_OVERFLOW; self.registers.status.set_with_mask(mask, Status::new(StatusArgs { carry: did_carry, overflow: did_overflow, ..StatusArgs::none() } )); self.load_accumulator(a_after); } } fn decrement_memory(&mut self, addr: Address) { let value_new = self.memory.get_byte(addr) - 1; self.memory.set_byte(addr, value_new); let is_negative = (value_new as i8) < 0; let is_zero = value_new == 0; self.registers.status.set_with_mask( PS_NEGATIVE | PS_ZERO, Status::new(StatusArgs { negative: is_negative, zero: is_zero, ..StatusArgs::none() } )); } fn dec_x(&mut self) { let val = self.registers.index_x; self.load_x_register(val - 1); } fn jump(&mut self, addr: Address) { self.registers.program_counter = addr; } fn branch_if_carry_clear(&mut self, addr: Address) { if !self.registers.status.contains(PS_CARRY) { self.registers.program_counter = addr; } } fn branch_if_carry_set(&mut self, addr: Address) { if self.registers.status.contains(PS_CARRY) { self.registers.program_counter = addr; } } fn branch_if_equal(&mut self, addr: Address) { if self.registers.status.contains(PS_ZERO) { self.registers.program_counter = addr; } } fn branch_if_minus(&mut self, addr: Address) { if self.registers.status.contains(PS_NEGATIVE) { self.registers.program_counter = addr; } } fn branch_if_positive(&mut self, addr: Address) { if !self.registers.status.contains(PS_NEGATIVE) { self.registers.program_counter = addr; } } fn branch_if_overflow_clear(&mut self, addr: Address) { if !self.registers.status.contains(PS_OVERFLOW) { self.registers.program_counter = addr; } } fn branch_if_overflow_set(&mut self, addr: Address) { if self.registers.status.contains(PS_OVERFLOW) { self.registers.program_counter = addr; } } // From http://www.6502.org/tutorials/compare_beyond.html: // If the Z flag is 0, then A <> NUM and BNE will branch // If the Z flag is 1, then A = NUM and BEQ will branch // If the C flag is 0, then A (unsigned) < NUM (unsigned) and BCC will branch // If the C flag is 1, then A (unsigned) >= NUM (unsigned) and BCS will branch // ... // The N flag contains most significant bit of the of the subtraction result. 
fn compare(&mut self, r: i8, val: u8) { if r as u8 >= val as u8 { self.registers.status.insert(PS_CARRY); } else { self.registers.status.remove(PS_CARRY); } if r as i8 == val as i8 { self.registers.status.insert(PS_ZERO); } else { self.registers.status.remove(PS_ZERO); } let diff: i8 = (r as i8) - (val as i8); if diff < 0 { self.registers.status.insert(PS_NEGATIVE); } else { self.registers.status.remove(PS_NEGATIVE); } } fn compare_with_a_register(&mut self, val: u8) { let a = self.registers.accumulator; self.compare(a, val); } fn compare_with_x_register(&mut self, val: u8) { debug!("compare_with_x_register"); let x = self.registers.index_x; self.compare(x, val); } fn compare_with_y_register(&mut self, val: u8) { let y = self.registers.index_y; self.compare(y, val); } fn exclusive_or(&mut self, val: u8) { let a_after = self.registers.accumulator ^ (val as i8); self.load_accumulator(a_after); } fn inclusive_or(&mut self, val: u8) { let a_after = self.registers.accumulator | (val as i8); self.load_accumulator(a_after); } fn push_on_stack(&mut self, val: u8) { let addr = self.registers.stack_pointer.to_address(); self.memory.set_byte(addr, val); self.registers.stack_pointer.decrement(); } fn pull_from_stack(&mut self) -> u8 { let addr = self.registers.stack_pointer.to_address(); let out = self.memory.get_byte(addr); self.registers.stack_pointer.increment(); out } } impl std::fmt::Debug for Machine { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { write!(f, "Machine Dump:\n\nAccumulator: {}", self.registers.accumulator) } } #[test] fn add_with_carry_test() { let mut machine = Machine::new(); machine.add_with_carry(1); assert_eq!(machine.registers.accumulator, 1); assert_eq!(machine.registers.status.contains(PS_CARRY), false); assert_eq!(machine.registers.status.contains(PS_ZERO), false); assert_eq!(machine.registers.status.contains(PS_NEGATIVE), false); assert_eq!(machine.registers.status.contains(PS_OVERFLOW), false); machine.add_with_carry(-1); assert_eq!(machine.registers.accumulator, 0); assert_eq!(machine.registers.status.contains(PS_CARRY), true); assert_eq!(machine.registers.status.contains(PS_ZERO), true); assert_eq!(machine.registers.status.contains(PS_NEGATIVE), false); assert_eq!(machine.registers.status.contains(PS_OVERFLOW), false); machine.add_with_carry(1); assert_eq!(machine.registers.accumulator, 2); assert_eq!(machine.registers.status.contains(PS_CARRY), false); assert_eq!(machine.registers.status.contains(PS_ZERO), false); assert_eq!(machine.registers.status.contains(PS_NEGATIVE), false); assert_eq!(machine.registers.status.contains(PS_OVERFLOW), false); let mut machine = Machine::new(); machine.add_with_carry(127); assert_eq!(machine.registers.accumulator, 127); assert_eq!(machine.registers.status.contains(PS_CARRY), false); assert_eq!(machine.registers.status.contains(PS_ZERO), false); assert_eq!(machine.registers.status.contains(PS_NEGATIVE), false); assert_eq!(machine.registers.status.contains(PS_OVERFLOW), false); machine.add_with_carry(-127); assert_eq!(machine.registers.accumulator, 0); assert_eq!(machine.registers.status.contains(PS_CARRY), true); assert_eq!(machine.registers.status.contains(PS_ZERO), true); assert_eq!(machine.registers.status.contains(PS_NEGATIVE), false); assert_eq!(machine.registers.status.contains(PS_OVERFLOW), false); machine.registers.status.remove(PS_CARRY); machine.add_with_carry(-128); assert_eq!(machine.registers.accumulator, -128); assert_eq!(machine.registers.status.contains(PS_CARRY), false); 
assert_eq!(machine.registers.status.contains(PS_ZERO), false); assert_eq!(machine.registers.status.contains(PS_NEGATIVE), true); assert_eq!(machine.registers.status.contains(PS_OVERFLOW), false); machine.add_with_carry(127); assert_eq!(machine.registers.accumulator, -1); assert_eq!(machine.registers.status.contains(PS_CARRY), false); assert_eq!(machine.registers.status.contains(PS_ZERO), false); assert_eq!(machine.registers.status.contains(PS_NEGATIVE), true); assert_eq!(machine.registers.status.contains(PS_OVERFLOW), false); let mut machine = Machine::new(); machine.add_with_carry(127); assert_eq!(machine.registers.accumulator, 127); assert_eq!(machine.registers.status.contains(PS_CARRY), false); assert_eq!(machine.registers.status.contains(PS_ZERO), false); assert_eq!(machine.registers.status.contains(PS_NEGATIVE), false); assert_eq!(machine.registers.status.contains(PS_OVERFLOW), false); machine.add_with_carry(1); assert_eq!(machine.registers.accumulator, -128); assert_eq!(machine.registers.status.contains(PS_CARRY), false); assert_eq!(machine.registers.status.contains(PS_ZERO), false); assert_eq!(machine.registers.status.contains(PS_NEGATIVE), true); assert_eq!(machine.registers.status.contains(PS_OVERFLOW), true); } #[test] fn and_test() { let mut machine = Machine::new(); machine.registers.accumulator = 0; machine.and(-1); assert_eq!(machine.registers.accumulator, 0); assert_eq!(machine.registers.status.contains(PS_ZERO), true); assert_eq!(machine.registers.status.contains(PS_NEGATIVE), false); machine.registers.accumulator = -1; machine.and(0); assert_eq!(machine.registers.accumulator, 0); assert_eq!(machine.registers.status.contains(PS_ZERO), true); assert_eq!(machine.registers.status.contains(PS_NEGATIVE), false); machine.registers.accumulator = -1; machine.and(0x0f); assert_eq!(machine.registers.accumulator, 0x0f); assert_eq!(machine.registers.status.contains(PS_ZERO), false); assert_eq!(machine.registers.status.contains(PS_NEGATIVE), false); machine.registers.accumulator = -1; machine.and(-128); assert_eq!(machine.registers.accumulator, -128); assert_eq!(machine.registers.status.contains(PS_ZERO), false); assert_eq!(machine.registers.status.contains(PS_NEGATIVE), true); } #[test] fn subtract_with_carry_test() { let mut machine = Machine::new(); machine.execute_instruction((Instruction::SEC, OpInput::UseImplied)); machine.registers.accumulator = 0; machine.subtract_with_carry(1); assert_eq!(machine.registers.accumulator, -1); assert_eq!(machine.registers.status.contains(PS_CARRY), true); assert_eq!(machine.registers.status.contains(PS_ZERO), false); assert_eq!(machine.registers.status.contains(PS_NEGATIVE), true); assert_eq!(machine.registers.status.contains(PS_OVERFLOW), false); machine.execute_instruction((Instruction::SEC, OpInput::UseImplied)); machine.registers.accumulator = -128; machine.subtract_with_carry(1); assert_eq!(machine.registers.accumulator, 127); assert_eq!(machine.registers.status.contains(PS_CARRY), false); assert_eq!(machine.registers.status.contains(PS_ZERO), false); assert_eq!(machine.registers.status.contains(PS_NEGATIVE), false); assert_eq!(machine.registers.status.contains(PS_OVERFLOW), true); machine.execute_instruction((Instruction::SEC, OpInput::UseImplied)); machine.registers.accumulator = 127; machine.subtract_with_carry(-1); assert_eq!(machine.registers.accumulator, -128); assert_eq!(machine.registers.status.contains(PS_CARRY), true); assert_eq!(machine.registers.status.contains(PS_ZERO), false); 
assert_eq!(machine.registers.status.contains(PS_NEGATIVE), true); assert_eq!(machine.registers.status.contains(PS_OVERFLOW), true); machine.execute_instruction((Instruction::CLC, OpInput::UseImplied)); machine.registers.accumulator = -64; machine.subtract_with_carry(64); assert_eq!(machine.registers.accumulator, 127); assert_eq!(machine.registers.status.contains(PS_CARRY), false); assert_eq!(machine.registers.status.contains(PS_ZERO), false); assert_eq!(machine.registers.status.contains(PS_NEGATIVE), false); assert_eq!(machine.registers.status.contains(PS_OVERFLOW), true); machine.execute_instruction((Instruction::SEC, OpInput::UseImplied)); machine.registers.accumulator = 0; machine.subtract_with_carry(-128); assert_eq!(machine.registers.accumulator, -128); assert_eq!(machine.registers.status.contains(PS_CARRY), true); assert_eq!(machine.registers.status.contains(PS_ZERO), false); assert_eq!(machine.registers.status.contains(PS_NEGATIVE), true); assert_eq!(machine.registers.status.contains(PS_OVERFLOW), true); machine.execute_instruction((Instruction::CLC, OpInput::UseImplied)); machine.registers.accumulator = 0; machine.subtract_with_carry(127); assert_eq!(machine.registers.accumulator, -128); assert_eq!(machine.registers.status.contains(PS_CARRY), true); assert_eq!(machine.registers.status.contains(PS_ZERO), false); assert_eq!(machine.registers.status.contains(PS_NEGATIVE), true); assert_eq!(machine.registers.status.contains(PS_OVERFLOW), false); } #[test] fn decrement_memory_test() { let mut machine = Machine::new(); let addr = Address(0xA1B2); machine.memory.set_byte(addr, 5); machine.decrement_memory(addr); assert_eq!(machine.memory.get_byte(addr), 4); assert_eq!(machine.registers.status.contains(PS_ZERO), false); assert_eq!(machine.registers.status.contains(PS_NEGATIVE), false); machine.decrement_memory(addr); assert_eq!(machine.memory.get_byte(addr), 3); assert_eq!(machine.registers.status.contains(PS_ZERO), false); assert_eq!(machine.registers.status.contains(PS_NEGATIVE), false); machine.decrement_memory(addr); machine.decrement_memory(addr); machine.decrement_memory(addr); assert_eq!(machine.memory.get_byte(addr), 0); assert_eq!(machine.registers.status.contains(PS_ZERO), true); assert_eq!(machine.registers.status.contains(PS_NEGATIVE), false); machine.decrement_memory(addr); assert_eq!(machine.memory.get_byte(addr) as i8, -1); assert_eq!(machine.registers.status.contains(PS_ZERO), false); assert_eq!(machine.registers.status.contains(PS_NEGATIVE), true); } #[test] fn logical_shift_right_test() { // Testing UseImplied version (which targets the accumulator) only, for now let mut machine = Machine::new(); machine.execute_instruction((Instruction::LDA, OpInput::UseImmediate(0))); machine.execute_instruction((Instruction::LSR, OpInput::UseImplied)); assert_eq!(machine.registers.accumulator, 0); assert_eq!(machine.registers.status.contains(PS_CARRY), false); assert_eq!(machine.registers.status.contains(PS_ZERO), true); assert_eq!(machine.registers.status.contains(PS_NEGATIVE), false); assert_eq!(machine.registers.status.contains(PS_OVERFLOW), false); machine.execute_instruction((Instruction::LDA, OpInput::UseImmediate(1))); machine.execute_instruction((Instruction::LSR, OpInput::UseImplied)); assert_eq!(machine.registers.accumulator, 0); assert_eq!(machine.registers.status.contains(PS_CARRY), true); assert_eq!(machine.registers.status.contains(PS_ZERO), true); assert_eq!(machine.registers.status.contains(PS_NEGATIVE), false); assert_eq!(machine.registers.status.contains(PS_OVERFLOW), 
false); machine.execute_instruction((Instruction::LDA, OpInput::UseImmediate(255))); machine.execute_instruction((Instruction::LSR, OpInput::UseImplied)); assert_eq!(machine.registers.accumulator, 0x7F); assert_eq!(machine.registers.status.contains(PS_CARRY), true); assert_eq!(machine.registers.status.contains(PS_ZERO), false); assert_eq!(machine.registers.status.contains(PS_NEGATIVE), false); assert_eq!(machine.registers.status.contains(PS_OVERFLOW), false); machine.execute_instruction((Instruction::LDA, OpInput::UseImmediate(254))); machine.execute_instruction((Instruction::LSR, OpInput::UseImplied)); assert_eq!(machine.registers.accumulator, 0x7F); assert_eq!(machine.registers.status.contains(PS_CARRY), false); assert_eq!(machine.registers.status.contains(PS_ZERO), false); assert_eq!(machine.registers.status.contains(PS_NEGATIVE), false); assert_eq!(machine.registers.status.contains(PS_OVERFLOW), false); } #[test] fn dec_x_test() { let mut machine = Machine::new(); machine.dec_x(); assert_eq!(machine.registers.index_x, -1); assert_eq!(machine.registers.status.contains(PS_CARRY), false); assert_eq!(machine.registers.status.contains(PS_ZERO), false); assert_eq!(machine.registers.status.contains(PS_NEGATIVE), true); assert_eq!(machine.registers.status.contains(PS_OVERFLOW), false); machine.dec_x(); assert_eq!(machine.registers.index_x, -2); assert_eq!(machine.registers.status.contains(PS_CARRY), false); assert_eq!(machine.registers.status.contains(PS_ZERO), false); assert_eq!(machine.registers.status.contains(PS_NEGATIVE), true); assert_eq!(machine.registers.status.contains(PS_OVERFLOW), false); machine.load_x_register(5); machine.dec_x(); assert_eq!(machine.registers.index_x, 4); assert_eq!(machine.registers.status.contains(PS_CARRY), false); assert_eq!(machine.registers.status.contains(PS_ZERO), false); assert_eq!(machine.registers.status.contains(PS_NEGATIVE), false); assert_eq!(machine.registers.status.contains(PS_OVERFLOW), false); machine.dec_x(); machine.dec_x(); machine.dec_x(); machine.dec_x(); assert_eq!(machine.registers.index_x, 0); assert_eq!(machine.registers.status.contains(PS_CARRY), false); assert_eq!(machine.registers.status.contains(PS_ZERO), true); assert_eq!(machine.registers.status.contains(PS_NEGATIVE), false); assert_eq!(machine.registers.status.contains(PS_OVERFLOW), false); machine.dec_x(); assert_eq!(machine.registers.index_x, -1); assert_eq!(machine.registers.status.contains(PS_CARRY), false); assert_eq!(machine.registers.status.contains(PS_ZERO), false); assert_eq!(machine.registers.status.contains(PS_NEGATIVE), true); assert_eq!(machine.registers.status.contains(PS_OVERFLOW), false); } #[test] fn jump_test() { let mut machine = Machine::new(); let addr = Address(0xA1B1); machine.jump(addr); assert_eq!(machine.registers.program_counter, addr); } #[test] fn branch_if_carry_clear_test() { let mut machine = Machine::new(); machine.execute_instruction((Instruction::SEC, OpInput::UseImplied)); machine.branch_if_carry_clear(Address(0xABCD)); assert_eq!(machine.registers.program_counter, Address(0)); machine.execute_instruction((Instruction::CLC, OpInput::UseImplied)); machine.branch_if_carry_clear(Address(0xABCD)); assert_eq!(machine.registers.program_counter, Address(0xABCD)); } #[test] fn branch_if_carry_set_test() { let mut machine = Machine::new(); machine.execute_instruction((Instruction::CLC, OpInput::UseImplied)); machine.branch_if_carry_set(Address(0xABCD)); assert_eq!(machine.registers.program_counter, Address(0)); 
machine.execute_instruction((Instruction::SEC, OpInput::UseImplied)); machine.branch_if_carry_set(Address(0xABCD)); assert_eq!(machine.registers.program_counter, Address(0xABCD)); } #[test] fn branch_if_equal_test() { let mut machine = Machine::new(); machine.branch_if_equal(Address(0xABCD)); assert_eq!(machine.registers.program_counter, Address(0)); machine.registers.status.or(PS_ZERO); machine.branch_if_equal(Address(0xABCD)); assert_eq!(machine.registers.program_counter, Address(0xABCD)); } #[test] fn branch_if_minus_test() { { let mut machine = Machine::new(); let registers_before = machine.registers; machine.branch_if_minus(Address(0xABCD)); assert_eq!(machine.registers, registers_before); assert_eq!(machine.registers.program_counter, Address(0)); } { let mut machine = Machine::new(); machine.registers.status.or(PS_NEGATIVE); let registers_before = machine.registers; machine.branch_if_minus(Address(0xABCD)); assert_eq!(machine.registers.status, registers_before.status); assert_eq!(machine.registers.program_counter, Address(0xABCD)); } } #[test] fn branch_if_positive_test() { let mut machine = Machine::new(); machine.registers.status.insert(PS_NEGATIVE); machine.branch_if_positive(Address(0xABCD)); assert_eq!(machine.registers.program_counter, Address(0)); machine.registers.status.remove(PS_NEGATIVE); machine.branch_if_positive(Address(0xABCD)); assert_eq!(machine.registers.program_counter, Address(0xABCD)); } #[test] fn branch_if_overflow_clear_test() { let mut machine = Machine::new(); machine.registers.status.insert(PS_OVERFLOW); machine.branch_if_overflow_clear(Address(0xABCD)); assert_eq!(machine.registers.program_counter, Address(0)); machine.registers.status.remove(PS_OVERFLOW); machine.branch_if_overflow_clear(Address(0xABCD)); assert_eq!(machine.registers.program_counter, Address(0xABCD)); } #[test] fn branch_if_overflow_set_test() { let mut machine = Machine::new(); machine.branch_if_overflow_set(Address(0xABCD)); assert_eq!(machine.registers.program_counter, Address(0)); machine.registers.status.insert(PS_OVERFLOW); machine.branch_if_overflow_set(Address(0xABCD)); assert_eq!(machine.registers.program_counter, Address(0xABCD)); } #[cfg(test)] fn compare_test_helper<F> ( compare: &mut F, load_instruction: Instruction ) where F: FnMut(&mut Machine, u8) { let mut machine = Machine::new(); machine.execute_instruction( (load_instruction, OpInput::UseImmediate(127)) ); compare(&mut machine, 127); assert!( machine.registers.status.contains(PS_ZERO )); assert!( machine.registers.status.contains(PS_CARRY )); assert!(!machine.registers.status.contains(PS_NEGATIVE)); machine.execute_instruction( (load_instruction, OpInput::UseImmediate(127)) ); compare(&mut machine, 1); assert!(!machine.registers.status.contains(PS_ZERO )); assert!( machine.registers.status.contains(PS_CARRY )); assert!(!machine.registers.status.contains(PS_NEGATIVE)); machine.execute_instruction( (load_instruction, OpInput::UseImmediate(1)) ); compare(&mut machine, 2); assert!(!machine.registers.status.contains(PS_ZERO )); assert!(!machine.registers.status.contains(PS_CARRY )); assert!( machine.registers.status.contains(PS_NEGATIVE)); machine.execute_instruction( (load_instruction, OpInput::UseImmediate(20)) ); compare(&mut machine, -50); assert!(!machine.registers.status.contains(PS_ZERO )); assert!(!machine.registers.status.contains(PS_CARRY )); assert!(!machine.registers.status.contains(PS_NEGATIVE)); machine.execute_instruction( (load_instruction, OpInput::UseImmediate(1)) ); compare(&mut machine, -1); 
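// Comparing 1 with -1 (0xFF unsigned): 1 < 255, so carry stays clear; 1 != -1, so zero stays clear; 1 - (-1) = 2 is non-negative, so negative stays clear.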
assert!(!machine.registers.status.contains(PS_ZERO )); assert!(!machine.registers.status.contains(PS_CARRY )); assert!(!machine.registers.status.contains(PS_NEGATIVE)); machine.execute_instruction( (load_instruction, OpInput::UseImmediate(127)) ); compare(&mut machine, -128); assert!(!machine.registers.status.contains(PS_ZERO )); assert!(!machine.registers.status.contains(PS_CARRY )); assert!( machine.registers.status.contains(PS_NEGATIVE)); } #[test] fn compare_with_a_register_test() { compare_test_helper( &mut |machine: &mut Machine, val: u8| { machine.compare_with_a_register(val); }, Instruction::LDA ); } #[test] fn compare_with_x_register_test() { compare_test_helper( &mut |machine: &mut Machine, val: u8| { machine.compare_with_x_register(val); }, Instruction::LDX ); } #[test] fn compare_with_y_register_test() { compare_test_helper( &mut |machine: &mut Machine, val: u8| { machine.compare_with_y_register(val); }, Instruction::LDY ); } #[test] fn exclusive_or_test() { let mut machine = Machine::new(); for a_before in range_incl(0u8, 255u8) { for val in range_incl(0u8, 255u8) { machine.execute_instruction( (Instruction::LDA, OpInput::UseImmediate(a_before)) ); machine.exclusive_or(val); let a_after = a_before ^ val; assert_eq!(machine.registers.accumulator, a_after as i8); if a_after == 0 { assert!(machine.registers.status.contains(PS_ZERO)); } else { assert!(!machine.registers.status.contains(PS_ZERO)); } if (a_after as i8) < 0 { assert!(machine.registers.status.contains(PS_NEGATIVE)); } else { assert!(!machine.registers.status.contains(PS_NEGATIVE)); } } }<|fim▁hole|>#[test] fn inclusive_or_test() { let mut machine = Machine::new(); for a_before in range_incl(0u8, 255u8) { for val in range_incl(0u8, 255u8) { machine.execute_instruction( (Instruction::LDA, OpInput::UseImmediate(a_before)) ); machine.inclusive_or(val); let a_after = a_before | val; assert_eq!(machine.registers.accumulator, a_after as i8); if a_after == 0 { assert!(machine.registers.status.contains(PS_ZERO)); } else { assert!(!machine.registers.status.contains(PS_ZERO)); } if (a_after as i8) < 0 { assert!(machine.registers.status.contains(PS_NEGATIVE)); } else { assert!(!machine.registers.status.contains(PS_NEGATIVE)); } } } }<|fim▁end|>
}
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># stdlib from collections import defaultdict import sys from typing import Any as TypeAny from typing import Callable from typing import Dict from typing import KeysView from typing import List as TypeList from typing import Set # third party from cachetools import cached from cachetools.keys import hashkey # relative from ...ast import add_classes from ...ast import add_methods from ...ast import add_modules from ...ast import globals from ...logger import traceback_and_raise from .union import lazy_pairing def get_cache() -> Dict: return dict() @cached(cache=get_cache(), key=lambda path, lib_ast: hashkey(path)) def solve_ast_type_functions(path: str, lib_ast: globals.Globals) -> KeysView: root = lib_ast for path_element in path.split("."): root = getattr(root, path_element) return root.attrs.keys() def get_allowed_functions( lib_ast: globals.Globals, union_types: TypeList[str] ) -> Dict[str, bool]: """ This function generates a set of functions that can go into a union type. A function has to meet the following requirements to be present on a union type: 1. If it's present on all Class attributes associated with the union types on the ast, add it. 2. If it's not present on all Class attributes associated with the union types, check if they exist on the original type functions list. If they do exist, drop it, if not, add it. Args: lib_ast (Globals): the AST on which we want to generate the union pointer. union_types (List[str]): the qualnames of the types on which we want a union. Returns: allowed_functions (dict): The keys of the dict are function names (str) and the values are Bool (if they are allowed or not). """ allowed_functions: Dict[str, bool] = defaultdict(lambda: True) def solve_real_type_functions(path: str) -> Set[str]: parts = path.split(".") klass_name = parts[-1] # TODO: a better way. Loot at https://github.com/OpenMined/PySyft/issues/5249 # A way to walkaround the problem we can't `import torch.return_types` and # get it from `sys.modules`. 
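# e.g. a qualname like "torch.return_types.max" splits so that parts[:-1] is ["torch", "return_types"] and klass_name is "max".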
if parts[-2] == "return_types": modu = getattr(sys.modules["torch"], "return_types") else: modu = sys.modules[".".join(parts[:-1])] return set(dir(getattr(modu, klass_name))) for union_type in union_types: real_type_function_set = solve_real_type_functions(union_type) ast_type_function_set = solve_ast_type_functions(union_type, lib_ast) rejected_function_set = real_type_function_set - ast_type_function_set for accepted_function in ast_type_function_set: allowed_functions[accepted_function] &= True for rejected_function in rejected_function_set:<|fim▁hole|> return allowed_functions def create_union_ast( lib_ast: globals.Globals, client: TypeAny = None ) -> globals.Globals: ast = globals.Globals(client) modules = ["syft", "syft.lib", "syft.lib.misc", "syft.lib.misc.union"] classes = [] methods = [] for klass in lazy_pairing.keys(): classes.append( ( f"syft.lib.misc.union.{klass.__name__}", f"syft.lib.misc.union.{klass.__name__}", klass, ) ) union_types = lazy_pairing[klass] allowed_functions = get_allowed_functions(lib_ast, union_types) for target_method, allowed in allowed_functions.items(): if not allowed: continue def generate_func(target_method: str) -> Callable: def func(self: TypeAny, *args: TypeAny, **kwargs: TypeAny) -> TypeAny: func = getattr(self, target_method, None) if func: return func(*args, **kwargs) else: traceback_and_raise( ValueError( f"Can't call {target_method} on {klass} with the instance type of {type(self)}" ) ) return func def generate_attribute(target_attribute: str) -> TypeAny: def prop_get(self: TypeAny) -> TypeAny: prop = getattr(self, target_attribute, None) if prop is not None: return prop else: ValueError( f"Can't call {target_attribute} on {klass} with the instance type of {type(self)}" ) def prop_set(self: TypeAny, value: TypeAny) -> TypeAny: setattr(self, target_attribute, value) return property(prop_get, prop_set) # TODO: Support dynamic properties for types in AST # torch.Tensor.grad and torch.Tensor.data are not in the class # Issue: https://github.com/OpenMined/PySyft/issues/5338 if target_method == "grad" and "Tensor" in klass.__name__: setattr(klass, target_method, generate_attribute(target_method)) methods.append( ( f"syft.lib.misc.union.{klass.__name__}.{target_method}", "torch.Tensor", ) ) continue elif target_method == "data" and "Tensor" in klass.__name__: setattr(klass, target_method, generate_attribute(target_method)) else: setattr(klass, target_method, generate_func(target_method)) methods.append( ( f"syft.lib.misc.union.{klass.__name__}.{target_method}", "syft.lib.python.Any", ) ) add_modules(ast, modules) add_classes(ast, classes) add_methods(ast, methods) for ast_klass in ast.classes: ast_klass.create_pointer_class() ast_klass.create_send_method() ast_klass.create_storable_object_attr_convenience_methods() return ast<|fim▁end|>
allowed_functions[rejected_function] = False
<|file_name|>stand_alone.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python """ Standaone Rule ============== This is a customer spec, parser and rule and can be run against the local host using the following command:: $ insights-run -p examples.rules.stand_alone or from the examples/rules directory:: $ ./stand_alone.py """ from __future__ import print_function from collections import namedtuple from insights import get_active_lines, parser, Parser from insights import make_fail, make_pass, rule, run from insights.core.spec_factory import SpecSet, simple_file from insights.parsers.redhat_release import RedhatRelease # Error key used in make_fail ERROR_KEY = "TOO_MANY_HOSTS" # jinga2 template displayed for rule responses CONTENT = { make_fail: """Too many hosts in /etc/hosts: {{num}}""", make_pass: """Just right""" } class Specs(SpecSet): """ Datasources for collection from local host """ hosts = simple_file("/etc/hosts") @parser(Specs.hosts) class HostParser(Parser): """ Parses the results of the ``hosts`` Specs Attributes: hosts (list): List of the namedtuple Host which are the contents of the hosts file including ``.ip``, ``.host``, and ``.aliases``. """ Host = namedtuple("Host", ["ip", "host", "aliases"]) def parse_content(self, content): """ Method to parse the contents of file ``/etc/hosts`` This method must be implemented by each parser. Arguments: content (list): List of strings that are the contents of the /etc/hosts file. """ self.hosts = [] for line in get_active_lines(content): # remove inline comments line = line.partition("#")[0].strip() # break the line into parts parts = line.split() ip, host = parts[:2] aliases = parts[2:] self.hosts.append(HostParser.Host(ip, host, aliases)) def __repr__(self): """ str: Returns string representation of the class """ me = self.__class__.__name__ msg = "%s([" + ", ".join([str(d) for d in self.hosts]) + "])" return msg % me @rule(HostParser, RedhatRelease, content=CONTENT) def report(hp, rhr): """ Rule reports a response if there is more than 1 host entry defined in the /etc/hosts file. Arguments: hp (HostParser): Parser object for the custom parser in this module. rhr (RedhatRelease): Parser object for the /etc/redhat-release file. """ if len(hp.hosts) > 1: return make_fail("TOO_MANY_HOSTS", num=len(hp.hosts)) return make_pass("TOO_MANY_HOSTS", num=len(hp.hosts)) <|fim▁hole|><|fim▁end|>
if __name__ == "__main__": run(report, print_summary=True)
<|file_name|>EventDirective.js<|end_file_name|><|fim▁begin|>import { removeFromArray } from '../../../utils/array'; import fireEvent from '../../../events/fireEvent'; import Fragment from '../../Fragment'; import createFunction from '../../../shared/createFunction'; import { unbind } from '../../../shared/methodCallers'; import noop from '../../../utils/noop'; import resolveReference from '../../resolvers/resolveReference'; const eventPattern = /^event(?:\.(.+))?$/; const argumentsPattern = /^arguments\.(\d*)$/; const dollarArgsPattern = /^\$(\d*)$/; export default class EventDirective { constructor ( owner, event, template ) { this.owner = owner; this.event = event; this.template = template; this.ractive = owner.parentFragment.ractive; this.parentFragment = owner.parentFragment; this.context = null; this.passthru = false; // method calls this.method = null; this.resolvers = null; this.models = null; this.argsFn = null; // handler directive this.action = null; this.args = null; } bind () { this.context = this.parentFragment.findContext(); const template = this.template; if ( template.m ) { this.method = template.m; if ( this.passthru = template.g ) { // on-click="foo(...arguments)" // no models or args, just pass thru values } else { this.resolvers = []; this.models = template.a.r.map( ( ref, i ) => { if ( eventPattern.test( ref ) ) { // on-click="foo(event.node)" return { event: true, keys: ref.length > 5 ? ref.slice( 6 ).split( '.' ) : [], unbind: noop }; } const argMatch = argumentsPattern.exec( ref ); if ( argMatch ) { // on-click="foo(arguments[0])" return { argument: true, index: argMatch[1] }; } const dollarMatch = dollarArgsPattern.exec( ref ); if ( dollarMatch ) { // on-click="foo($1)" return { argument: true, index: dollarMatch[1] - 1 }; } let resolver; const model = resolveReference( this.parentFragment, ref ); if ( !model ) { resolver = this.parentFragment.resolve( ref, model => { this.models[i] = model; removeFromArray( this.resolvers, resolver ); }); this.resolvers.push( resolver ); } return model; }); this.argsFn = createFunction( template.a.s, template.a.r.length ); } } else { // TODO deprecate this style of directive this.action = typeof template === 'string' ? // on-click='foo' template : typeof template.n === 'string' ? // on-click='{{dynamic}}' template.n : new Fragment({ owner: this, template: template.n }); this.args = template.a ? // static arguments ( typeof template.a === 'string' ? [ template.a ] : template.a ) : template.d ? // dynamic arguments new Fragment({ owner: this, template: template.d }) : []; // no arguments } if ( this.template.n && typeof this.template.n !== 'string' ) this.action.bind(); if ( this.template.d ) this.args.bind(); } bubble () { if ( !this.dirty ) { this.dirty = true; this.owner.bubble();<|fim▁hole|> } } fire ( event, passedArgs ) { // augment event object if ( event ) { event.keypath = this.context.getKeypath(); event.context = this.context.get(); event.index = this.parentFragment.indexRefs; if ( passedArgs ) passedArgs.unshift( event ); } if ( this.method ) { if ( typeof this.ractive[ this.method ] !== 'function' ) { throw new Error( `Attempted to call a non-existent method ("${this.method}")` ); } let args; if ( this.passthru ) { args = passedArgs; } else { const values = this.models.map( model => { if ( !model ) return undefined; if ( model.event ) { let obj = event; let keys = model.keys.slice(); while ( keys.length ) obj = obj[ keys.shift() ]; return obj; } if ( model.argument ) { return passedArgs ? 
passedArgs[ model.index ] : void 0; } if ( model.wrapper ) { return model.wrapper.value; } return model.get(); }); args = this.argsFn.apply( null, values ); } // make event available as `this.event` const ractive = this.ractive; const oldEvent = ractive.event; ractive.event = event; ractive[ this.method ].apply( ractive, args ); ractive.event = oldEvent; } else { const action = this.action.toString(); let args = this.template.d ? this.args.getArgsList() : this.args; if ( event ) event.name = action; fireEvent( this.ractive, action, { event, args }); } } rebind () { throw new Error( 'EventDirective$rebind not yet implemented!' ); // TODO add tests } render () { this.event.listen( this ); } unbind () { const template = this.template; if ( template.m ) { this.resolvers.forEach( unbind ); this.resolvers = []; this.models.forEach( model => { if ( model ) model.unbind(); }); } else { // TODO this is brittle and non-explicit, fix it if ( this.action.unbind ) this.action.unbind(); if ( this.args.unbind ) this.args.unbind(); } } unrender () { this.event.unlisten(); } update () { if ( this.method ) return; // nothing to do // ugh legacy if ( this.action.update ) this.action.update(); if ( this.template.d ) this.args.update(); this.dirty = false; } }<|fim▁end|>
<|file_name|>models.py<|end_file_name|><|fim▁begin|>"""Django models for MK8 Kart Comparison Tool.""" from django.core.exceptions import ObjectDoesNotExist from django.db import models from django.templatetags.static import static import re import uuid import logging from ipware.ip import get_ip, get_real_ip logger = logging.getLogger(__name__) class KartComponent(models.Model): """Abstract model for all kart components.""" name = models.CharField(max_length=30, blank=True) def __unicode__(self): """Return the component name.""" return self.name def file(self): """Return a lowercase form of the name used for image filenames.""" return re.sub(ur'[\W_]+', u'', self.name.lower(), flags=re.UNICODE) class Meta: abstract = True ordering = ['pk'] class CommonStats(KartComponent): """Common stats across all kart components.""" speed_ground = models.DecimalField(max_digits=3, decimal_places=2) speed_water = models.DecimalField(max_digits=3, decimal_places=2) speed_air = models.DecimalField(max_digits=3, decimal_places=2) speed_antigravity = models.DecimalField(max_digits=3, decimal_places=2) acceleration = models.DecimalField(max_digits=3, decimal_places=2) weight = models.DecimalField(max_digits=3, decimal_places=2) handling_ground = models.DecimalField(max_digits=3, decimal_places=2) handling_water = models.DecimalField(max_digits=3, decimal_places=2) handling_air = models.DecimalField(max_digits=3, decimal_places=2) handling_antigravity = models.DecimalField(max_digits=3, decimal_places=2) traction = models.DecimalField(max_digits=3, decimal_places=2) miniturbo = models.DecimalField(max_digits=3, decimal_places=2) class Meta: abstract = True class CharacterStats(CommonStats): """Maps racers to the stats belonging to the 9 weight subclasses.""" sort_order = models.CharField(max_length=5) class Kart(CommonStats): """Stats for a kart body.""" def file(self): """Return a lowercase form of the name used for image filenames.""" return static('images/mk8/karts/%s.png' % super(Kart, self).file()) class Meta: verbose_name_plural = "karts" ordering = ['pk'] class Wheel(CommonStats): """Stats for a set of kart wheels.""" def file(self): """Return a lowercase form of the name used for image filenames.""" return static('images/mk8/wheels/%s.png' % super(Wheel, self).file()) class Meta: ordering = ['pk'] class Glider(CommonStats): """Stats for a kart glider.""" def file(self): """Return a lowercase form of the name used for image filenames.""" return static('images/mk8/gliders/%s.png' % super(Glider, self).file()) class Meta: ordering = ['pk'] class Character(KartComponent): """Stats for a kart racer/driver.""" stats = models.ForeignKey(CharacterStats) def file(self): """Return a lowercase form of the name used for image filenames.""" return static('images/mk8/faces/%s.png' % super(Character, self).file()) class Meta: ordering = ['pk'] class KartConfig(): """Stats for a complete kart configuration.""" <|fim▁hole|> """Create a config with the supplied component ids.""" try: self.character = Character.objects.get(pk=character_id) self.kart = Kart.objects.get(pk=kart_id) self.wheel = Wheel.objects.get(pk=wheel_id) self.glider = Glider.objects.get(pk=glider_id) self.valid = True self.speed_ground = \ self.character.stats.speed_ground + \ self.kart.speed_ground + \ self.wheel.speed_ground + \ self.glider.speed_ground self.speed_water = \ self.character.stats.speed_water + \ self.kart.speed_water + \ self.wheel.speed_water + \ self.glider.speed_water self.speed_air = \ self.character.stats.speed_air + \ 
self.kart.speed_air + \ self.wheel.speed_air + \ self.glider.speed_air self.speed_antigravity = \ self.character.stats.speed_antigravity + \ self.kart.speed_antigravity + \ self.wheel.speed_antigravity + \ self.glider.speed_antigravity self.acceleration = \ self.character.stats.acceleration + \ self.kart.acceleration + \ self.wheel.acceleration + \ self.glider.acceleration self.weight = \ self.character.stats.weight + \ self.kart.weight + \ self.wheel.weight + \ self.glider.weight self.handling_ground = \ self.character.stats.handling_ground + \ self.kart.handling_ground + \ self.wheel.handling_ground + \ self.glider.handling_ground self.handling_water = \ self.character.stats.handling_water + \ self.kart.handling_water + \ self.wheel.handling_water + \ self.glider.handling_water self.handling_air = \ self.character.stats.handling_air + \ self.kart.handling_air + \ self.wheel.handling_air + \ self.glider.handling_air self.handling_antigravity = \ self.character.stats.handling_antigravity + \ self.kart.handling_antigravity + \ self.wheel.handling_antigravity + \ self.glider.handling_antigravity self.traction = \ self.character.stats.traction + \ self.kart.traction + \ self.wheel.traction + \ self.glider.traction self.miniturbo = \ self.character.stats.miniturbo + \ self.kart.miniturbo + \ self.wheel.miniturbo + \ self.glider.miniturbo except ObjectDoesNotExist: self.valid = False class ConfigList(models.Model): """A saved kart configuration list associated with a url hash.""" URL_LENGTH = 5 url = models.CharField(max_length=URL_LENGTH) create_ip = models.GenericIPAddressField(default='0.0.0.0') create_date = models.DateTimeField(auto_now_add=True) view_count = models.PositiveIntegerField(default=0) @classmethod def create(cls, request): """Initialize a ConfigList with visitor's IP and generated url hash.""" ip = get_real_ip(request) if ip is None: ip = get_ip(request) if ip is None: ip = '111.111.111.111' url = cls.generate_url(cls.URL_LENGTH) list = cls(url=url, create_ip=ip) logger.info('Adding ConfigList \'%s\' (%s)' % (url, ip)) return list @staticmethod def generate_url(length): """Generate a unique url hash.""" while True: url_hash = uuid.uuid4().hex[0:length] try: ConfigList.objects.get(url=url_hash) break except ObjectDoesNotExist: return url_hash def __unicode__(self): """Display url hash to id mapping.""" return '[\'%s\' -> %s]' % (self.url, self.id) class ConfigListItem(models.Model): """A saved kart configuration associated with a ConfigList.""" list = models.ForeignKey(ConfigList) character = models.ForeignKey(Character) kart = models.ForeignKey(Kart) wheel = models.ForeignKey(Wheel) glider = models.ForeignKey(Glider) @classmethod def create(cls, list, character, kart, wheel, glider): """Initialize ConfigListItem with default parameters order.""" logger.info('Adding \'%s\' ConfigListItem [%s, %s, %s, %s]' % (list.url, character, kart, wheel, glider)) return cls(list=list, character=character, kart=kart, wheel=wheel, glider=glider) class Meta: unique_together = ("list", "character", "kart", "wheel", "glider") class KartRecord(models.Model): """A record of each anonymous kart configuration generated by users.""" character = models.ForeignKey(Character) kart = models.ForeignKey(Kart) wheel = models.ForeignKey(Wheel) glider = models.ForeignKey(Glider) create_ip = models.GenericIPAddressField(default='0.0.0.0') create_date = models.DateTimeField(auto_now_add=True)<|fim▁end|>
def __init__(self, (character_id, kart_id, wheel_id, glider_id)):
<|file_name|>pdf_viewer.js<|end_file_name|><|fim▁begin|>/* Copyright 2014 Mozilla Foundation * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ /*jshint globalstrict: false */ /* globals PDFJS, PDFViewer, PDFPageView, TextLayerBuilder, PDFLinkService, DefaultTextLayerFactory, AnnotationLayerBuilder, PDFHistory, DefaultAnnotationLayerFactory, DownloadManager, ProgressBar */ // Initializing PDFJS global object (if still undefined) if (typeof PDFJS === 'undefined') { (typeof window !== 'undefined' ? window : this).PDFJS = {}; } (function pdfViewerWrapper() { 'use strict'; var CSS_UNITS = 96.0 / 72.0; var DEFAULT_SCALE_VALUE = 'auto'; var DEFAULT_SCALE = 1.0; var UNKNOWN_SCALE = 0; var MAX_AUTO_SCALE = 1.25; var SCROLLBAR_PADDING = 40; var VERTICAL_PADDING = 5; /** * Returns scale factor for the canvas. It makes sense for the HiDPI displays. * @return {Object} The object with horizontal (sx) and vertical (sy) scales. The scaled property is set to false if scaling is not required, true otherwise. */ function getOutputScale(ctx) { var devicePixelRatio = window.devicePixelRatio || 1; var backingStoreRatio = ctx.webkitBackingStorePixelRatio || ctx.mozBackingStorePixelRatio || ctx.msBackingStorePixelRatio || ctx.oBackingStorePixelRatio || ctx.backingStorePixelRatio || 1; var pixelRatio = devicePixelRatio / backingStoreRatio; return { sx: pixelRatio, sy: pixelRatio, scaled: pixelRatio !== 1 }; } /** * Scrolls specified element into view of its parent. * @param {Object} element - The element to be visible. * @param {Object} spot - An object with optional top and left properties, * specifying the offset from the top left edge. * @param {boolean} skipOverflowHiddenElements - Ignore elements that have * the CSS rule `overflow: hidden;` set. The default is false. */ function scrollIntoView(element, spot, skipOverflowHiddenElements) { // Assuming offsetParent is available (it's not available when viewer is in // hidden iframe or object). We have to scroll: if the offsetParent is not set // producing the error. See also animationStartedClosure. var parent = element.offsetParent; if (!parent) { console.error('offsetParent is not set -- cannot scroll'); return; } var checkOverflow = skipOverflowHiddenElements || false; var offsetY = element.offsetTop + element.clientTop; var offsetX = element.offsetLeft + element.clientLeft; while (parent.clientHeight === parent.scrollHeight || (checkOverflow && getComputedStyle(parent).overflow === 'hidden')) { if (parent.dataset._scaleY) { offsetY /= parent.dataset._scaleY; offsetX /= parent.dataset._scaleX; } offsetY += parent.offsetTop; offsetX += parent.offsetLeft; parent = parent.offsetParent; if (!parent) { return; // no need to scroll } } if (spot) { if (spot.top !== undefined) { offsetY += spot.top; } if (spot.left !== undefined) { offsetX += spot.left; parent.scrollLeft = offsetX; } } parent.scrollTop = offsetY; } /** * Helper function to start monitoring the scroll event and converting them into * PDF.js friendly one: with scroll debounce and scroll direction. 
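 * The returned state object carries `down` (true when the last scroll moved the view down) and `lastY` (the last observed scrollTop); the same object is passed to the callback on each debounced scroll.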
*/ function watchScroll(viewAreaElement, callback) { var debounceScroll = function debounceScroll(evt) { if (rAF) { return; } // schedule an invocation of scroll for next animation frame. rAF = window.requestAnimationFrame(function viewAreaElementScrolled() { rAF = null; var currentY = viewAreaElement.scrollTop; var lastY = state.lastY; if (currentY !== lastY) { state.down = currentY > lastY; } state.lastY = currentY; callback(state); }); }; var state = { down: true, lastY: viewAreaElement.scrollTop, _eventHandler: debounceScroll }; var rAF = null; viewAreaElement.addEventListener('scroll', debounceScroll, true); return state; } /** * Helper function to parse query string (e.g. ?param1=value&parm2=...). */ function parseQueryString(query) { var parts = query.split('&'); var params = {}; for (var i = 0, ii = parts.length; i < ii; ++i) { var param = parts[i].split('='); var key = param[0].toLowerCase(); var value = param.length > 1 ? param[1] : null; params[decodeURIComponent(key)] = decodeURIComponent(value); } return params; } /** * Use binary search to find the index of the first item in a given array which * passes a given condition. The items are expected to be sorted in the sense * that if the condition is true for one item in the array, then it is also true * for all following items. * * @returns {Number} Index of the first array element to pass the test, * or |items.length| if no such element exists. */ function binarySearchFirstItem(items, condition) { var minIndex = 0; var maxIndex = items.length - 1; if (items.length === 0 || !condition(items[maxIndex])) { return items.length; } if (condition(items[minIndex])) { return minIndex; } while (minIndex < maxIndex) { var currentIndex = (minIndex + maxIndex) >> 1; var currentItem = items[currentIndex]; if (condition(currentItem)) { maxIndex = currentIndex; } else { minIndex = currentIndex + 1; } } return minIndex; /* === maxIndex */ } /** * Approximates float number as a fraction using Farey sequence (max order * of 8). * @param {number} x - Positive float number. * @returns {Array} Estimated fraction: the first array item is a numerator, * the second one is a denominator. */ function approximateFraction(x) { // Fast paths for int numbers or their inversions. if (Math.floor(x) === x) { return [x, 1]; } var xinv = 1 / x; var limit = 8; if (xinv > limit) { return [1, limit]; } else if (Math.floor(xinv) === xinv) { return [1, xinv]; } var x_ = x > 1 ? xinv : x; // a/b and c/d are neighbours in Farey sequence. var a = 0, b = 1, c = 1, d = 1; // Limiting search to order 8. while (true) { // Generating next term in sequence (order of q). var p = a + c, q = b + d; if (q > limit) { break; } if (x_ <= p / q) { c = p; d = q; } else { a = p; b = q; } } // Select closest of the neighbours to x. if (x_ - a / b < c / d - x_) { return x_ === x ? [a, b] : [b, a]; } else { return x_ === x ? [c, d] : [d, c]; } } function roundToDivide(x, div) { var r = x % div; return r === 0 ? x : Math.round(x - r + div); } /** * Generic helper to find out what elements are visible within a scroll pane. 
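 * Returns { first, last, views }, where each entry of `views` is { id, x, y, view, percent }; when sortByVisibility is true the entries are sorted by visible percentage (descending), with ties broken by id.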
*/ function getVisibleElements(scrollEl, views, sortByVisibility) { var top = scrollEl.scrollTop, bottom = top + scrollEl.clientHeight; var left = scrollEl.scrollLeft, right = left + scrollEl.clientWidth; function isElementBottomBelowViewTop(view) { var element = view.div; var elementBottom = element.offsetTop + element.clientTop + element.clientHeight; return elementBottom > top; } var visible = [], view, element; var currentHeight, viewHeight, hiddenHeight, percentHeight; var currentWidth, viewWidth; var firstVisibleElementInd = (views.length === 0) ? 0 : binarySearchFirstItem(views, isElementBottomBelowViewTop); for (var i = firstVisibleElementInd, ii = views.length; i < ii; i++) { view = views[i]; element = view.div; currentHeight = element.offsetTop + element.clientTop; viewHeight = element.clientHeight; if (currentHeight > bottom) { break; } currentWidth = element.offsetLeft + element.clientLeft; viewWidth = element.clientWidth; if (currentWidth + viewWidth < left || currentWidth > right) { continue; } hiddenHeight = Math.max(0, top - currentHeight) + Math.max(0, currentHeight + viewHeight - bottom); percentHeight = ((viewHeight - hiddenHeight) * 100 / viewHeight) | 0; visible.push({ id: view.id, x: currentWidth, y: currentHeight, view: view, percent: percentHeight }); } var first = visible[0]; var last = visible[visible.length - 1]; if (sortByVisibility) { visible.sort(function(a, b) { var pc = a.percent - b.percent; if (Math.abs(pc) > 0.001) { return -pc; } return a.id - b.id; // ensure stability }); } return {first: first, last: last, views: visible}; } /** * Event handler to suppress context menu. */ function noContextMenuHandler(e) { e.preventDefault(); } /** * Returns the filename or guessed filename from the url (see issue 3455). * url {String} The original PDF location. * @return {String} Guessed PDF file name. */ function getPDFFileNameFromURL(url) { var reURI = /^(?:([^:]+:)?\/\/[^\/]+)?([^?#]*)(\?[^#]*)?(#.*)?$/; // SCHEME HOST 1.PATH 2.QUERY 3.REF // Pattern to get last matching NAME.pdf var reFilename = /[^\/?#=]+\.pdf\b(?!.*\.pdf\b)/i; var splitURI = reURI.exec(url); var suggestedFilename = reFilename.exec(splitURI[1]) || reFilename.exec(splitURI[2]) || reFilename.exec(splitURI[3]); if (suggestedFilename) { suggestedFilename = suggestedFilename[0]; if (suggestedFilename.indexOf('%') !== -1) { // URL-encoded %2Fpath%2Fto%2Ffile.pdf should be file.pdf try { suggestedFilename = reFilename.exec(decodeURIComponent(suggestedFilename))[0]; } catch(e) { // Possible (extremely rare) errors: // URIError "Malformed URI", e.g. for "%AA.pdf" // TypeError "null has no properties", e.g. for "%2F.pdf" } } } return suggestedFilename || 'document.pdf'; } var ProgressBar = (function ProgressBarClosure() { function clamp(v, min, max) { return Math.min(Math.max(v, min), max); } function ProgressBar(id, opts) { this.visible = true; // Fetch the sub-elements for later. this.div = document.querySelector(id + ' .progress'); // Get the loading bar element, so it can be resized to fit the viewer. this.bar = this.div.parentNode; // Get options, with sensible defaults. this.height = opts.height || 100; this.width = opts.width || 100; this.units = opts.units || '%'; // Initialize heights. 
this.div.style.height = this.height + this.units; this.percent = 0; } ProgressBar.prototype = { updateBar: function ProgressBar_updateBar() { if (this._indeterminate) { this.div.classList.add('indeterminate'); this.div.style.width = this.width + this.units; return; } this.div.classList.remove('indeterminate'); var progressSize = this.width * this._percent / 100; this.div.style.width = progressSize + this.units; }, get percent() { return this._percent; }, set percent(val) { this._indeterminate = isNaN(val); this._percent = clamp(val, 0, 100); this.updateBar(); }, setWidth: function ProgressBar_setWidth(viewer) { if (viewer) { var container = viewer.parentNode; var scrollbarWidth = container.offsetWidth - viewer.offsetWidth; if (scrollbarWidth > 0) { this.bar.setAttribute('style', 'width: calc(100% - ' + scrollbarWidth + 'px);'); } } }, hide: function ProgressBar_hide() { if (!this.visible) { return; } this.visible = false; this.bar.classList.add('hidden'); document.body.classList.remove('loadingInProgress'); }, show: function ProgressBar_show() { if (this.visible) { return; } this.visible = true; document.body.classList.add('loadingInProgress'); this.bar.classList.remove('hidden'); } }; return ProgressBar; })(); /** * Performs navigation functions inside PDF, such as opening specified page, * or destination. * @class * @implements {IPDFLinkService} */ var PDFLinkService = (function () { /** * @constructs PDFLinkService */ function PDFLinkService() { this.baseUrl = null; this.pdfDocument = null; this.pdfViewer = null; this.pdfHistory = null; this._pagesRefCache = null; } PDFLinkService.prototype = { setDocument: function PDFLinkService_setDocument(pdfDocument, baseUrl) { this.baseUrl = baseUrl; this.pdfDocument = pdfDocument; this._pagesRefCache = Object.create(null); }, setViewer: function PDFLinkService_setViewer(pdfViewer) { this.pdfViewer = pdfViewer; }, setHistory: function PDFLinkService_setHistory(pdfHistory) { this.pdfHistory = pdfHistory; }, /** * @returns {number} */ get pagesCount() { return this.pdfDocument.numPages; }, /** * @returns {number} */ get page() { return this.pdfViewer.currentPageNumber; }, /** * @param {number} value */ set page(value) { this.pdfViewer.currentPageNumber = value; }, /** * @param dest - The PDF destination object. */ navigateTo: function PDFLinkService_navigateTo(dest) { var destString = ''; var self = this; var goToDestination = function(destRef) { // dest array looks like that: <page-ref> </XYZ|FitXXX> <args..> var pageNumber = destRef instanceof Object ? self._pagesRefCache[destRef.num + ' ' + destRef.gen + ' R'] : (destRef + 1); if (pageNumber) { if (pageNumber > self.pagesCount) { pageNumber = self.pagesCount; } self.pdfViewer.scrollPageIntoView(pageNumber, dest); if (self.pdfHistory) { // Update the browsing history. self.pdfHistory.push({ dest: dest, hash: destString, page: pageNumber }); } } else { self.pdfDocument.getPageIndex(destRef).then(function (pageIndex) { var pageNum = pageIndex + 1; var cacheKey = destRef.num + ' ' + destRef.gen + ' R'; self._pagesRefCache[cacheKey] = pageNum; goToDestination(destRef); }); } }; var destinationPromise; if (typeof dest === 'string') { destString = dest; destinationPromise = this.pdfDocument.getDestination(dest); } else { destinationPromise = Promise.resolve(dest); } destinationPromise.then(function(destination) { dest = destination; if (!(destination instanceof Array)) { return; // invalid destination } goToDestination(destination[0]); }); }, /** * @param dest - The PDF destination object. 
* @returns {string} The hyperlink to the PDF object. */ getDestinationHash: function PDFLinkService_getDestinationHash(dest) { if (typeof dest === 'string') { return this.getAnchorUrl('#' + escape(dest)); } if (dest instanceof Array) { var destRef = dest[0]; // see navigateTo method for dest format var pageNumber = destRef instanceof Object ? this._pagesRefCache[destRef.num + ' ' + destRef.gen + ' R'] : (destRef + 1); if (pageNumber) { var pdfOpenParams = this.getAnchorUrl('#page=' + pageNumber); var destKind = dest[1]; if (typeof destKind === 'object' && 'name' in destKind && destKind.name === 'XYZ') { var scale = (dest[4] || this.pdfViewer.currentScaleValue); var scaleNumber = parseFloat(scale); if (scaleNumber) { scale = scaleNumber * 100; } pdfOpenParams += '&zoom=' + scale; if (dest[2] || dest[3]) { pdfOpenParams += ',' + (dest[2] || 0) + ',' + (dest[3] || 0); } } return pdfOpenParams; } } return this.getAnchorUrl(''); }, /** * Prefix the full url on anchor links to make sure that links are resolved * relative to the current URL instead of the one defined in <base href>. * @param {String} anchor The anchor hash, including the #. * @returns {string} The hyperlink to the PDF object. */ getAnchorUrl: function PDFLinkService_getAnchorUrl(anchor) { return (this.baseUrl || '') + anchor; }, /** * @param {string} hash */ setHash: function PDFLinkService_setHash(hash) { if (hash.indexOf('=') >= 0) { var params = parseQueryString(hash); // borrowing syntax from "Parameters for Opening PDF Files" if ('nameddest' in params) { if (this.pdfHistory) { this.pdfHistory.updateNextHashParam(params.nameddest); } this.navigateTo(params.nameddest); return; } var pageNumber, dest; if ('page' in params) { pageNumber = (params.page | 0) || 1; } if ('zoom' in params) { // Build the destination array. var zoomArgs = params.zoom.split(','); // scale,left,top var zoomArg = zoomArgs[0]; var zoomArgNumber = parseFloat(zoomArg); if (zoomArg.indexOf('Fit') === -1) { // If the zoomArg is a number, it has to get divided by 100. If it's // a string, it should stay as it is. dest = [null, { name: 'XYZ' }, zoomArgs.length > 1 ? (zoomArgs[1] | 0) : null, zoomArgs.length > 2 ? (zoomArgs[2] | 0) : null, (zoomArgNumber ? zoomArgNumber / 100 : zoomArg)]; } else { if (zoomArg === 'Fit' || zoomArg === 'FitB') { dest = [null, { name: zoomArg }]; } else if ((zoomArg === 'FitH' || zoomArg === 'FitBH') || (zoomArg === 'FitV' || zoomArg === 'FitBV')) { dest = [null, { name: zoomArg }, zoomArgs.length > 1 ? 
(zoomArgs[1] | 0) : null]; } else if (zoomArg === 'FitR') { if (zoomArgs.length !== 5) { console.error('PDFLinkService_setHash: ' + 'Not enough parameters for \'FitR\'.'); } else { dest = [null, { name: zoomArg }, (zoomArgs[1] | 0), (zoomArgs[2] | 0), (zoomArgs[3] | 0), (zoomArgs[4] | 0)]; } } else { console.error('PDFLinkService_setHash: \'' + zoomArg + '\' is not a valid zoom value.'); } } } if (dest) { this.pdfViewer.scrollPageIntoView(pageNumber || this.page, dest); } else if (pageNumber) { this.page = pageNumber; // simple page } if ('pagemode' in params) { var event = document.createEvent('CustomEvent'); event.initCustomEvent('pagemode', true, true, { mode: params.pagemode, }); this.pdfViewer.container.dispatchEvent(event); } } else if (/^\d+$/.test(hash)) { // page number this.page = hash; } else { // named destination if (this.pdfHistory) { this.pdfHistory.updateNextHashParam(unescape(hash)); } this.navigateTo(unescape(hash)); } }, /** * @param {string} action */ executeNamedAction: function PDFLinkService_executeNamedAction(action) { // See PDF reference, table 8.45 - Named action switch (action) { case 'GoBack': if (this.pdfHistory) { this.pdfHistory.back(); } break; case 'GoForward': if (this.pdfHistory) { this.pdfHistory.forward(); } break; case 'NextPage': this.page++; break; case 'PrevPage': this.page--; break; case 'LastPage': this.page = this.pagesCount; break; case 'FirstPage': this.page = 1; break; default: break; // No action according to spec } var event = document.createEvent('CustomEvent'); event.initCustomEvent('namedaction', true, true, { action: action }); this.pdfViewer.container.dispatchEvent(event); }, /** * @param {number} pageNum - page number. * @param {Object} pageRef - reference to the page. */ cachePageRef: function PDFLinkService_cachePageRef(pageNum, pageRef) { var refStr = pageRef.num + ' ' + pageRef.gen + ' R'; this._pagesRefCache[refStr] = pageNum; } }; return PDFLinkService; })(); var PresentationModeState = { UNKNOWN: 0, NORMAL: 1, CHANGING: 2, FULLSCREEN: 3, }; var IGNORE_CURRENT_POSITION_ON_ZOOM = false; var DEFAULT_CACHE_SIZE = 10; var CLEANUP_TIMEOUT = 30000; var RenderingStates = { INITIAL: 0, RUNNING: 1, PAUSED: 2, FINISHED: 3 }; /** * Controls rendering of the views for pages and thumbnails. * @class */ var PDFRenderingQueue = (function PDFRenderingQueueClosure() { /** * @constructs */ function PDFRenderingQueue() { this.pdfViewer = null; this.pdfThumbnailViewer = null; this.onIdle = null; this.highestPriorityPage = null; this.idleTimeout = null; this.printing = false; this.isThumbnailViewEnabled = false; } PDFRenderingQueue.prototype = /** @lends PDFRenderingQueue.prototype */ { /** * @param {PDFViewer} pdfViewer */ setViewer: function PDFRenderingQueue_setViewer(pdfViewer) { this.pdfViewer = pdfViewer; }, /** * @param {PDFThumbnailViewer} pdfThumbnailViewer */ setThumbnailViewer: function PDFRenderingQueue_setThumbnailViewer(pdfThumbnailViewer) { this.pdfThumbnailViewer = pdfThumbnailViewer; }, /** * @param {IRenderableView} view * @returns {boolean} */ isHighestPriority: function PDFRenderingQueue_isHighestPriority(view) { return this.highestPriorityPage === view.renderingId; }, renderHighestPriority: function PDFRenderingQueue_renderHighestPriority(currentlyVisiblePages) { if (this.idleTimeout) { clearTimeout(this.idleTimeout); this.idleTimeout = null; } // Pages have a higher priority than thumbnails, so check them first. 
if (this.pdfViewer.forceRendering(currentlyVisiblePages)) { return; } // No pages needed rendering so check thumbnails. if (this.pdfThumbnailViewer && this.isThumbnailViewEnabled) { if (this.pdfThumbnailViewer.forceRendering()) { return; } } if (this.printing) { // If printing is currently ongoing do not reschedule cleanup. return; } if (this.onIdle) { this.idleTimeout = setTimeout(this.onIdle.bind(this), CLEANUP_TIMEOUT); } }, getHighestPriority: function PDFRenderingQueue_getHighestPriority(visible, views, scrolledDown) { // The state has changed figure out which page has the highest priority to // render next (if any). // Priority: // 1 visible pages // 2 if last scrolled down page after the visible pages // 2 if last scrolled up page before the visible pages var visibleViews = visible.views; var numVisible = visibleViews.length; if (numVisible === 0) { return false; } for (var i = 0; i < numVisible; ++i) { var view = visibleViews[i].view; if (!this.isViewFinished(view)) { return view; } } // All the visible views have rendered, try to render next/previous pages. if (scrolledDown) { var nextPageIndex = visible.last.id; // ID's start at 1 so no need to add 1. if (views[nextPageIndex] && !this.isViewFinished(views[nextPageIndex])) { return views[nextPageIndex]; } } else { var previousPageIndex = visible.first.id - 2; if (views[previousPageIndex] && !this.isViewFinished(views[previousPageIndex])) { return views[previousPageIndex]; } } // Everything that needs to be rendered has been. return null; }, /** * @param {IRenderableView} view * @returns {boolean} */ isViewFinished: function PDFRenderingQueue_isViewFinished(view) { return view.renderingState === RenderingStates.FINISHED; }, /** * Render a page or thumbnail view. This calls the appropriate function * based on the views state. If the view is already rendered it will return * false. * @param {IRenderableView} view */ renderView: function PDFRenderingQueue_renderView(view) { var state = view.renderingState; switch (state) { case RenderingStates.FINISHED: return false; case RenderingStates.PAUSED: this.highestPriorityPage = view.renderingId; view.resume(); break; case RenderingStates.RUNNING: this.highestPriorityPage = view.renderingId; break; case RenderingStates.INITIAL: this.highestPriorityPage = view.renderingId; var continueRendering = function () { this.renderHighestPriority(); }.bind(this); view.draw().then(continueRendering, continueRendering); break; } return true; }, }; return PDFRenderingQueue; })(); var TEXT_LAYER_RENDER_DELAY = 200; // ms /** * @typedef {Object} PDFPageViewOptions * @property {HTMLDivElement} container - The viewer element. * @property {number} id - The page unique ID (normally its number). * @property {number} scale - The page scale display. * @property {PageViewport} defaultViewport - The page viewport. * @property {PDFRenderingQueue} renderingQueue - The rendering queue object. 
* @property {IPDFTextLayerFactory} textLayerFactory * @property {IPDFAnnotationLayerFactory} annotationLayerFactory */ /** * @class * @implements {IRenderableView} */ var PDFPageView = (function PDFPageViewClosure() { /** * @constructs PDFPageView * @param {PDFPageViewOptions} options */ function PDFPageView(options) { var container = options.container; var id = options.id; var scale = options.scale; var defaultViewport = options.defaultViewport; var renderingQueue = options.renderingQueue; var textLayerFactory = options.textLayerFactory; var annotationLayerFactory = options.annotationLayerFactory; this.id = id; this.renderingId = 'page' + id; this.rotation = 0; this.scale = scale || DEFAULT_SCALE; this.viewport = defaultViewport; this.pdfPageRotate = defaultViewport.rotation; this.hasRestrictedScaling = false; this.renderingQueue = renderingQueue; this.textLayerFactory = textLayerFactory; this.annotationLayerFactory = annotationLayerFactory; this.renderingState = RenderingStates.INITIAL; this.resume = null; this.onBeforeDraw = null; this.onAfterDraw = null; this.textLayer = null; this.zoomLayer = null; this.annotationLayer = null; var div = document.createElement('div'); div.id = 'pageContainer' + this.id; div.className = 'page'; div.style.width = Math.floor(this.viewport.width) + 'px'; div.style.height = Math.floor(this.viewport.height) + 'px'; div.setAttribute('data-page-number', this.id); this.div = div; container.appendChild(div); } PDFPageView.prototype = { setPdfPage: function PDFPageView_setPdfPage(pdfPage) { this.pdfPage = pdfPage; this.pdfPageRotate = pdfPage.rotate; var totalRotation = (this.rotation + this.pdfPageRotate) % 360; this.viewport = pdfPage.getViewport(this.scale * CSS_UNITS, totalRotation); this.stats = pdfPage.stats; this.reset(); }, destroy: function PDFPageView_destroy() { this.zoomLayer = null; this.reset(); if (this.pdfPage) { this.pdfPage.cleanup(); } }, reset: function PDFPageView_reset(keepZoomLayer, keepAnnotations) { if (this.renderTask) { this.renderTask.cancel(); } this.resume = null; this.renderingState = RenderingStates.INITIAL; var div = this.div; div.style.width = Math.floor(this.viewport.width) + 'px'; div.style.height = Math.floor(this.viewport.height) + 'px'; var childNodes = div.childNodes; var currentZoomLayerNode = (keepZoomLayer && this.zoomLayer) || null; var currentAnnotationNode = (keepAnnotations && this.annotationLayer && this.annotationLayer.div) || null; for (var i = childNodes.length - 1; i >= 0; i--) { var node = childNodes[i]; if (currentZoomLayerNode === node || currentAnnotationNode === node) { continue; } div.removeChild(node); } div.removeAttribute('data-loaded'); if (currentAnnotationNode) { // Hide annotationLayer until all elements are resized // so they are not displayed on the already-resized page this.annotationLayer.hide(); } else { this.annotationLayer = null; } if (this.canvas && !currentZoomLayerNode) { // Zeroing the width and height causes Firefox to release graphics // resources immediately, which can greatly reduce memory consumption. 
this.canvas.width = 0; this.canvas.height = 0; delete this.canvas; } this.loadingIconDiv = document.createElement('div'); this.loadingIconDiv.className = 'loadingIcon'; div.appendChild(this.loadingIconDiv); }, update: function PDFPageView_update(scale, rotation) { this.scale = scale || this.scale; if (typeof rotation !== 'undefined') { this.rotation = rotation; } var totalRotation = (this.rotation + this.pdfPageRotate) % 360; this.viewport = this.viewport.clone({ scale: this.scale * CSS_UNITS, rotation: totalRotation });<|fim▁hole|> var outputScale = this.outputScale; var pixelsInViewport = this.viewport.width * this.viewport.height; var maxScale = Math.sqrt(PDFJS.maxCanvasPixels / pixelsInViewport); if (((Math.floor(this.viewport.width) * outputScale.sx) | 0) * ((Math.floor(this.viewport.height) * outputScale.sy) | 0) > PDFJS.maxCanvasPixels) { isScalingRestricted = true; } } if (this.canvas) { if (PDFJS.useOnlyCssZoom || (this.hasRestrictedScaling && isScalingRestricted)) { this.cssTransform(this.canvas, true); var event = document.createEvent('CustomEvent'); event.initCustomEvent('pagerendered', true, true, { pageNumber: this.id, cssTransform: true, }); this.div.dispatchEvent(event); return; } if (!this.zoomLayer) { this.zoomLayer = this.canvas.parentNode; this.zoomLayer.style.position = 'absolute'; } } if (this.zoomLayer) { this.cssTransform(this.zoomLayer.firstChild); } this.reset(/* keepZoomLayer = */ true, /* keepAnnotations = */ true); }, /** * Called when moved in the parent's container. */ updatePosition: function PDFPageView_updatePosition() { if (this.textLayer) { this.textLayer.render(TEXT_LAYER_RENDER_DELAY); } }, cssTransform: function PDFPageView_transform(canvas, redrawAnnotations) { var CustomStyle = PDFJS.CustomStyle; // Scale canvas, canvas wrapper, and page container. var width = this.viewport.width; var height = this.viewport.height; var div = this.div; canvas.style.width = canvas.parentNode.style.width = div.style.width = Math.floor(width) + 'px'; canvas.style.height = canvas.parentNode.style.height = div.style.height = Math.floor(height) + 'px'; // The canvas may have been originally rotated, rotate relative to that. var relativeRotation = this.viewport.rotation - canvas._viewport.rotation; var absRotation = Math.abs(relativeRotation); var scaleX = 1, scaleY = 1; if (absRotation === 90 || absRotation === 270) { // Scale x and y because of the rotation. scaleX = height / width; scaleY = width / height; } var cssTransform = 'rotate(' + relativeRotation + 'deg) ' + 'scale(' + scaleX + ',' + scaleY + ')'; CustomStyle.setProp('transform', canvas, cssTransform); if (this.textLayer) { // Rotating the text layer is more complicated since the divs inside the // the text layer are rotated. // TODO: This could probably be simplified by drawing the text layer in // one orientation then rotating overall. 
var textLayerViewport = this.textLayer.viewport; var textRelativeRotation = this.viewport.rotation - textLayerViewport.rotation; var textAbsRotation = Math.abs(textRelativeRotation); var scale = width / textLayerViewport.width; if (textAbsRotation === 90 || textAbsRotation === 270) { scale = width / textLayerViewport.height; } var textLayerDiv = this.textLayer.textLayerDiv; var transX, transY; switch (textAbsRotation) { case 0: transX = transY = 0; break; case 90: transX = 0; transY = '-' + textLayerDiv.style.height; break; case 180: transX = '-' + textLayerDiv.style.width; transY = '-' + textLayerDiv.style.height; break; case 270: transX = '-' + textLayerDiv.style.width; transY = 0; break; default: console.error('Bad rotation value.'); break; } CustomStyle.setProp('transform', textLayerDiv, 'rotate(' + textAbsRotation + 'deg) ' + 'scale(' + scale + ', ' + scale + ') ' + 'translate(' + transX + ', ' + transY + ')'); CustomStyle.setProp('transformOrigin', textLayerDiv, '0% 0%'); } if (redrawAnnotations && this.annotationLayer) { this.annotationLayer.render(this.viewport, 'display'); } }, get width() { return this.viewport.width; }, get height() { return this.viewport.height; }, getPagePoint: function PDFPageView_getPagePoint(x, y) { return this.viewport.convertToPdfPoint(x, y); }, draw: function PDFPageView_draw() { if (this.renderingState !== RenderingStates.INITIAL) { console.error('Must be in new state before drawing'); } this.renderingState = RenderingStates.RUNNING; var pdfPage = this.pdfPage; var viewport = this.viewport; var div = this.div; // Wrap the canvas so if it has a css transform for highdpi the overflow // will be hidden in FF. var canvasWrapper = document.createElement('div'); canvasWrapper.style.width = div.style.width; canvasWrapper.style.height = div.style.height; canvasWrapper.classList.add('canvasWrapper'); var canvas = document.createElement('canvas'); canvas.id = 'page' + this.id; // Keep the canvas hidden until the first draw callback, or until drawing // is complete when `!this.renderingQueue`, to prevent black flickering. canvas.setAttribute('hidden', 'hidden'); var isCanvasHidden = true; canvasWrapper.appendChild(canvas); if (this.annotationLayer && this.annotationLayer.div) { // annotationLayer needs to stay on top div.insertBefore(canvasWrapper, this.annotationLayer.div); } else { div.appendChild(canvasWrapper); } this.canvas = canvas; var ctx = canvas.getContext('2d', {alpha: false}); var outputScale = getOutputScale(ctx); this.outputScale = outputScale; if (PDFJS.useOnlyCssZoom) { var actualSizeViewport = viewport.clone({scale: CSS_UNITS}); // Use a scale that will make the canvas be the original intended size // of the page. 
outputScale.sx *= actualSizeViewport.width / viewport.width; outputScale.sy *= actualSizeViewport.height / viewport.height; outputScale.scaled = true; } if (PDFJS.maxCanvasPixels > 0) { var pixelsInViewport = viewport.width * viewport.height; var maxScale = Math.sqrt(PDFJS.maxCanvasPixels / pixelsInViewport); if (outputScale.sx > maxScale || outputScale.sy > maxScale) { outputScale.sx = maxScale; outputScale.sy = maxScale; outputScale.scaled = true; this.hasRestrictedScaling = true; } else { this.hasRestrictedScaling = false; } } var sfx = approximateFraction(outputScale.sx); var sfy = approximateFraction(outputScale.sy); canvas.width = roundToDivide(viewport.width * outputScale.sx, sfx[0]); canvas.height = roundToDivide(viewport.height * outputScale.sy, sfy[0]); canvas.style.width = roundToDivide(viewport.width, sfx[1]) + 'px'; canvas.style.height = roundToDivide(viewport.height, sfy[1]) + 'px'; // Add the viewport so it's known what it was originally drawn with. canvas._viewport = viewport; var textLayerDiv = null; var textLayer = null; if (this.textLayerFactory) { textLayerDiv = document.createElement('div'); textLayerDiv.className = 'textLayer'; textLayerDiv.style.width = canvasWrapper.style.width; textLayerDiv.style.height = canvasWrapper.style.height; if (this.annotationLayer && this.annotationLayer.div) { // annotationLayer needs to stay on top div.insertBefore(textLayerDiv, this.annotationLayer.div); } else { div.appendChild(textLayerDiv); } textLayer = this.textLayerFactory.createTextLayerBuilder(textLayerDiv, this.id - 1, this.viewport); } this.textLayer = textLayer; var resolveRenderPromise, rejectRenderPromise; var promise = new Promise(function (resolve, reject) { resolveRenderPromise = resolve; rejectRenderPromise = reject; }); // Rendering area var self = this; function pageViewDrawCallback(error) { // The renderTask may have been replaced by a new one, so only remove // the reference to the renderTask if it matches the one that is // triggering this callback. if (renderTask === self.renderTask) { self.renderTask = null; } if (error === 'cancelled') { rejectRenderPromise(error); return; } self.renderingState = RenderingStates.FINISHED; if (isCanvasHidden) { self.canvas.removeAttribute('hidden'); isCanvasHidden = false; } if (self.loadingIconDiv) { div.removeChild(self.loadingIconDiv); delete self.loadingIconDiv; } if (self.zoomLayer) { // Zeroing the width and height causes Firefox to release graphics // resources immediately, which can greatly reduce memory consumption. var zoomLayerCanvas = self.zoomLayer.firstChild; zoomLayerCanvas.width = 0; zoomLayerCanvas.height = 0; div.removeChild(self.zoomLayer); self.zoomLayer = null; } self.error = error; self.stats = pdfPage.stats; if (self.onAfterDraw) { self.onAfterDraw(); } var event = document.createEvent('CustomEvent'); event.initCustomEvent('pagerendered', true, true, { pageNumber: self.id, cssTransform: false, }); div.dispatchEvent(event); if (!error) { resolveRenderPromise(undefined); } else { rejectRenderPromise(error); } } var renderContinueCallback = null; if (this.renderingQueue) { renderContinueCallback = function renderContinueCallback(cont) { if (!self.renderingQueue.isHighestPriority(self)) { self.renderingState = RenderingStates.PAUSED; self.resume = function resumeCallback() { self.renderingState = RenderingStates.RUNNING; cont(); }; return; } if (isCanvasHidden) { self.canvas.removeAttribute('hidden'); isCanvasHidden = false; } cont(); }; } var transform = !outputScale.scaled ? 
null : [outputScale.sx, 0, 0, outputScale.sy, 0, 0]; var renderContext = { canvasContext: ctx, transform: transform, viewport: this.viewport, // intent: 'default', // === 'display' }; var renderTask = this.renderTask = this.pdfPage.render(renderContext); renderTask.onContinue = renderContinueCallback; this.renderTask.promise.then( function pdfPageRenderCallback() { pageViewDrawCallback(null); if (textLayer) { self.pdfPage.getTextContent({ normalizeWhitespace: true }).then( function textContentResolved(textContent) { textLayer.setTextContent(textContent); textLayer.render(TEXT_LAYER_RENDER_DELAY); } ); } }, function pdfPageRenderError(error) { pageViewDrawCallback(error); } ); if (this.annotationLayerFactory) { if (!this.annotationLayer) { this.annotationLayer = this.annotationLayerFactory. createAnnotationLayerBuilder(div, this.pdfPage); } this.annotationLayer.render(this.viewport, 'display'); } div.setAttribute('data-loaded', true); if (self.onBeforeDraw) { self.onBeforeDraw(); } return promise; }, beforePrint: function PDFPageView_beforePrint() { var CustomStyle = PDFJS.CustomStyle; var pdfPage = this.pdfPage; var viewport = pdfPage.getViewport(1); // Use the same hack we use for high dpi displays for printing to get // better output until bug 811002 is fixed in FF. var PRINT_OUTPUT_SCALE = 2; var canvas = document.createElement('canvas'); // The logical size of the canvas. canvas.width = Math.floor(viewport.width) * PRINT_OUTPUT_SCALE; canvas.height = Math.floor(viewport.height) * PRINT_OUTPUT_SCALE; // The rendered size of the canvas, relative to the size of canvasWrapper. canvas.style.width = (PRINT_OUTPUT_SCALE * 100) + '%'; canvas.style.height = (PRINT_OUTPUT_SCALE * 100) + '%'; var cssScale = 'scale(' + (1 / PRINT_OUTPUT_SCALE) + ', ' + (1 / PRINT_OUTPUT_SCALE) + ')'; CustomStyle.setProp('transform' , canvas, cssScale); CustomStyle.setProp('transformOrigin' , canvas, '0% 0%'); var printContainer = document.getElementById('printContainer'); var canvasWrapper = document.createElement('div'); canvasWrapper.style.width = viewport.width + 'pt'; canvasWrapper.style.height = viewport.height + 'pt'; canvasWrapper.appendChild(canvas); printContainer.appendChild(canvasWrapper); canvas.mozPrintCallback = function(obj) { var ctx = obj.context; ctx.save(); ctx.fillStyle = 'rgb(255, 255, 255)'; ctx.fillRect(0, 0, canvas.width, canvas.height); ctx.restore(); // Used by the mozCurrentTransform polyfill in src/display/canvas.js. ctx._transformMatrix = [PRINT_OUTPUT_SCALE, 0, 0, PRINT_OUTPUT_SCALE, 0, 0]; ctx.scale(PRINT_OUTPUT_SCALE, PRINT_OUTPUT_SCALE); var renderContext = { canvasContext: ctx, viewport: viewport, intent: 'print' }; pdfPage.render(renderContext).promise.then(function() { // Tell the printEngine that rendering this canvas/page has finished. obj.done(); }, function(error) { console.error(error); // Tell the printEngine that rendering this canvas/page has failed. // This will make the print proces stop. if ('abort' in obj) { obj.abort(); } else { obj.done(); } }); }; }, }; return PDFPageView; })(); /** * @typedef {Object} TextLayerBuilderOptions * @property {HTMLDivElement} textLayerDiv - The text layer container. * @property {number} pageIndex - The page index. * @property {PageViewport} viewport - The viewport of the text layer. * @property {PDFFindController} findController */ /** * TextLayerBuilder provides text-selection functionality for the PDF. * It does this by creating overlay divs over the PDF text. These divs * contain text that matches the PDF text they are overlaying. 
This object * also provides a way to highlight text that is being searched for. * @class */ var TextLayerBuilder = (function TextLayerBuilderClosure() { function TextLayerBuilder(options) { this.textLayerDiv = options.textLayerDiv; this.renderingDone = false; this.divContentDone = false; this.pageIdx = options.pageIndex; this.pageNumber = this.pageIdx + 1; this.matches = []; this.viewport = options.viewport; this.textDivs = []; this.findController = options.findController || null; this.textLayerRenderTask = null; this._bindMouse(); } TextLayerBuilder.prototype = { _finishRendering: function TextLayerBuilder_finishRendering() { this.renderingDone = true; var endOfContent = document.createElement('div'); endOfContent.className = 'endOfContent'; this.textLayerDiv.appendChild(endOfContent); var event = document.createEvent('CustomEvent'); event.initCustomEvent('textlayerrendered', true, true, { pageNumber: this.pageNumber }); this.textLayerDiv.dispatchEvent(event); }, /** * Renders the text layer. * @param {number} timeout (optional) if specified, the rendering waits * for specified amount of ms. */ render: function TextLayerBuilder_render(timeout) { if (!this.divContentDone || this.renderingDone) { return; } if (this.textLayerRenderTask) { this.textLayerRenderTask.cancel(); this.textLayerRenderTask = null; } this.textDivs = []; var textLayerFrag = document.createDocumentFragment(); this.textLayerRenderTask = PDFJS.renderTextLayer({ textContent: this.textContent, container: textLayerFrag, viewport: this.viewport, textDivs: this.textDivs, timeout: timeout }); this.textLayerRenderTask.promise.then(function () { this.textLayerDiv.appendChild(textLayerFrag); this._finishRendering(); this.updateMatches(); }.bind(this), function (reason) { // canceled or failed to render text layer -- skipping errors }); }, setTextContent: function TextLayerBuilder_setTextContent(textContent) { if (this.textLayerRenderTask) { this.textLayerRenderTask.cancel(); this.textLayerRenderTask = null; } this.textContent = textContent; this.divContentDone = true; }, convertMatches: function TextLayerBuilder_convertMatches(matches) { var i = 0; var iIndex = 0; var bidiTexts = this.textContent.items; var end = bidiTexts.length - 1; var queryLen = (this.findController === null ? 0 : this.findController.state.query.length); var ret = []; for (var m = 0, len = matches.length; m < len; m++) { // Calculate the start position. var matchIdx = matches[m]; // Loop over the divIdxs. while (i !== end && matchIdx >= (iIndex + bidiTexts[i].str.length)) { iIndex += bidiTexts[i].str.length; i++; } if (i === bidiTexts.length) { console.error('Could not find a matching mapping'); } var match = { begin: { divIdx: i, offset: matchIdx - iIndex } }; // Calculate the end position. matchIdx += queryLen; // Somewhat the same array as above, but use > instead of >= to get // the end position right. while (i !== end && matchIdx > (iIndex + bidiTexts[i].str.length)) { iIndex += bidiTexts[i].str.length; i++; } match.end = { divIdx: i, offset: matchIdx - iIndex }; ret.push(match); } return ret; }, renderMatches: function TextLayerBuilder_renderMatches(matches) { // Early exit if there is nothing to render. if (matches.length === 0) { return; } var bidiTexts = this.textContent.items; var textDivs = this.textDivs; var prevEnd = null; var pageIdx = this.pageIdx; var isSelectedPage = (this.findController === null ? false : (pageIdx === this.findController.selected.pageIdx)); var selectedMatchIdx = (this.findController === null ? 
-1 : this.findController.selected.matchIdx); var highlightAll = (this.findController === null ? false : this.findController.state.highlightAll); var infinity = { divIdx: -1, offset: undefined }; function beginText(begin, className) { var divIdx = begin.divIdx; textDivs[divIdx].textContent = ''; appendTextToDiv(divIdx, 0, begin.offset, className); } function appendTextToDiv(divIdx, fromOffset, toOffset, className) { var div = textDivs[divIdx]; var content = bidiTexts[divIdx].str.substring(fromOffset, toOffset); var node = document.createTextNode(content); if (className) { var span = document.createElement('span'); span.className = className; span.appendChild(node); div.appendChild(span); return; } div.appendChild(node); } var i0 = selectedMatchIdx, i1 = i0 + 1; if (highlightAll) { i0 = 0; i1 = matches.length; } else if (!isSelectedPage) { // Not highlighting all and this isn't the selected page, so do nothing. return; } for (var i = i0; i < i1; i++) { var match = matches[i]; var begin = match.begin; var end = match.end; var isSelected = (isSelectedPage && i === selectedMatchIdx); var highlightSuffix = (isSelected ? ' selected' : ''); if (this.findController) { this.findController.updateMatchPosition(pageIdx, i, textDivs, begin.divIdx, end.divIdx); } // Match inside new div. if (!prevEnd || begin.divIdx !== prevEnd.divIdx) { // If there was a previous div, then add the text at the end. if (prevEnd !== null) { appendTextToDiv(prevEnd.divIdx, prevEnd.offset, infinity.offset); } // Clear the divs and set the content until the starting point. beginText(begin); } else { appendTextToDiv(prevEnd.divIdx, prevEnd.offset, begin.offset); } if (begin.divIdx === end.divIdx) { appendTextToDiv(begin.divIdx, begin.offset, end.offset, 'highlight' + highlightSuffix); } else { appendTextToDiv(begin.divIdx, begin.offset, infinity.offset, 'highlight begin' + highlightSuffix); for (var n0 = begin.divIdx + 1, n1 = end.divIdx; n0 < n1; n0++) { textDivs[n0].className = 'highlight middle' + highlightSuffix; } beginText(end, 'highlight end' + highlightSuffix); } prevEnd = end; } if (prevEnd) { appendTextToDiv(prevEnd.divIdx, prevEnd.offset, infinity.offset); } }, updateMatches: function TextLayerBuilder_updateMatches() { // Only show matches when all rendering is done. if (!this.renderingDone) { return; } // Clear all matches. var matches = this.matches; var textDivs = this.textDivs; var bidiTexts = this.textContent.items; var clearedUntilDivIdx = -1; // Clear all current matches. for (var i = 0, len = matches.length; i < len; i++) { var match = matches[i]; var begin = Math.max(clearedUntilDivIdx, match.begin.divIdx); for (var n = begin, end = match.end.divIdx; n <= end; n++) { var div = textDivs[n]; div.textContent = bidiTexts[n].str; div.className = ''; } clearedUntilDivIdx = match.end.divIdx + 1; } if (this.findController === null || !this.findController.active) { return; } // Convert the matches on the page controller into the match format // used for the textLayer. this.matches = this.convertMatches(this.findController === null ? [] : (this.findController.pageMatches[this.pageIdx] || [])); this.renderMatches(this.matches); }, /** * Fixes text selection: adds additional div where mouse was clicked. * This reduces flickering of the content if mouse slowly dragged down/up. 
* @private */ _bindMouse: function TextLayerBuilder_bindMouse() { var div = this.textLayerDiv; div.addEventListener('mousedown', function (e) { var end = div.querySelector('.endOfContent'); if (!end) { return; } // On non-Firefox browsers, the selection will feel better if the height // of the endOfContent div will be adjusted to start at mouse click // location -- this will avoid flickering when selections moves up. // However it does not work when selection started on empty space. var adjustTop = e.target !== div; if (adjustTop) { var divBounds = div.getBoundingClientRect(); var r = Math.max(0, (e.pageY - divBounds.top) / divBounds.height); end.style.top = (r * 100).toFixed(2) + '%'; } end.classList.add('active'); }); div.addEventListener('mouseup', function (e) { var end = div.querySelector('.endOfContent'); if (!end) { return; } end.style.top = ''; end.classList.remove('active'); }); }, }; return TextLayerBuilder; })(); /** * @constructor * @implements IPDFTextLayerFactory */ function DefaultTextLayerFactory() {} DefaultTextLayerFactory.prototype = { /** * @param {HTMLDivElement} textLayerDiv * @param {number} pageIndex * @param {PageViewport} viewport * @returns {TextLayerBuilder} */ createTextLayerBuilder: function (textLayerDiv, pageIndex, viewport) { return new TextLayerBuilder({ textLayerDiv: textLayerDiv, pageIndex: pageIndex, viewport: viewport }); } }; /** * @typedef {Object} AnnotationLayerBuilderOptions * @property {HTMLDivElement} pageDiv * @property {PDFPage} pdfPage * @property {IPDFLinkService} linkService * @property {DownloadManager} downloadManager */ /** * @class */ var AnnotationLayerBuilder = (function AnnotationLayerBuilderClosure() { /** * @param {AnnotationLayerBuilderOptions} options * @constructs AnnotationLayerBuilder */ function AnnotationLayerBuilder(options) { this.pageDiv = options.pageDiv; this.pdfPage = options.pdfPage; this.linkService = options.linkService; this.downloadManager = options.downloadManager; this.div = null; } AnnotationLayerBuilder.prototype = /** @lends AnnotationLayerBuilder.prototype */ { /** * @param {PageViewport} viewport * @param {string} intent (default value is 'display') */ render: function AnnotationLayerBuilder_render(viewport, intent) { var self = this; var parameters = { intent: (intent === undefined ? 'display' : intent), }; this.pdfPage.getAnnotations(parameters).then(function (annotations) { viewport = viewport.clone({ dontFlip: true }); parameters = { viewport: viewport, div: self.div, annotations: annotations, page: self.pdfPage, linkService: self.linkService, downloadManager: self.downloadManager }; if (self.div) { // If an annotationLayer already exists, refresh its children's // transformation matrices. PDFJS.AnnotationLayer.update(parameters); } else { // Create an annotation layer div and render the annotations // if there is at least one annotation. 
if (annotations.length === 0) { return; } self.div = document.createElement('div'); self.div.className = 'annotationLayer'; self.pageDiv.appendChild(self.div); parameters.div = self.div; PDFJS.AnnotationLayer.render(parameters); if (typeof mozL10n !== 'undefined') { mozL10n.translate(self.div); } } }); }, hide: function AnnotationLayerBuilder_hide() { if (!this.div) { return; } this.div.setAttribute('hidden', 'true'); } }; return AnnotationLayerBuilder; })(); /** * @constructor * @implements IPDFAnnotationLayerFactory */ function DefaultAnnotationLayerFactory() {} DefaultAnnotationLayerFactory.prototype = { /** * @param {HTMLDivElement} pageDiv * @param {PDFPage} pdfPage * @returns {AnnotationLayerBuilder} */ createAnnotationLayerBuilder: function (pageDiv, pdfPage) { return new AnnotationLayerBuilder({ pageDiv: pageDiv, pdfPage: pdfPage, linkService: new SimpleLinkService(), }); } }; /** * @typedef {Object} PDFViewerOptions * @property {HTMLDivElement} container - The container for the viewer element. * @property {HTMLDivElement} viewer - (optional) The viewer element. * @property {IPDFLinkService} linkService - The navigation/linking service. * @property {DownloadManager} downloadManager - (optional) The download * manager component. * @property {PDFRenderingQueue} renderingQueue - (optional) The rendering * queue object. * @property {boolean} removePageBorders - (optional) Removes the border shadow * around the pages. The default is false. */ /** * Simple viewer control to display PDF content/pages. * @class * @implements {IRenderableView} */ var PDFViewer = (function pdfViewer() { function PDFPageViewBuffer(size) { var data = []; this.push = function cachePush(view) { var i = data.indexOf(view); if (i >= 0) { data.splice(i, 1); } data.push(view); if (data.length > size) { data.shift().destroy(); } }; this.resize = function (newSize) { size = newSize; while (data.length > size) { data.shift().destroy(); } }; } function isSameScale(oldScale, newScale) { if (newScale === oldScale) { return true; } if (Math.abs(newScale - oldScale) < 1e-15) { // Prevent unnecessary re-rendering of all pages when the scale // changes only because of limited numerical precision. 
return true; } return false; } /** * @constructs PDFViewer * @param {PDFViewerOptions} options */ function PDFViewer(options) { this.container = options.container; this.viewer = options.viewer || options.container.firstElementChild; this.linkService = options.linkService || new SimpleLinkService(); this.downloadManager = options.downloadManager || null; this.removePageBorders = options.removePageBorders || false; this.defaultRenderingQueue = !options.renderingQueue; if (this.defaultRenderingQueue) { // Custom rendering queue is not specified, using default one this.renderingQueue = new PDFRenderingQueue(); this.renderingQueue.setViewer(this); } else { this.renderingQueue = options.renderingQueue; } this.scroll = watchScroll(this.container, this._scrollUpdate.bind(this)); this.updateInProgress = false; this.presentationModeState = PresentationModeState.UNKNOWN; this._resetView(); if (this.removePageBorders) { this.viewer.classList.add('removePageBorders'); } } PDFViewer.prototype = /** @lends PDFViewer.prototype */{ get pagesCount() { return this._pages.length; }, getPageView: function (index) { return this._pages[index]; }, get currentPageNumber() { return this._currentPageNumber; }, set currentPageNumber(val) { if (!this.pdfDocument) { this._currentPageNumber = val; return; } var event = document.createEvent('UIEvents'); event.initUIEvent('pagechange', true, true, window, 0); event.updateInProgress = this.updateInProgress; if (!(0 < val && val <= this.pagesCount)) { event.pageNumber = this._currentPageNumber; event.previousPageNumber = val; this.container.dispatchEvent(event); return; } event.previousPageNumber = this._currentPageNumber; this._currentPageNumber = val; event.pageNumber = val; this.container.dispatchEvent(event); // Check if the caller is `PDFViewer_update`, to avoid breaking scrolling. if (this.updateInProgress) { return; } this.scrollPageIntoView(val); }, /** * @returns {number} */ get currentScale() { return this._currentScale !== UNKNOWN_SCALE ? this._currentScale : DEFAULT_SCALE; }, /** * @param {number} val - Scale of the pages in percents. */ set currentScale(val) { if (isNaN(val)) { throw new Error('Invalid numeric scale'); } if (!this.pdfDocument) { this._currentScale = val; this._currentScaleValue = val !== UNKNOWN_SCALE ? val.toString() : null; return; } this._setScale(val, false); }, /** * @returns {string} */ get currentScaleValue() { return this._currentScaleValue; }, /** * @param val - The scale of the pages (in percent or predefined value). */ set currentScaleValue(val) { if (!this.pdfDocument) { this._currentScale = isNaN(val) ? UNKNOWN_SCALE : val; this._currentScaleValue = val; return; } this._setScale(val, false); }, /** * @returns {number} */ get pagesRotation() { return this._pagesRotation; }, /** * @param {number} rotation - The rotation of the pages (0, 90, 180, 270). 
*/ set pagesRotation(rotation) { this._pagesRotation = rotation; for (var i = 0, l = this._pages.length; i < l; i++) { var pageView = this._pages[i]; pageView.update(pageView.scale, rotation); } this._setScale(this._currentScaleValue, true); if (this.defaultRenderingQueue) { this.update(); } }, /** * @param pdfDocument {PDFDocument} */ setDocument: function (pdfDocument) { if (this.pdfDocument) { this._resetView(); } this.pdfDocument = pdfDocument; if (!pdfDocument) { return; } var pagesCount = pdfDocument.numPages; var self = this; var resolvePagesPromise; var pagesPromise = new Promise(function (resolve) { resolvePagesPromise = resolve; }); this.pagesPromise = pagesPromise; pagesPromise.then(function () { var event = document.createEvent('CustomEvent'); event.initCustomEvent('pagesloaded', true, true, { pagesCount: pagesCount }); self.container.dispatchEvent(event); }); var isOnePageRenderedResolved = false; var resolveOnePageRendered = null; var onePageRendered = new Promise(function (resolve) { resolveOnePageRendered = resolve; }); this.onePageRendered = onePageRendered; var bindOnAfterAndBeforeDraw = function (pageView) { pageView.onBeforeDraw = function pdfViewLoadOnBeforeDraw() { // Add the page to the buffer at the start of drawing. That way it can // be evicted from the buffer and destroyed even if we pause its // rendering. self._buffer.push(this); }; // when page is painted, using the image as thumbnail base pageView.onAfterDraw = function pdfViewLoadOnAfterDraw() { if (!isOnePageRenderedResolved) { isOnePageRenderedResolved = true; resolveOnePageRendered(); } }; }; var firstPagePromise = pdfDocument.getPage(1); this.firstPagePromise = firstPagePromise; // Fetch a single page so we can get a viewport that will be the default // viewport for all pages return firstPagePromise.then(function(pdfPage) { var scale = this.currentScale; var viewport = pdfPage.getViewport(scale * CSS_UNITS); for (var pageNum = 1; pageNum <= pagesCount; ++pageNum) { var textLayerFactory = null; if (!PDFJS.disableTextLayer) { textLayerFactory = this; } var pageView = new PDFPageView({ container: this.viewer, id: pageNum, scale: scale, defaultViewport: viewport.clone(), renderingQueue: this.renderingQueue, textLayerFactory: textLayerFactory, annotationLayerFactory: this }); bindOnAfterAndBeforeDraw(pageView); this._pages.push(pageView); } var linkService = this.linkService; // Fetch all the pages since the viewport is needed before printing // starts to create the correct size canvas. Wait until one page is // rendered so we don't tie up too many resources early on. onePageRendered.then(function () { if (!PDFJS.disableAutoFetch) { var getPagesLeft = pagesCount; for (var pageNum = 1; pageNum <= pagesCount; ++pageNum) { pdfDocument.getPage(pageNum).then(function (pageNum, pdfPage) { var pageView = self._pages[pageNum - 1]; if (!pageView.pdfPage) { pageView.setPdfPage(pdfPage); } linkService.cachePageRef(pageNum, pdfPage.ref); getPagesLeft--; if (!getPagesLeft) { resolvePagesPromise(); } }.bind(null, pageNum)); } } else { // XXX: Printing is semi-broken with auto fetch disabled. 
resolvePagesPromise(); } }); var event = document.createEvent('CustomEvent'); event.initCustomEvent('pagesinit', true, true, null); self.container.dispatchEvent(event); if (this.defaultRenderingQueue) { this.update(); } if (this.findController) { this.findController.resolveFirstPage(); } }.bind(this)); }, _resetView: function () { this._pages = []; this._currentPageNumber = 1; this._currentScale = UNKNOWN_SCALE; this._currentScaleValue = null; this._buffer = new PDFPageViewBuffer(DEFAULT_CACHE_SIZE); this._location = null; this._pagesRotation = 0; this._pagesRequests = []; var container = this.viewer; while (container.hasChildNodes()) { container.removeChild(container.lastChild); } }, _scrollUpdate: function PDFViewer_scrollUpdate() { if (this.pagesCount === 0) { return; } this.update(); for (var i = 0, ii = this._pages.length; i < ii; i++) { this._pages[i].updatePosition(); } }, _setScaleDispatchEvent: function pdfViewer_setScaleDispatchEvent( newScale, newValue, preset) { var event = document.createEvent('UIEvents'); event.initUIEvent('scalechange', true, true, window, 0); event.scale = newScale; if (preset) { event.presetValue = newValue; } this.container.dispatchEvent(event); }, _setScaleUpdatePages: function pdfViewer_setScaleUpdatePages( newScale, newValue, noScroll, preset) { this._currentScaleValue = newValue; if (isSameScale(this._currentScale, newScale)) { if (preset) { this._setScaleDispatchEvent(newScale, newValue, true); } return; } for (var i = 0, ii = this._pages.length; i < ii; i++) { this._pages[i].update(newScale); } this._currentScale = newScale; if (!noScroll) { var page = this._currentPageNumber, dest; if (this._location && !IGNORE_CURRENT_POSITION_ON_ZOOM && !(this.isInPresentationMode || this.isChangingPresentationMode)) { page = this._location.pageNumber; dest = [null, { name: 'XYZ' }, this._location.left, this._location.top, null]; } this.scrollPageIntoView(page, dest); } this._setScaleDispatchEvent(newScale, newValue, preset); if (this.defaultRenderingQueue) { this.update(); } }, _setScale: function pdfViewer_setScale(value, noScroll) { var scale = parseFloat(value); if (scale > 0) { this._setScaleUpdatePages(scale, value, noScroll, false); } else { var currentPage = this._pages[this._currentPageNumber - 1]; if (!currentPage) { return; } var hPadding = (this.isInPresentationMode || this.removePageBorders) ? 0 : SCROLLBAR_PADDING; var vPadding = (this.isInPresentationMode || this.removePageBorders) ? 0 : VERTICAL_PADDING; var pageWidthScale = (this.container.clientWidth - hPadding) / currentPage.width * currentPage.scale; var pageHeightScale = (this.container.clientHeight - vPadding) / currentPage.height * currentPage.scale; switch (value) { case 'page-actual': scale = 1; break; case 'page-width': scale = pageWidthScale; break; case 'page-height': scale = pageHeightScale; break; case 'page-fit': scale = Math.min(pageWidthScale, pageHeightScale); break; case 'auto': var isLandscape = (currentPage.width > currentPage.height); // For pages in landscape mode, fit the page height to the viewer // *unless* the page would thus become too wide to fit horizontally. var horizontalScale = isLandscape ? Math.min(pageHeightScale, pageWidthScale) : pageWidthScale; scale = Math.min(MAX_AUTO_SCALE, horizontalScale); break; default: console.error('pdfViewSetScale: \'' + value + '\' is an unknown zoom value.'); return; } this._setScaleUpdatePages(scale, value, noScroll, true); } }, /** * Scrolls page into view. 
* @param {number} pageNumber * @param {Array} dest - (optional) original PDF destination array: * <page-ref> </XYZ|FitXXX> <args..> */ scrollPageIntoView: function PDFViewer_scrollPageIntoView(pageNumber, dest) { if (!this.pdfDocument) { return; } var pageView = this._pages[pageNumber - 1]; if (this.isInPresentationMode) { if (this._currentPageNumber !== pageView.id) { // Avoid breaking getVisiblePages in presentation mode. this.currentPageNumber = pageView.id; return; } dest = null; // Fixes the case when PDF has different page sizes. this._setScale(this._currentScaleValue, true); } if (!dest) { scrollIntoView(pageView.div); return; } var x = 0, y = 0; var width = 0, height = 0, widthScale, heightScale; var changeOrientation = (pageView.rotation % 180 === 0 ? false : true); var pageWidth = (changeOrientation ? pageView.height : pageView.width) / pageView.scale / CSS_UNITS; var pageHeight = (changeOrientation ? pageView.width : pageView.height) / pageView.scale / CSS_UNITS; var scale = 0; switch (dest[1].name) { case 'XYZ': x = dest[2]; y = dest[3]; scale = dest[4]; // If x and/or y coordinates are not supplied, default to // _top_ left of the page (not the obvious bottom left, // since aligning the bottom of the intended page with the // top of the window is rarely helpful). x = x !== null ? x : 0; y = y !== null ? y : pageHeight; break; case 'Fit': case 'FitB': scale = 'page-fit'; break; case 'FitH': case 'FitBH': y = dest[2]; scale = 'page-width'; // According to the PDF spec, section 12.3.2.2, a `null` value in the // parameter should maintain the position relative to the new page. if (y === null && this._location) { x = this._location.left; y = this._location.top; } break; case 'FitV': case 'FitBV': x = dest[2]; width = pageWidth; height = pageHeight; scale = 'page-height'; break; case 'FitR': x = dest[2]; y = dest[3]; width = dest[4] - x; height = dest[5] - y; var hPadding = this.removePageBorders ? 0 : SCROLLBAR_PADDING; var vPadding = this.removePageBorders ? 0 : VERTICAL_PADDING; widthScale = (this.container.clientWidth - hPadding) / width / CSS_UNITS; heightScale = (this.container.clientHeight - vPadding) / height / CSS_UNITS; scale = Math.min(Math.abs(widthScale), Math.abs(heightScale)); break; default: return; } if (scale && scale !== this._currentScale) { this.currentScaleValue = scale; } else if (this._currentScale === UNKNOWN_SCALE) { this.currentScaleValue = DEFAULT_SCALE_VALUE; } if (scale === 'page-fit' && !dest[4]) { scrollIntoView(pageView.div); return; } var boundingRect = [ pageView.viewport.convertToViewportPoint(x, y), pageView.viewport.convertToViewportPoint(x + width, y + height) ]; var left = Math.min(boundingRect[0][0], boundingRect[1][0]); var top = Math.min(boundingRect[0][1], boundingRect[1][1]); scrollIntoView(pageView.div, { left: left, top: top }); }, _updateLocation: function (firstPage) { var currentScale = this._currentScale; var currentScaleValue = this._currentScaleValue; var normalizedScaleValue = parseFloat(currentScaleValue) === currentScale ? 
Math.round(currentScale * 10000) / 100 : currentScaleValue; var pageNumber = firstPage.id; var pdfOpenParams = '#page=' + pageNumber; pdfOpenParams += '&zoom=' + normalizedScaleValue; var currentPageView = this._pages[pageNumber - 1]; var container = this.container; var topLeft = currentPageView.getPagePoint( (container.scrollLeft - firstPage.x), (container.scrollTop - firstPage.y)); var intLeft = Math.round(topLeft[0]); var intTop = Math.round(topLeft[1]); pdfOpenParams += ',' + intLeft + ',' + intTop; this._location = { pageNumber: pageNumber, scale: normalizedScaleValue, top: intTop, left: intLeft, pdfOpenParams: pdfOpenParams }; }, update: function PDFViewer_update() { var visible = this._getVisiblePages(); var visiblePages = visible.views; if (visiblePages.length === 0) { return; } this.updateInProgress = true; var suggestedCacheSize = Math.max(DEFAULT_CACHE_SIZE, 2 * visiblePages.length + 1); this._buffer.resize(suggestedCacheSize); this.renderingQueue.renderHighestPriority(visible); var currentId = this._currentPageNumber; var firstPage = visible.first; for (var i = 0, ii = visiblePages.length, stillFullyVisible = false; i < ii; ++i) { var page = visiblePages[i]; if (page.percent < 100) { break; } if (page.id === currentId) { stillFullyVisible = true; break; } } if (!stillFullyVisible) { currentId = visiblePages[0].id; } if (!this.isInPresentationMode) { this.currentPageNumber = currentId; } this._updateLocation(firstPage); this.updateInProgress = false; var event = document.createEvent('UIEvents'); event.initUIEvent('updateviewarea', true, true, window, 0); event.location = this._location; this.container.dispatchEvent(event); }, containsElement: function (element) { return this.container.contains(element); }, focus: function () { this.container.focus(); }, get isInPresentationMode() { return this.presentationModeState === PresentationModeState.FULLSCREEN; }, get isChangingPresentationMode() { return this.presentationModeState === PresentationModeState.CHANGING; }, get isHorizontalScrollbarEnabled() { return (this.isInPresentationMode ? false : (this.container.scrollWidth > this.container.clientWidth)); }, _getVisiblePages: function () { if (!this.isInPresentationMode) { return getVisibleElements(this.container, this._pages, true); } else { // The algorithm in getVisibleElements doesn't work in all browsers and // configurations when presentation mode is active. 
var visible = []; var currentPage = this._pages[this._currentPageNumber - 1]; visible.push({ id: currentPage.id, view: currentPage }); return { first: currentPage, last: currentPage, views: visible }; } }, cleanup: function () { for (var i = 0, ii = this._pages.length; i < ii; i++) { if (this._pages[i] && this._pages[i].renderingState !== RenderingStates.FINISHED) { this._pages[i].reset(); } } }, /** * @param {PDFPageView} pageView * @returns {PDFPage} * @private */ _ensurePdfPageLoaded: function (pageView) { if (pageView.pdfPage) { return Promise.resolve(pageView.pdfPage); } var pageNumber = pageView.id; if (this._pagesRequests[pageNumber]) { return this._pagesRequests[pageNumber]; } var promise = this.pdfDocument.getPage(pageNumber).then( function (pdfPage) { pageView.setPdfPage(pdfPage); this._pagesRequests[pageNumber] = null; return pdfPage; }.bind(this)); this._pagesRequests[pageNumber] = promise; return promise; }, forceRendering: function (currentlyVisiblePages) { var visiblePages = currentlyVisiblePages || this._getVisiblePages(); var pageView = this.renderingQueue.getHighestPriority(visiblePages, this._pages, this.scroll.down); if (pageView) { this._ensurePdfPageLoaded(pageView).then(function () { this.renderingQueue.renderView(pageView); }.bind(this)); return true; } return false; }, getPageTextContent: function (pageIndex) { return this.pdfDocument.getPage(pageIndex + 1).then(function (page) { return page.getTextContent({ normalizeWhitespace: true }); }); }, /** * @param {HTMLDivElement} textLayerDiv * @param {number} pageIndex * @param {PageViewport} viewport * @returns {TextLayerBuilder} */ createTextLayerBuilder: function (textLayerDiv, pageIndex, viewport) { return new TextLayerBuilder({ textLayerDiv: textLayerDiv, pageIndex: pageIndex, viewport: viewport, findController: this.isInPresentationMode ? null : this.findController }); }, /** * @param {HTMLDivElement} pageDiv * @param {PDFPage} pdfPage * @returns {AnnotationLayerBuilder} */ createAnnotationLayerBuilder: function (pageDiv, pdfPage) { return new AnnotationLayerBuilder({ pageDiv: pageDiv, pdfPage: pdfPage, linkService: this.linkService, downloadManager: this.downloadManager }); }, setFindController: function (findController) { this.findController = findController; }, }; return PDFViewer; })(); var SimpleLinkService = (function SimpleLinkServiceClosure() { function SimpleLinkService() {} SimpleLinkService.prototype = { /** * @returns {number} */ get page() { return 0; }, /** * @param {number} value */ set page(value) {}, /** * @param dest - The PDF destination object. */ navigateTo: function (dest) {}, /** * @param dest - The PDF destination object. * @returns {string} The hyperlink to the PDF object. */ getDestinationHash: function (dest) { return '#'; }, /** * @param hash - The PDF parameters/hash. * @returns {string} The hyperlink to the PDF object. */ getAnchorUrl: function (hash) { return '#'; }, /** * @param {string} hash */ setHash: function (hash) {}, /** * @param {string} action */ executeNamedAction: function (action) {}, /** * @param {number} pageNum - page number. * @param {Object} pageRef - reference to the page. 
*/ cachePageRef: function (pageNum, pageRef) {} }; return SimpleLinkService; })(); var PDFHistory = (function () { function PDFHistory(options) { this.linkService = options.linkService; this.initialized = false; this.initialDestination = null; this.initialBookmark = null; } PDFHistory.prototype = { /** * @param {string} fingerprint * @param {IPDFLinkService} linkService */ initialize: function pdfHistoryInitialize(fingerprint) { this.initialized = true; this.reInitialized = false; this.allowHashChange = true; this.historyUnlocked = true; this.isViewerInPresentationMode = false; this.previousHash = window.location.hash.substring(1); this.currentBookmark = ''; this.currentPage = 0; this.updatePreviousBookmark = false; this.previousBookmark = ''; this.previousPage = 0; this.nextHashParam = ''; this.fingerprint = fingerprint; this.currentUid = this.uid = 0; this.current = {}; var state = window.history.state; if (this._isStateObjectDefined(state)) { // This corresponds to navigating back to the document // from another page in the browser history. if (state.target.dest) { this.initialDestination = state.target.dest; } else { this.initialBookmark = state.target.hash; } this.currentUid = state.uid; this.uid = state.uid + 1; this.current = state.target; } else { // This corresponds to the loading of a new document. if (state && state.fingerprint && this.fingerprint !== state.fingerprint) { // Reinitialize the browsing history when a new document // is opened in the web viewer. this.reInitialized = true; } this._pushOrReplaceState({fingerprint: this.fingerprint}, true); } var self = this; window.addEventListener('popstate', function pdfHistoryPopstate(evt) { if (!self.historyUnlocked) { return; } if (evt.state) { // Move back/forward in the history. self._goTo(evt.state); return; } // If the state is not set, then the user tried to navigate to a // different hash by manually editing the URL and pressing Enter, or by // clicking on an in-page link (e.g. the "current view" link). // Save the current view state to the browser history. // Note: In Firefox, history.null could also be null after an in-page // navigation to the same URL, and without dispatching the popstate // event: https://bugzilla.mozilla.org/show_bug.cgi?id=1183881 if (self.uid === 0) { // Replace the previous state if it was not explicitly set. var previousParams = (self.previousHash && self.currentBookmark && self.previousHash !== self.currentBookmark) ? {hash: self.currentBookmark, page: self.currentPage} : {page: 1}; replacePreviousHistoryState(previousParams, function() { updateHistoryWithCurrentHash(); }); } else { updateHistoryWithCurrentHash(); } }, false); function updateHistoryWithCurrentHash() { self.previousHash = window.location.hash.slice(1); self._pushToHistory({hash: self.previousHash}, false, true); self._updatePreviousBookmark(); } function replacePreviousHistoryState(params, callback) { // To modify the previous history entry, the following happens: // 1. history.back() // 2. _pushToHistory, which calls history.replaceState( ... ) // 3. history.forward() // Because a navigation via the history API does not immediately update // the history state, the popstate event is used for synchronization. self.historyUnlocked = false; // Suppress the hashchange event to avoid side effects caused by // navigating back and forward. 
self.allowHashChange = false; window.addEventListener('popstate', rewriteHistoryAfterBack); history.back(); function rewriteHistoryAfterBack() { window.removeEventListener('popstate', rewriteHistoryAfterBack); window.addEventListener('popstate', rewriteHistoryAfterForward); self._pushToHistory(params, false, true); history.forward(); } function rewriteHistoryAfterForward() { window.removeEventListener('popstate', rewriteHistoryAfterForward); self.allowHashChange = true; self.historyUnlocked = true; callback(); } } function pdfHistoryBeforeUnload() { var previousParams = self._getPreviousParams(null, true); if (previousParams) { var replacePrevious = (!self.current.dest && self.current.hash !== self.previousHash); self._pushToHistory(previousParams, false, replacePrevious); self._updatePreviousBookmark(); } // Remove the event listener when navigating away from the document, // since 'beforeunload' prevents Firefox from caching the document. window.removeEventListener('beforeunload', pdfHistoryBeforeUnload, false); } window.addEventListener('beforeunload', pdfHistoryBeforeUnload, false); window.addEventListener('pageshow', function pdfHistoryPageShow(evt) { // If the entire viewer (including the PDF file) is cached in // the browser, we need to reattach the 'beforeunload' event listener // since the 'DOMContentLoaded' event is not fired on 'pageshow'. window.addEventListener('beforeunload', pdfHistoryBeforeUnload, false); }, false); window.addEventListener('presentationmodechanged', function(e) { self.isViewerInPresentationMode = !!e.detail.active; }); }, clearHistoryState: function pdfHistory_clearHistoryState() { this._pushOrReplaceState(null, true); }, _isStateObjectDefined: function pdfHistory_isStateObjectDefined(state) { return (state && state.uid >= 0 && state.fingerprint && this.fingerprint === state.fingerprint && state.target && state.target.hash) ? true : false; }, _pushOrReplaceState: function pdfHistory_pushOrReplaceState(stateObj, replace) { if (replace) { window.history.replaceState(stateObj, ''); } else { window.history.pushState(stateObj, ''); } }, get isHashChangeUnlocked() { if (!this.initialized) { return true; } return this.allowHashChange; }, _updatePreviousBookmark: function pdfHistory_updatePreviousBookmark() { if (this.updatePreviousBookmark && this.currentBookmark && this.currentPage) { this.previousBookmark = this.currentBookmark; this.previousPage = this.currentPage; this.updatePreviousBookmark = false; } }, updateCurrentBookmark: function pdfHistoryUpdateCurrentBookmark(bookmark, pageNum) { if (this.initialized) { this.currentBookmark = bookmark.substring(1); this.currentPage = pageNum | 0; this._updatePreviousBookmark(); } }, updateNextHashParam: function pdfHistoryUpdateNextHashParam(param) { if (this.initialized) { this.nextHashParam = param; } }, push: function pdfHistoryPush(params, isInitialBookmark) { if (!(this.initialized && this.historyUnlocked)) { return; } if (params.dest && !params.hash) { params.hash = (this.current.hash && this.current.dest && this.current.dest === params.dest) ? this.current.hash : this.linkService.getDestinationHash(params.dest).split('#')[1]; } if (params.page) { params.page |= 0; } if (isInitialBookmark) { var target = window.history.state.target; if (!target) { // Invoked when the user specifies an initial bookmark, // thus setting initialBookmark, when the document is loaded. this._pushToHistory(params, false); this.previousHash = window.location.hash.substring(1); } this.updatePreviousBookmark = this.nextHashParam ? 
false : true; if (target) { // If the current document is reloaded, // avoid creating duplicate entries in the history. this._updatePreviousBookmark(); } return; } if (this.nextHashParam) { if (this.nextHashParam === params.hash) { this.nextHashParam = null; this.updatePreviousBookmark = true; return; } else { this.nextHashParam = null; } } if (params.hash) { if (this.current.hash) { if (this.current.hash !== params.hash) { this._pushToHistory(params, true); } else { if (!this.current.page && params.page) { this._pushToHistory(params, false, true); } this.updatePreviousBookmark = true; } } else { this._pushToHistory(params, true); } } else if (this.current.page && params.page && this.current.page !== params.page) { this._pushToHistory(params, true); } }, _getPreviousParams: function pdfHistory_getPreviousParams(onlyCheckPage, beforeUnload) { if (!(this.currentBookmark && this.currentPage)) { return null; } else if (this.updatePreviousBookmark) { this.updatePreviousBookmark = false; } if (this.uid > 0 && !(this.previousBookmark && this.previousPage)) { // Prevent the history from getting stuck in the current state, // effectively preventing the user from going back/forward in // the history. // // This happens if the current position in the document didn't change // when the history was previously updated. The reasons for this are // either: // 1. The current zoom value is such that the document does not need to, // or cannot, be scrolled to display the destination. // 2. The previous destination is broken, and doesn't actally point to a // position within the document. // (This is either due to a bad PDF generator, or the user making a // mistake when entering a destination in the hash parameters.) return null; } if ((!this.current.dest && !onlyCheckPage) || beforeUnload) { if (this.previousBookmark === this.currentBookmark) { return null; } } else if (this.current.page || onlyCheckPage) { if (this.previousPage === this.currentPage) { return null; } } else { return null; } var params = {hash: this.currentBookmark, page: this.currentPage}; if (this.isViewerInPresentationMode) { params.hash = null; } return params; }, _stateObj: function pdfHistory_stateObj(params) { return {fingerprint: this.fingerprint, uid: this.uid, target: params}; }, _pushToHistory: function pdfHistory_pushToHistory(params, addPrevious, overwrite) { if (!this.initialized) { return; } if (!params.hash && params.page) { params.hash = ('page=' + params.page); } if (addPrevious && !overwrite) { var previousParams = this._getPreviousParams(); if (previousParams) { var replacePrevious = (!this.current.dest && this.current.hash !== this.previousHash); this._pushToHistory(previousParams, false, replacePrevious); } } this._pushOrReplaceState(this._stateObj(params), (overwrite || this.uid === 0)); this.currentUid = this.uid++; this.current = params; this.updatePreviousBookmark = true; }, _goTo: function pdfHistory_goTo(state) { if (!(this.initialized && this.historyUnlocked && this._isStateObjectDefined(state))) { return; } if (!this.reInitialized && state.uid < this.currentUid) { var previousParams = this._getPreviousParams(true); if (previousParams) { this._pushToHistory(this.current, false); this._pushToHistory(previousParams, false); this.currentUid = state.uid; window.history.back(); return; } } this.historyUnlocked = false; if (state.target.dest) { this.linkService.navigateTo(state.target.dest); } else { this.linkService.setHash(state.target.hash); } this.currentUid = state.uid; if (state.uid > this.uid) { this.uid = 
state.uid; } this.current = state.target; this.updatePreviousBookmark = true; var currentHash = window.location.hash.substring(1); if (this.previousHash !== currentHash) { this.allowHashChange = false; } this.previousHash = currentHash; this.historyUnlocked = true; }, back: function pdfHistoryBack() { this.go(-1); }, forward: function pdfHistoryForward() { this.go(1); }, go: function pdfHistoryGo(direction) { if (this.initialized && this.historyUnlocked) { var state = window.history.state; if (direction === -1 && state && state.uid > 0) { window.history.back(); } else if (direction === 1 && state && state.uid < (this.uid - 1)) { window.history.forward(); } } } }; return PDFHistory; })(); var DownloadManager = (function DownloadManagerClosure() { function download(blobUrl, filename) { var a = document.createElement('a'); if (a.click) { // Use a.click() if available. Otherwise, Chrome might show // "Unsafe JavaScript attempt to initiate a navigation change // for frame with URL" and not open the PDF at all. // Supported by (not mentioned = untested): // - Firefox 6 - 19 (4- does not support a.click, 5 ignores a.click) // - Chrome 19 - 26 (18- does not support a.click) // - Opera 9 - 12.15 // - Internet Explorer 6 - 10 // - Safari 6 (5.1- does not support a.click) a.href = blobUrl; a.target = '_parent'; // Use a.download if available. This increases the likelihood that // the file is downloaded instead of opened by another PDF plugin. if ('download' in a) { a.download = filename; } // <a> must be in the document for IE and recent Firefox versions. // (otherwise .click() is ignored) (document.body || document.documentElement).appendChild(a); a.click(); a.parentNode.removeChild(a); } else { if (window.top === window && blobUrl.split('#')[0] === window.location.href.split('#')[0]) { // If _parent == self, then opening an identical URL with different // location hash will only cause a navigation, not a download. var padCharacter = blobUrl.indexOf('?') === -1 ? '?' : '&'; blobUrl = blobUrl.replace(/#|$/, padCharacter + '$&'); } window.open(blobUrl, '_parent'); } } function DownloadManager() {} DownloadManager.prototype = { downloadUrl: function DownloadManager_downloadUrl(url, filename) { if (!PDFJS.isValidUrl(url, true)) { return; // restricted/invalid URL } download(url + '#pdfjs.action=download', filename); }, downloadData: function DownloadManager_downloadData(data, filename, contentType) { if (navigator.msSaveBlob) { // IE10 and above return navigator.msSaveBlob(new Blob([data], { type: contentType }), filename); } var blobUrl = PDFJS.createObjectURL(data, contentType); download(blobUrl, filename); }, download: function DownloadManager_download(blob, url, filename) { if (!URL) { // URL.createObjectURL is not supported this.downloadUrl(url, filename); return; } if (navigator.msSaveBlob) { // IE10 / IE11 if (!navigator.msSaveBlob(blob, filename)) { this.downloadUrl(url, filename); } return; } var blobUrl = URL.createObjectURL(blob); download(blobUrl, filename); } }; return DownloadManager; })(); PDFJS.PDFViewer = PDFViewer; PDFJS.PDFPageView = PDFPageView; PDFJS.PDFLinkService = PDFLinkService; PDFJS.TextLayerBuilder = TextLayerBuilder; PDFJS.DefaultTextLayerFactory = DefaultTextLayerFactory; PDFJS.AnnotationLayerBuilder = AnnotationLayerBuilder; PDFJS.DefaultAnnotationLayerFactory = DefaultAnnotationLayerFactory; PDFJS.PDFHistory = PDFHistory; PDFJS.DownloadManager = DownloadManager; PDFJS.ProgressBar = ProgressBar; }).call((typeof window === 'undefined') ? this : window);<|fim▁end|>
var isScalingRestricted = false; if (this.canvas && PDFJS.maxCanvasPixels > 0) {
<|file_name|>longest-common-prefix.py<|end_file_name|><|fim▁begin|># Link: https://leetcode.com/problems/longest-common-prefix/<|fim▁hole|> # @param {string[]} strs # @return {string} def longestCommonPrefix(self, strs): if not len(strs): return '' if len(strs) == 1: return strs[0] ret = [] for i in range(0, len(strs[0])): for j in range(1, len(strs)): if len(strs[j]) == i or strs[j][i] != strs[0][i]: return ''.join(ret) ret.append(strs[0][i]) return ''.join(ret)<|fim▁end|>
class Solution:
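As a quick sanity check of the row above (with the prompt and completion stitched back into one file), here is a hypothetical usage snippet; the expected results follow the LeetCode problem statement:

sol = Solution()
print(sol.longestCommonPrefix(["flower", "flow", "flight"]))  # "fl"
print(sol.longestCommonPrefix(["dog", "racecar", "car"]))     # "" (no common prefix)
print(sol.longestCommonPrefix([]))                            # "" (empty input is handled up front)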
<|file_name|>minimal_example.py<|end_file_name|><|fim▁begin|>""" Loads hyperspy as a regular python library, creates a spectrum with random numbers and plots it to a file""" <|fim▁hole|>import matplotlib.pyplot as plt s = hs.signals.Spectrum(np.random.rand(1024)) s.plot() plt.savefig("testSpectrum.png")<|fim▁end|>
import hyperspy.api as hs import numpy as np
<|file_name|>d9.rs<|end_file_name|><|fim▁begin|>extern crate adventofcode; use adventofcode::d2::{Error, Parser, Program}; use std::io; use std::io::BufRead; fn program_for(inp: &str, v: Vec<i64>) -> Vec<i64> { let mut p = Program::new(Parser::parse(&inp)); p.run(&mut v.into_iter()) .collect::<Result<Vec<_>, Error>>() .unwrap() } fn main() -> io::Result<()> { let b = io::BufReader::new(io::stdin()); if let Some(inp) = b.lines().next() {<|fim▁hole|> println!("{:?}", program_for(inp, vec![2])); } Ok(()) }<|fim▁end|>
let inp = &inp?; println!("{:?}", program_for(inp, vec![1]));
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>#![cfg_attr(feature = "serde_macros", feature(custom_derive, plugin))] #![cfg_attr(feature = "serde_macros", plugin(serde_macros))] extern crate serde; extern crate serde_json; <|fim▁hole|>#[cfg(not(feature = "serde_macros"))] include!(concat!(env!("OUT_DIR"), "/lib.rs"));<|fim▁end|>
#[cfg(feature = "serde_macros")] include!("lib.rs.in");
<|file_name|>InternalTypes.ts<|end_file_name|><|fim▁begin|>/* * Copyright 2017-2020 Simon Edwards <[email protected]> * * This source code is licensed under the MIT license which is detailed in the LICENSE.txt file. */ import * as ExtensionApi from "@extraterm/extraterm-extension-api"; import { EtTerminal, LineRangeChange } from "../Terminal"; import { ViewerElement } from "../viewers/ViewerElement"; import { ExtensionMetadata, ExtensionPlatform, Category, ExtensionCommandContribution, ExtensionMenusContribution } from "../../ExtensionMetadata"; import { EtViewerTab } from "../ViewerTab"; import { SupportsDialogStack } from "../SupportsDialogStack"; import { CommandsRegistry } from "./CommandsRegistry"; import { TextEditor } from "../viewers/TextEditorType"; import { CommonExtensionWindowState } from "./CommonExtensionState"; import { TabWidget } from "../gui/TabWidget"; import { SessionConfiguration } from "@extraterm/extraterm-extension-api"; import { ExtensionContainerElement } from "./ExtensionContainerElement"; import { SplitLayout } from "../SplitLayout"; export interface CommandQueryOptions { categories?: Category[]; commandPalette?: boolean; contextMenu?: boolean; emptyPaneMenu?: boolean; newTerminalMenu?: boolean; terminalTitleMenu?: boolean; windowMenu?: boolean; when?: boolean; commands?: string[]; } export interface ExtensionManager { startUp(): void; setSplitLayout(splitLayout: SplitLayout): void; getAllExtensions(): ExtensionMetadata[]; onStateChanged: ExtensionApi.Event<void>; isExtensionRunning(name: string):boolean; enableExtension(name: string): void; disableExtension(name: string): void; extensionUiUtils: ExtensionUiUtils; getExtensionContextByName(name: string): InternalExtensionContext; findViewerElementTagByMimeType(mimeType: string): string; getAllSessionTypes(): { name: string, type: string }[]; getAllTerminalThemeFormats(): { name: string, formatName: string }[]; getAllSyntaxThemeFormats(): { name: string, formatName: string }[]; getActiveTab(): HTMLElement; getActiveTerminal(): EtTerminal; getActiveTabContent(): HTMLElement; getActiveTabWidget(): TabWidget; getActiveTextEditor(): TextEditor; isInputFieldFocus(): boolean; queryCommands(options: CommandQueryOptions): ExtensionCommandContribution[]; queryCommandsWithExtensionWindowState(options: CommandQueryOptions, context: CommonExtensionWindowState): ExtensionCommandContribution[]; executeCommand(command: string, args?: any): any; executeCommandWithExtensionWindowState(tempState: CommonExtensionWindowState, command: string, args?: any): any; updateExtensionWindowStateFromEvent(ev: Event): void; copyExtensionWindowState(): CommonExtensionWindowState; getExtensionWindowStateFromEvent(ev: Event): CommonExtensionWindowState; refocus(state: CommonExtensionWindowState): void; newTerminalCreated(newTerminal: EtTerminal, allTerminals: EtTerminal[]): void; terminalDestroyed(deadTerminal: EtTerminal, allTerminals: EtTerminal[]): void; onCommandsChanged: ExtensionApi.Event<void>; commandRegistrationChanged(): void; createNewTerminalTabTitleWidgets(terminal: EtTerminal); createSessionEditor(sessionType: string, sessionConfiguration: SessionConfiguration): InternalSessionEditor; createSessionSettingsEditors(sessionType: string, sessionConfiguration: SessionConfiguration): InternalSessionSettingsEditor[]; setViewerTabDisplay(viewerTabDisplay: ViewerTabDisplay): void; getViewerTabDisplay(): ViewerTabDisplay; } export interface AcceptsExtensionManager { setExtensionManager(extensionManager: ExtensionManager): void; } export 
function injectExtensionManager(instance: any, extensionManager: ExtensionManager): void { if (isAcceptsExtensionManager(instance)) { instance.setExtensionManager(extensionManager); } } export function isAcceptsExtensionManager(instance: any): instance is AcceptsExtensionManager { return (<AcceptsExtensionManager> instance).setExtensionManager !== undefined; } /** * Interface for something which can display ViewerElements in tabs. */ export interface ViewerTabDisplay { openViewerTab(viewerElement: ViewerElement): void; closeViewerTab(viewerElement: ViewerElement): void; switchToTab(viewerElement: ViewerElement): void; } export interface ProxyFactory { getTabProxy(tabLike: EtTerminal | EtViewerTab): ExtensionApi.Tab; getTerminalProxy(terminal: EtTerminal): ExtensionApi.Terminal; hasTerminalProxy(terminal: EtTerminal): boolean; getBlock(viewer: ViewerElement): ExtensionApi.Block; } export interface ExtensionUiUtils { showNumberInput(host: SupportsDialogStack & HTMLElement, options: ExtensionApi.NumberInputOptions): Promise<number | undefined>; showListPicker(host: SupportsDialogStack & HTMLElement, options: ExtensionApi.ListPickerOptions): Promise<number | undefined>; showOnCursorListPicker(terminal: EtTerminal, options: ExtensionApi.ListPickerOptions): Promise<number | undefined>; } export interface InternalWindow extends ExtensionApi.Window { findViewerElementTagByMimeType(mimeType: string): string; createSessionEditor(sessionType: string, sessionConfiguration: SessionConfiguration): InternalSessionEditor; createSessionSettingsEditors(sessionType: string, sessionConfiguration: SessionConfiguration): InternalSessionSettingsEditor[]; getTerminalBorderWidgetFactory(name: string): ExtensionApi.TerminalBorderWidgetFactory; newTerminalCreated(newTerminal: EtTerminal, allTerminals: EtTerminal[]): void; terminalDestroyed(deadTerminal: EtTerminal, allTerminals: EtTerminal[]): void; terminalAppendedViewer(newTerminal: EtTerminal, viewer: ViewerElement): void; terminalEnvironmentChanged(terminal: EtTerminal, changeList: string[]): void; terminalDidAppendScrollbackLines(terminal: EtTerminal, ev: LineRangeChange): void; terminalDidScreenChange(terminal: EtTerminal, ev: LineRangeChange): void; } /** * Holds internal accounting needed to support an Extension.<|fim▁hole|> */ export interface InternalExtensionContext extends ExtensionApi.ExtensionContext, ExtensionApi.Disposable { // Note: Most of these fields start with `_` to signal to any extension // developers that these fields are internal. 
_extensionManager: ExtensionManager; commands: CommandsRegistry; _extensionMetadata: ExtensionMetadata; _internalWindow: InternalWindow; _proxyFactory: ProxyFactory; _findViewerElementTagByMimeType(mimeType: string): string; _registerCommandContribution(contribution: ExtensionCommandContribution): ExtensionApi.Disposable; _setCommandMenu(command: string, menuType: keyof ExtensionMenusContribution, on: boolean); _debugRegisteredCommands(): void; _registerTabTitleWidget(name: string, factory: ExtensionApi.TabTitleWidgetFactory): void; _createTabTitleWidgets(terminal: EtTerminal): HTMLElement[]; } export interface InternalTerminalBorderWidget extends ExtensionApi.TerminalBorderWidget { _handleOpen(): void; _handleClose(): void; } export interface InternalTabTitleWidget extends ExtensionApi.TabTitleWidget { } export interface SessionSettingsChange { settingsConfigKey: string; settings: Object; } export interface InternalSessionSettingsEditor extends ExtensionApi.SessionSettingsEditorBase { name: string; onSettingsChanged: ExtensionApi.Event<SessionSettingsChange>; _getExtensionContainerElement(): ExtensionContainerElement; _init(): void; } export interface SessionConfigurationChange { sessionConfiguration: SessionConfiguration; } export interface InternalSessionEditor extends ExtensionApi.SessionEditorBase { onSessionConfigurationChanged: ExtensionApi.Event<SessionConfigurationChange>; _getExtensionContainerElement(): ExtensionContainerElement; _init(): void; } export function isSupportedOnThisPlatform(metadata: ExtensionMetadata): boolean { let included = metadata.includePlatform.length === 0; for (const platform of metadata.includePlatform) { included = included || _platformMatches(platform); } if ( ! included) { return false; } if (metadata.excludePlatform.length === 0) { return true; } for (const platform of metadata.excludePlatform) { if (_platformMatches(platform)) { return false; } } return true; } function _platformMatches(platform: ExtensionPlatform): boolean { if (platform.os == null && platform.arch == null) { return false; } if (platform.os === process.platform && platform.arch == null) { return true; } if (platform.arch === process.arch && platform.os == null) { return true; } return platform.arch === process.arch && platform.os === process.platform; }<|fim▁end|>
* * It also provides methods for the core application to interact with an * Extension and all the different things it may have registered and * provided.
<|file_name|>test_util.py<|end_file_name|><|fim▁begin|>import unittest from locust.util.timespan import parse_timespan from locust.util.rounding import proper_round class TestParseTimespan(unittest.TestCase): def test_parse_timespan_invalid_values(self): self.assertRaises(ValueError, parse_timespan, None) self.assertRaises(ValueError, parse_timespan, "") self.assertRaises(ValueError, parse_timespan, "q") def test_parse_timespan(self): self.assertEqual(7, parse_timespan("7")) self.assertEqual(7, parse_timespan("7s")) self.assertEqual(60, parse_timespan("1m")) self.assertEqual(7200, parse_timespan("2h")) self.assertEqual(3787, parse_timespan("1h3m7s")) class TestRounding(unittest.TestCase): def test_rounding_down(self): self.assertEqual(1, proper_round(1.499999999)) self.assertEqual(5, proper_round(5.499999999))<|fim▁hole|> def test_rounding_up(self): self.assertEqual(2, proper_round(1.5)) self.assertEqual(3, proper_round(2.5)) self.assertEqual(4, proper_round(3.5)) self.assertEqual(5, proper_round(4.5)) self.assertEqual(6, proper_round(5.5))<|fim▁end|>
self.assertEqual(2, proper_round(2.05)) self.assertEqual(3, proper_round(3.05))
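The assertions above pin down half-away-from-zero rounding (1.5 -> 2, 2.5 -> 3, and so on), which Python 3's built-in round() would not satisfy because it rounds ties to even. Below is a minimal sketch of a proper_round that passes these tests; it is an illustration only, not necessarily the implementation shipped in locust.util.rounding:

import decimal

def proper_round(value, digits=0):
    # Round halves away from zero, matching the test expectations above.
    exp = decimal.Decimal(10) ** -digits
    result = decimal.Decimal(str(value)).quantize(exp, rounding=decimal.ROUND_HALF_UP)
    return int(result) if digits == 0 else float(result)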
<|file_name|>morestack3.rs<|end_file_name|><|fim▁begin|>// Copyright 2012 The Rust Project Developers. See the COPYRIGHT // file at the top-level directory of this distribution and at // http://rust-lang.org/COPYRIGHT. // // Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or // http://www.apache.org/licenses/LICENSE-2.0> or the MIT license // <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your // option. This file may not be copied, modified, or distributed // except according to those terms. // error-pattern:explicit failure // Just testing unwinding extern mod std; fn getbig_and_fail(&&i: int) { let _r = and_then_get_big_again(5); if i != 0 { getbig_and_fail(i - 1); } else { fail!(); } } struct and_then_get_big_again { x:int, } impl Drop for and_then_get_big_again { fn finalize(&self) { fn getbig(i: int) { if i != 0 { getbig(i - 1); } } getbig(100); } } fn and_then_get_big_again(x:int) -> and_then_get_big_again { and_then_get_big_again { x: x } } fn main() { do task::spawn { getbig_and_fail(400);<|fim▁hole|><|fim▁end|>
}; }
<|file_name|>Outline.py<|end_file_name|><|fim▁begin|># coding=utf8 import sublime from .Base import Base from ...utils import Debug from ...utils.uiutils import get_prefix class Outline(Base): regions = {} ts_view = None def __init__(self, t3sviews): super(Outline, self).__init__('Typescript : Outline View', t3sviews) # SET TEXT def set_text(self, edit_token, members, ts_view): """ This function takes the tss.js members structure instead of a string. """ # this will process the outline, even if the view is closed self.ts_view = ts_view if type(members) == list: self._tssjs_2_outline_format(members) elif type(members) == str: self.text = members super(Outline, self).set_text(edit_token, self.text) def is_current_ts(self, ts_view): if ts_view is None or self.ts_view is None: return return ts_view.id() == self.ts_view.id() def _tssjs_2_outline_format(self, members): text = [] line = 0 self.regions = {} for member in members: start_line = member['min']['line'] end_line = member['lim']['line'] left = member['min']['character'] right = member['lim']['character'] a = self.ts_view.text_point(start_line-1, left-1) b = self.ts_view.text_point(end_line-1, right-1) region = sublime.Region(a, b) kind = get_prefix(member['loc']['kind']) container_kind = get_prefix(member['loc']['containerKind']) if member['loc']['kindModifiers'] != "": member['loc']['kindModifiers'] = " " + member['loc']['kindModifiers'] if member['loc']['kind'] != 'class' and member['loc']['kind'] != 'interface': t = "%s %s %s %s" % (kind, member['loc']['kindModifiers'], member['loc']['kind'], member['loc']['name']) text.append('\n\t') text.append(t.strip()) line += 1 self.regions[line] = region else: t = "%s %s %s %s {" % (container_kind, member['loc']['kindModifiers'], member['loc']['kind'], member['loc']['name']) if len(text) == 0: text.append('\n%s\n' % t.strip()) line += 2 self.regions[line - 1] = region else: text.append('\n\n}\n\n%s\n' % t.strip()) line += 5 self.regions[line - 1] = region if len(members) == 0: text.append("\n\nno members found\n") self.text = ''.join(text) is_focusing_ts_view = False def on_click(self,line): if self.is_focusing_ts_view: Debug('focus', 'Outline.on_click: is just focusing other view > ignore') return if line in self.regions: draw = sublime.DRAW_NO_FILL self.ts_view.add_regions('typescript-definition', [self.regions[line]], 'comment', 'dot', draw) self._focus_member_in_view(self.regions[line]) def _focus_member_in_view(self, region): if self.ts_view.is_loading(): return<|fim▁hole|> self.ts_view.window().focus_view(self.ts_view) self.is_focusing_ts_view = False<|fim▁end|>
else: Debug('focus', "_focus_member_in_view, Region @pos %i" % (region.begin())) self.is_focusing_ts_view = True self.ts_view.show(region)
<|file_name|>filters.js<|end_file_name|><|fim▁begin|>'use strict';<|fim▁hole|>]). // create your own filter here filter('yourFilterName', function () { return function () { return; }; });<|fim▁end|>
/* Filters */ angular.module('multi-screen-demo.filters', [
<|file_name|>rp_extract.py<|end_file_name|><|fim▁begin|>''' RP_extract: Rhythm Patterns Audio Feature Extractor @author: 2014-2015 Alexander Schindler, Thomas Lidy Re-implementation by Alexander Schindler of RP_extract for Matlab Matlab version originally by Thomas Lidy, based on Musik Analysis Toolbox by Elias Pampalk ( see http://ifs.tuwien.ac.at/mir/downloads.html ) Main function is rp_extract. See function definition and description for more information, or example usage in main function. Note: All required functions are provided by the two main scientific libraries numpy and scipy. Note: In case you alter the code to use transform2mel, librosa needs to be installed: pip install librosa ''' import numpy as np from scipy import stats from scipy.fftpack import fft #from scipy.fftpack import rfft # Discrete Fourier transform of a real sequence. from scipy import interpolate # suppress numpy warnings (divide by 0 etc.) np.set_printoptions(suppress=True) # required for debugging np.set_printoptions(precision=8, threshold=10, suppress=True, linewidth=200, edgeitems=10) # INITIALIZATION: Constants & Mappings # Bark Scale bark = [100, 200, 300, 400, 510, 630, 770, 920, 1080, 1270, 1480, 1720, 2000, 2320, 2700, 3150, 3700, 4400, 5300, 6400, 7700, 9500, 12000, 15500] n_bark_bands = len(bark) # copy the bark vector (using [:]) and add a 0 in front (to make calculations below easier) barks = bark[:] barks.insert(0,0) # Phone Scale phon = [3, 20, 40, 60, 80, 100, 101] # copy the bark vector (using [:]) and add a 0 in front (to make calculations below easier) phons = phon[:] phons.insert(0,0) phons = np.asarray(phons) # Loudness Curves eq_loudness = np.array([[55, 40, 32, 24, 19, 14, 10, 6, 4, 3, 2, 2, 0,-2,-5,-4, 0, 5, 10, 14, 25, 35], [66, 52, 43, 37, 32, 27, 23, 21, 20, 20, 20, 20,19,16,13,13,18, 22, 25, 30, 40, 50], [76, 64, 57, 51, 47, 43, 41, 41, 40, 40, 40,39.5,38,35,33,33,35, 41, 46, 50, 60, 70], [89, 79, 74, 70, 66, 63, 61, 60, 60, 60, 60, 59,56,53,52,53,56, 61, 65, 70, 80, 90], [103, 96, 92, 88, 85, 83, 81, 80, 80, 80, 80, 79,76,72,70,70,75, 79, 83, 87, 95,105], [118,110,107,105,103,102,101,100,100,100,100, 99,97,94,90,90,95,100,103,105,108,115]]) loudn_freq = np.array([31.62, 50, 70.7, 100, 141.4, 200, 316.2, 500, 707.1, 1000, 1414, 1682, 2000, 2515, 3162, 3976, 5000, 7071, 10000, 11890, 14140, 15500]) # We have the loudness values for the frequencies in loudn_freq # now we calculate in loudn_bark a matrix of loudness sensation values for the bark bands margins i = 0 j = 0 loudn_bark = np.zeros((eq_loudness.shape[0], len(bark))) for bsi in bark: while j < len(loudn_freq) and bsi > loudn_freq[j]: j += 1 j -= 1 if np.where(loudn_freq == bsi)[0].size != 0: # loudness value for this frequency already exists loudn_bark[:,i] = eq_loudness[:,np.where(loudn_freq == bsi)][:,0,0] else: w1 = 1 / np.abs(loudn_freq[j] - bsi) w2 = 1 / np.abs(loudn_freq[j + 1] - bsi) loudn_bark[:,i] = (eq_loudness[:,j]*w1 + eq_loudness[:,j+1]*w2) / (w1 + w2) i += 1 # SPECTRAL MASKING Spreading Function # CONST_spread contains matrix of spectral frequency masking factors CONST_spread = np.zeros((n_bark_bands,n_bark_bands)) for i in range(n_bark_bands): CONST_spread[i,:] = 10**((15.81+7.5*((i-np.arange(n_bark_bands))+0.474)-17.5*(1+((i-np.arange(n_bark_bands))+0.474)**2)**0.5)/10) # UTILITY FUNCTIONS def nextpow2(num): '''NextPow2 find the next highest number to the power of 2 to a given number and return the exponent to 2 (analogously to Matlab's nextpow2() function) ''' n = 2 i = 1 while n < num: n *= 2 i += 1 return i 
# FFT FUNCTIONS def periodogram(x,win,Fs=None,nfft=1024): ''' Periodogram Periodogram power spectral density estimate Note: this function was written with 1:1 Matlab compatibility in mind. The number of points, nfft, in the discrete Fourier transform (DFT) is the maximum of 256 or the next power of two greater than the signal length. :param x: time series data (e.g. audio signal), ideally length matches nfft :param win: window function to be applied (e.g. Hanning window). in this case win expects already data points of the window to be provided. :param Fs: sampling frequency (unused) :param nfft: number of bins for FFT (ideally matches length of x) :return: Periodogram power spectrum (np.array) ''' #if Fs == None: # Fs = 2 * np.pi # commented out because unused U = np.dot(win.conj().transpose(), win) # compensates for the power of the window. Xx = fft((x * win),nfft) # verified P = Xx*np.conjugate(Xx)/U # Compute the 1-sided or 2-sided PSD [Power/freq] or mean-square [Power]. # Also, compute the corresponding freq vector & freq units. # Generate the one-sided spectrum [Power] if so wanted if nfft % 2 != 0: select = np.arange((nfft+1)/2) # ODD P = P[select,:] # Take only [0,pi] or [0,pi) P[1:-1] = P[1:-1] * 2 # Only DC is a unique point and doesn't get doubled else: #select = np.arange(nfft/2+1); # EVEN #P = P[select,:] # Take only [0,pi] or [0,pi) # TODO: why commented out? P[1:-2] = P[1:-2] * 2 P = P / (2 * np.pi) return P def calc_spectrogram(wavsegment,fft_window_size,fft_overlap = 0.5,real_values=True): ''' Calc_Spectrogram calculate spectrogram using periodogram function (which performs FFT) to convert wave signal data from time to frequency domain (applying a Hanning window and (by default) 50 % window overlap) :param wavsegment: audio wave file data for a segment to be analyzed (mono (i.e. 1-dimensional vector) only :param fft_window_size: windows size to apply FFT to :param fft_overlap: overlap to apply during FFT analysis in % fraction (e.g. default = 0.5, means 50% overlap) :param real_values: if True, return real values by taking abs(spectrogram), if False return complex values :return: spectrogram matrix as numpy array (fft_window_size, n_frames) ''' # hop_size (increment step in samples, determined by fft_window_size and fft_overlap) hop_size = int(fft_window_size*(1-fft_overlap)) # this would compute the segment length, but it's pre-defined above ... # segment_size = fft_window_size + (frames-1) * hop_size # ... therefore we convert the formula to give the number of frames needed to iterate over the segment: n_frames = (wavsegment.shape[0] - fft_window_size) / hop_size + 1 # n_frames_old = wavsegment.shape[0] / fft_window_size * 2 - 1 # number of iterations with 50% overlap # TODO: provide this as parameter for better caching? han_window = np.hanning(fft_window_size) # verified # initialize result matrix for spectrogram spectrogram = np.zeros((fft_window_size, n_frames), dtype=np.complex128) # start index for frame-wise iteration ix = 0 for i in range(n_frames): # stepping through the wave segment, building spectrum for each window spectrogram[:,i] = periodogram(wavsegment[ix:ix+fft_window_size], win=han_window,nfft=fft_window_size) ix = ix + hop_size # NOTE: tested scipy periodogram BUT it delivers totally different values AND takes 2x the time of our periodogram function (0.13 sec vs. 
0.06 sec) # from scipy.signal import periodogram # move on top #f, spec = periodogram(x=wavsegment[idx],fs=samplerate,window='hann',nfft=fft_window_size,scaling='spectrum',return_onesided=True) if real_values: spectrogram = np.abs(spectrogram) return (spectrogram) # FEATURE FUNCTIONS def calc_statistical_features(matrix): result = np.zeros((matrix.shape[0],7)) result[:,0] = np.mean(matrix, axis=1) result[:,1] = np.var(matrix, axis=1, dtype=np.float64) # the values for variance differ between MATLAB and Numpy! result[:,2] = stats.skew(matrix, axis=1) result[:,3] = stats.kurtosis(matrix, axis=1, fisher=False) # Matlab calculates Pearson's Kurtosis result[:,4] = np.median(matrix, axis=1) result[:,5] = np.min(matrix, axis=1) result[:,6] = np.max(matrix, axis=1) result[np.where(np.isnan(result))] = 0 return result # PSYCHO-ACOUSTIC TRANSFORMS as individual functions # Transform 2 Mel Scale: NOT USED by rp_extract, but included for testing purposes or for import into other programs def transform2mel(spectrogram,samplerate,fft_window_size,n_mel_bands = 80,freq_min = 0,freq_max = None): '''Transform to Mel convert a spectrogram to a Mel scale spectrogram by grouping original frequency bins to Mel frequency bands (using Mel filter from Librosa) Parameters spectrogram: input spectrogram samplerate: samplerate of audio signal fft_window_size: number of time window / frequency bins in the FFT analysis n_mel_bands: number of desired Mel bands, typically 20, 40, 80 (max. 128 which is default when 'None' is provided) freq_min: minimum frequency (Mel filters will be applied >= this frequency, but still return n_meld_bands number of bands) freq_max: cut-off frequency (Mel filters will be applied <= this frequency, but still return n_meld_bands number of bands) Returns: mel_spectrogram: Mel spectrogram: np.array of shape(n_mel_bands,frames) maintaining the number of frames in the original spectrogram ''' import librosa.filters # Syntax: librosa.filters.mel(sr, n_fft, n_mels=128, fmin=0.0, fmax=None, htk=False) mel_basis = librosa.filters.mel(samplerate,fft_window_size, n_mels=n_mel_bands,fmin=freq_min,fmax=freq_max) freq_bin_max = mel_basis.shape[1] # will be fft_window_size / 2 + 1 # IMPLEMENTATION WITH FOR LOOP # initialize Mel Spectrogram matrix #n_mel_bands = mel_basis.shape[0] # get the number of bands from result in case 'None' was specified as parameter #mel_spectrogram = np.empty((n_mel_bands, frames)) #for i in range(frames): # stepping through the wave segment, building spectrum for each window # mel_spectrogram[:,i] = np.dot(mel_basis,spectrogram[0:freq_bin_max,i]) # IMPLEMENTATION WITH DOT PRODUCT (15% faster) # multiply the mel filter of each band with the spectogram frame (dot product executes it on all frames) mel_spectrogram = np.dot(mel_basis,spectrogram[0:freq_bin_max,:]) return (mel_spectrogram) # Bark Transform: Convert Spectrogram to Bark Scale # matrix: Spectrogram values as returned from periodogram function # freq_axis: array of frequency values along the frequency axis # max_bands: limit number of Bark bands (1...24) (counting from lowest band) def transform2bark(matrix, freq_axis, max_bands=None): # barks and n_bark_bands have been initialized globally above if max_bands == None: max_band = n_bark_bands else: max_band = min(n_bark_bands,max_bands) matrix_out = np.zeros((max_band,matrix.shape[1]),dtype=matrix.dtype) for b in range(max_band-1): matrix_out[b] = np.sum(matrix[((freq_axis >= barks[b]) & (freq_axis < barks[b+1]))], axis=0) return(matrix_out) # Spectral Masking (assumes 
values are arranged in <=24 Bark bands) def do_spectral_masking(matrix): n_bands = matrix.shape[0] # CONST_spread has been initialized globally above spread = CONST_spread[0:n_bands,0:n_bands] # not sure if column limitation is right here; was originally written for n_bark_bands = 24 only matrix = np.dot(spread, matrix) return(matrix) # Map to Decibel Scale def transform2db(matrix): '''Map to Decibel Scale''' matrix[np.where(matrix < 1)] = 1 matrix = 10 * np.log10(matrix) return(matrix) # Transform to Phon (assumes matrix is in dB scale) def transform2phon(matrix): old_npsetting = np.seterr(invalid='ignore') # avoid 'RuntimeWarning: invalid value encountered in divide' at ifac division below # number of bark bands, matrix length in time dim n_bands = matrix.shape[0] t = matrix.shape[1] # DB-TO-PHON BARK-SCALE-LIMIT TABLE # introducing 1 level more with level(1) being infinite # to avoid (levels - 1) producing errors like division by 0 #%%table_dim = size(CONST_loudn_bark,2); table_dim = n_bands; # OK cbv = np.concatenate((np.tile(np.inf,(table_dim,1)), loudn_bark[:,0:n_bands].transpose()),1) # OK # init lowest level = 2 levels = np.tile(2,(n_bands,t)) # OK for lev in range(1,6): # OK db_thislev = np.tile(np.asarray([cbv[:,lev]]).transpose(),(1,t)) levels[np.where(matrix > db_thislev)] = lev + 2 # the matrix 'levels' stores the correct Phon level for each data point cbv_ind_hi = np.ravel_multi_index(dims=(table_dim,7), multi_index=np.array([np.tile(np.array([range(0,table_dim)]).transpose(),(1,t)), levels-1]), order='F') cbv_ind_lo = np.ravel_multi_index(dims=(table_dim,7), multi_index=np.array([np.tile(np.array([range(0,table_dim)]).transpose(),(1,t)), levels-2]), order='F') # interpolation factor % OPT: pre-calc diff ifac = (matrix[:,0:t] - cbv.transpose().ravel()[cbv_ind_lo]) / (cbv.transpose().ravel()[cbv_ind_hi] - cbv.transpose().ravel()[cbv_ind_lo]) ifac[np.where(levels==2)] = 1 # keeps the upper phon value; ifac[np.where(levels==8)] = 1 # keeps the upper phon value; # phons has been initialized globally above matrix[:,0:t] = phons.transpose().ravel()[levels - 2] + (ifac * (phons.transpose().ravel()[levels - 1] - phons.transpose().ravel()[levels - 2])) # OPT: pre-calc diff np.seterr(invalid=old_npsetting['invalid']) # restore RuntimeWarning setting for np division error return(matrix) # Transform to Sone scale (assumes matrix is in Phon scale) def transform2sone(matrix): idx = np.where(matrix >= 40) not_idx = np.where(matrix < 40) matrix[idx] = 2**((matrix[idx]-40)/10) # matrix[not_idx] = (matrix[not_idx]/40)**2.642 # max => 438.53 return(matrix) # MAIN Rhythm Pattern Extraction Function def rp_extract( wavedata, # pcm (wav) signal data normalized to (-1,1) samplerate, # signal sampling rate # which features to extract extract_rp = False, # extract Rhythm Patterns features extract_ssd = False, # extract Statistical Spectrum Descriptor extract_tssd = False, # extract temporal Statistical Spectrum Descriptor extract_rh = False, # extract Rhythm Histogram features extract_rh2 = False, # extract Rhythm Histogram features including Fluctuation Strength Weighting extract_trh = False, # extract temporal Rhythm Histogram features extract_mvd = False, # extract modulation variance descriptor # processing options skip_leadin_fadeout = 1, # >=0 how many sample windows to skip at the beginning and the end step_width = 1, # >=1 each step_width'th sample window is analyzed n_bark_bands = 24, # 2..24 number of desired Bark bands (from low frequencies to high) (e.g. 
15 or 20 or 24 for 11, 22 and 44 kHz audio respectively) (1 delivers undefined output) mod_ampl_limit = 60, # 2..257 number of modulation frequencies on x-axis # enable/disable parts of feature extraction transform_bark = True, # [S2] transform to Bark scale spectral_masking = True, # [S3] compute Spectral Masking transform_db = True, # [S4] transfrom to dB: advisable only to turn off when [S5] and [S6] are turned off too transform_phon = True, # [S5] transform to Phon: if disabled, Sone_transform will be disabled too transform_sone = True, # [S6] transform to Sone scale (only applies if transform_phon = True) fluctuation_strength_weighting = True, # [R2] apply Fluctuation Strength weighting curve #blurring = True # [R3] Gradient+Gauss filter # TODO: not yet implemented return_segment_features = False, # this will return features per each analyzed segment instead of aggregated ones verbose = False # print messages whats going on ): '''Rhythm Pattern Feature Extraction performs segment-wise audio feature extraction from provided audio wave (PCM) data and extracts the following features: Rhythm Pattern Statistical Spectrum Descriptor Statistical Histogram temporal Statistical Spectrum Descriptor Rhythm Histogram temporal Rhythm Histogram features Modulation Variance Descriptor Examples: >>> from audiofile_read import * >>> samplerate, samplewidth, wavedata = audiofile_read("music/BoxCat_Games_-_10_-_Epic_Song.mp3") #doctest: +ELLIPSIS Decoded .mp3 with: mpg123 -q -w /....wav music/BoxCat_Games_-_10_-_Epic_Song.mp3 >>> feat = rp_extract(wavedata, samplerate, extract_rp=True, extract_ssd=True, extract_rh=True) Analyzing 7 segments >>> for k in feat.keys(): ... print k.upper() + ":", feat[k].shape[0], "dimensions" SSD: 168 dimensions RH: 60 dimensions RP: 1440 dimensions >>> print feat["rp"] [ 0.01599218 0.01979605 0.01564305 0.01674175 0.00959912 0.00931604 0.00937831 0.00709122 0.00929631 0.00754473 ..., 0.02998088 0.03602739 0.03633861 0.03664331 0.02589753 0.02110256 0.01457744 0.01221825 0.0073788 0.00164668] >>> print feat["rh"] [ 7.11614842 12.58303013 6.96717295 5.24244146 6.49677561 4.21249659 12.43844045 4.19672357 5.30714983 6.1674115 ..., 1.55870044 2.69988854 2.75075831 3.67269877 13.0351257 11.7871738 3.76106713 2.45225195 2.20457928 2.06494926] >>> print feat["ssd"] [ 3.7783279 5.84444695 5.58439197 4.87849697 4.14983056 4.09638223 4.04971225 3.96152261 3.65551062 3.2857232 ..., 14.45953191 14.6088727 14.03351539 12.84783095 10.81735946 9.04121124 7.13804008 5.6633501 3.09678286 0.52076428] ''' # PARAMETER INITIALIZATION # non-exhibited parameters include_DC = False FLATTEN_ORDER = 'F' # order how matrices are flattened to vector: 'F' for Matlab/Fortran, 'C' for C order (IMPORTANT TO USE THE SAME WHEN reading+reshaping the features) # segment_size should always be ~6 sec, fft_window_size should always be ~ 23ms if (samplerate == 11025): segment_size = 2**16 fft_window_size = 256 elif (samplerate == 22050): segment_size = 2**17 fft_window_size = 512 elif (samplerate == 44100): segment_size = 2**18 fft_window_size = 1024 else: # throw error not supported raise ValueError('A sample rate of ' + str(samplerate) + " is not supported (only 11, 22 and 44 kHz).") # calculate frequency values on y-axis (for Bark scale calculation): # freq_axis = float(samplerate)/fft_window_size * np.arange(0,(fft_window_size/2) + 1) # linear space from 0 to samplerate/2 in (fft_window_size/2+1) steps freq_axis = np.linspace(0, float(samplerate)/2, int(fft_window_size//2) + 1, endpoint=True) # CONVERT 
STEREO TO MONO: Average the channels if wavedata.ndim > 1: # if we have more than 1 dimension if wavedata.shape[1] == 1: # check if 2nd dimension is just 1 wavedata = wavedata[:,0] # then we take first and only channel else: wavedata = np.mean(wavedata, 1) # otherwise we average the signals over the channels # SEGMENT INITIALIZATION # find positions of wave segments skip_seg = skip_leadin_fadeout seg_pos = np.array([1, segment_size]) # array with 2 entries: start and end position of selected segment seg_pos_list = [] # list to store all the individual segment positions (only when return_segment_features == True) # if file is too small, don't skip leadin/fadeout and set step_width to 1 """ if ((skip_leadin_fadeout > 0) or (step_width > 1)): duration = wavedata.shape[0]/samplerate if (duration < 45): step_width = 1 skip_seg = 0 # TODO: do this as a warning? if verbose: print "Duration < 45 seconds: setting step_width to 1 and skip_leadin_fadeout to 0." else: # advance by number of skip_seg segments (i.e. skip lead_in) seg_pos = seg_pos + segment_size * skip_seg """ # calculate number of segments n_segments = 1 #int(np.floor( (np.floor( (wavedata.shape[0] - (skip_seg*2*segment_size)) / segment_size ) - 1 ) / step_width ) + 1) if verbose: print "Analyzing", n_segments, "segments" #if n_segments == 0: # raise ValueError("Not enough data to analyze! Minimum sample length needs to be " + # str(segment_size) + " (5.94 seconds) but it is " + str(wavedata.shape[0]) + # " (" + str(round(wavedata.shape[0] * 1.0 / samplerate,2)) + " seconds)") # initialize output features = {} ssd_list = [] sh_list = [] rh_list = [] rh2_list = [] rp_list = [] mvd_list = [] hearing_threshold_factor = 0.0875 * (2**15) # SEGMENT ITERATION for seg_id in range(n_segments): # keep track of segment position if return_segment_features: seg_pos_list.append(seg_pos) # EXTRACT WAVE SEGMENT that will be processed # data is assumed to be mono waveform wavsegment = wavedata #[seg_pos[0]-1:seg_pos[1]] # verified # v210715 # Python : [-0.0269165 -0.02128601 -0.01864624 -0.01893616 -0.02166748 -0.02694702 -0.03457642 -0.04333496 -0.05166626 -0.05891418] # Matlab : [-0,0269165 -0,02125549 -0,01861572 -0,01893616 -0,02165222 -0,02694702 -0,03456115 -0,04331970 -0,05166626 -0,05891418] # adjust hearing threshold # TODO: move after stereo-mono conversion above? 
wavsegment = wavsegment * hearing_threshold_factor # v210715 # Python : [ -77.175 -61.03125 -53.4625 -54.29375 -62.125 -77.2625 -99.1375 -124.25 -148.1375 -168.91875] # Matlab : [ -77,175 -60,94375 -53,3750 -54,29375 -62,081 -77,2625 -99,0938 -124,21 -148,1375 -168,91875] matrix = calc_spectrogram(wavsegment,fft_window_size) # v210715 #Python: 0.01372537 0.51454915 72.96077581 84.86663379 2.09940049 3.29631279 97373.2756834 23228.2065494 2678.44451741 30467.235416 # : 84.50635406 58.32826049 1263.82538188 234.11858349 85.48176796 97.26094525 214067.91208223 3570917.53366476 2303291.96676741 1681002.94519665 # : 171.47168402 1498.04129116 3746.45491915 153.01444364 37.20801758 177.74229702 238810.1975412 3064388.50572536 5501187.79635479 4172009.81345923 #Matlab: 0,01528259 0,49653179 73,32978523 85,38774541 2,00416767 3,36618763 97416,24267209 23239,84650814 2677,01521862 30460,9231041364 # : 84,73805309 57,84524803 1263,40594029 235,62185973 85,13826606 97,61122652 214078,02415144 3571346,74831746 2303286,74666381 1680967,41922679 # : 170,15377915 1500,98052242 3744,98456435 154,14108817 36,69362260 177,48982263 238812,02171250 3064642,99278220 5501230,26588318 4172058,72803277 # # PSYCHO-ACOUSTIC TRANSFORMS # Map to Bark Scale if transform_bark: matrix = transform2bark(matrix,freq_axis,n_bark_bands) # v210715 # Python: 255.991763 1556.884100 5083.2410768 471.9996609 124.789186 278.299555 550251.385306 6658534.245939 7807158.207639 5883479.99407189 # : 77128.354925 10446.109041 22613.8525735 13266.2502432 2593.395039 1367.697057 675114.554043 23401741.536499 6300109.471193 8039710.71759598 # : 127165.795400 91270.354107 15240.3501050 16291.2234730 1413.851495 2166.723800 868138.817452 20682384.237884 8971171.605009 5919089.97818692 # Matlab: 254,907114 1559,322302 5081,720289 475,1506933 123,836056 278,46723 550306,288536 6659229,587607 7807194,027765 5883487,07036370 # : 77118,196343 10447,961479 22605,559124 13266,4432995 2591,064037 1368,48462 675116,996782 23400723,570438 6300124,132022 8039688,83884099 # : 127172,560642 91251,040768 15246,639683 16286,4542687 1414,053166 2166,42874 868063,055613 20681863,052695 8971108,607811 5919136,16752791 # Spectral Masking if spectral_masking: matrix = do_spectral_masking(matrix) # v210715 # Python: 12978.051641 3416.109125 8769.913963 2648.888265 547.12360 503.50224 660888.17361 10480839.33617 8840234.405272 7193404.23970964 # : 100713.471006 27602.656332 27169.741240 16288.350176 2887.60281 1842.05959 1021358.42618 29229962.41626 10653981.441005 11182818.62910279 # : 426733.607945 262537.326945 43522.106075 41091.381283 4254.39289 4617.45877 1315036.85377 31353824.35688 12417010.121754 9673923.23590653 # Matlab: 12975,335615 3418,81282 8767,062187 2652,061105 545,79379 503,79683 660943,32199 10481368,76411 8840272,477464 7193407,85259461 # : 100704,175421 27602,34142 27161,901160 16288,924458 2884,94883 1842,86020 1021368,99046 29229118,99738 10653999,341989 11182806,7524195 # : 426751,992198 262523,89306 43524,970883 41085,415594 4253,42029 4617,35691 1314966,73269 31353021,99155 12416968,806879 9673951,88376021 # Map to Decibel Scale if transform_db: matrix = transform2db(matrix) # v210715 # Python: 41.13209498 35.33531736 39.42995333 34.23063639 27.38085455 27.02001413 58.2012798 70.20396064 69.46463781 68.56934467 # : 50.03087564 44.40950878 44.34085502 42.11877097 34.60537456 32.65303677 60.09178176 74.65828257 70.27511936 70.48551281 # : 56.30156848 54.19191059 46.38709903 46.1375074 36.28837595 36.64403027 61.18937924 74.96290521 
70.94017035 69.85602637 # Matlab: 41,13118599 35,33875324 39,42854087 34,23583526 27,37028596 27,02255437 58,20164218 70,20418000 69,46465651 68,56934684 # : 50,03047477 44,40945923 44,33960164 42,11892409 34,60138115 32,65492392 60,09182668 74,65815725 70,27512665 70,48550820 # : 56,30175557 54,19168835 46,38738489 46,13687684 36,28738298 36,64393446 61,18914765 74,96279407 70,94015590 69,85603922 # Transform Phon if transform_phon: matrix = transform2phon(matrix) # v210715 # Python: 25.90299283 17.82310731 23.4713619 16.37852452 7.42111749 6.94924924 47.58029453 60.22662293 59.43646085 58.49404702 # : 47.03087564 41.40950878 41.34085502 38.89846372 29.5067182 27.06629597 57.09178176 71.65828257 67.27511936 67.48551281 # : 55.02273887 52.91308099 45.10826943 44.8586778 34.3678058 34.769195 59.91054964 73.68407561 69.66134075 68.57719676 # Matlab: 25,90169428 17,82760039 23,46934410 16,38532303 7,40729702 6,95257110 47,58067598 60,22686667 59,43648053 58,49404931 # : 47,03047477 41,40945923 41,33960164 38,89865511 29,50172644 27,06865491 57,09182668 71,65815725 67,27512665 67,48550820 # : 55,02292596 52,91285875 45,10855528 44,85804723 34,36668514 34,76908687 59,91031805 73,68396446 69,66132629 68,57720962 # Transform Sone if transform_sone: matrix = transform2sone(matrix) # v210715 # Python: 0.31726931 0.11815598 0.24452297 0.09450863 0.01167179 0.009812 1.6911791 4.06332931 3.84676603 3.60351463 # : 1.62798518 1.10263162 1.09739697 0.92887876 0.44759842 0.35631529 3.26974511 8.97447943 6.62312431 6.72041945 # : 2.83288863 2.44749871 1.42486669 1.40042797 0.669685 0.69054778 3.97527582 10.327417 7.81439442 7.24868691 # Matlab: 0,31722728 0,11823469 0,24446743 0,09461230 0,01161444 0,00982439 1,69122381 4,06339796 3,84677128 3,60351520 # : 1,62793994 1,10262783 1,09730163 0,92889083 0,44739839 0,35639734 3,26975529 8,97440147 6,62312765 6,72041730 # : 2,83292537 2,44746100 1,42489491 1,40036676 0,66962731 0,69054210 3,97521200 10,32733744 7,81438659 7,24869337 # FEATURES: now we got a Sonogram and extract statistical features # SSD: Statistical Spectrum Descriptors if (extract_ssd or extract_tssd): ssd = calc_statistical_features(matrix) ssd_list.append(ssd.flatten(FLATTEN_ORDER)) # v210715 # Python: 2.97307486 5.10356599 0.65305978 2.35489911 2.439558 0.009812 8.1447095 # : 4.72262845 7.30899976 0.17862996 2.10446264 4.58595337 0.25538117 12.83339251 # : 4.77858109 5.52646859 0.23911764 2.9056742 4.96338019 0.589568 13.6683906 # : 4.43503421 3.69422906 0.41473155 3.06743402 4.33220988 0.88354694 10.89393754 # : 3.77216546 2.3993334 0.84001713 4.35548197 3.65140589 1.01199696 11.07806891 # : 3.60563073 2.09907968 1.49906811 7.07183968 3.35596471 1.00619842 11.2872743 # : 3.56816128 2.20237398 1.69790808 7.57870223 3.33806767 1.10826324 10.84965392 # : 3.43734647 2.38648202 1.59655791 6.86704341 3.23361995 1.10198021 11.89470587 # : 3.18466303 2.39479532 1.99223131 8.83987184 2.8819031 0.93982524 11.28737448 # : 2.90996406 1.85412568 1.97247446 8.36738395 2.68063918 0.81760102 9.64247378 # Matlab: 2,97309758 5,11366933 0,65306558 2,35489605 2,43956735 0,00982439 8,14473582 # : 4,72264163 7,32338449 0,17863061 2,10444843 4,58593777 0,25568703 12,83335168 # : 4,77859306 5,53731457 0,23911126 2,90567055 4,96338616 0,58959588 13,66839858 # : 4,43505068 3,70148292 0,41473410 3,06742263 4,33222037 0,88357883 10,89397920 # : 3,77217541 2,40405654 0,84000183 4,35540491 3,65136495 1,01191651 11,07802201 # : 3,60563459 2,10319516 1,49905911 7,07181623 3,35609824 1,00628652 11,28728291 # : 3,56820841 
2,20675908 1,69792784 7,57880557 3,33819690 1,10830805 10,84975850 # : 3,43736757 2,39117736 1,59656951 6,86710630 3,23366165 1,10199096 11,89486723 # : 3,18467212 2,39951286 1,99223621 8,83991021 2,88200015 0,93978494 11,28733449 # : 2,90997546 1,85776617 1,97246361 8,36742039 2,68074853 0,81790606 9,64262886 # values verified # RP: RHYTHM PATTERNS feature_part_xaxis1 = range(0,mod_ampl_limit) # take first (opts.mod_ampl_limit) values of fft result including DC component feature_part_xaxis2 = range(1,mod_ampl_limit+1) # leave DC component and take next (opts.mod_ampl_limit) values of fft result if (include_DC): feature_part_xaxis_rp = feature_part_xaxis1 else: feature_part_xaxis_rp = feature_part_xaxis2 # 2nd FFT fft_size = 2**(nextpow2(matrix.shape[1])) if (mod_ampl_limit >= fft_size): return {"rh":[]} #raise(ValueError("mod_ampl_limit option must be smaller than FFT window size (" + str(fft_size) + ").")) # NOTE: in fact only half of it (256) makes sense due to the symmetry of the FFT result rhythm_patterns = np.zeros((matrix.shape[0], fft_size), dtype=np.complex128) #rhythm_patterns = np.zeros((matrix.shape[0], fft_size), dtype=np.float64) # real_matrix = abs(matrix) for b in range(0,matrix.shape[0]): rhythm_patterns[b,:] = fft(matrix[b,:], fft_size) # tried this instead, but ... #rhythm_patterns[b,:] = fft(real_matrix[b,:], fft_size) # ... no performance improvement #rhythm_patterns[b,:] = rfft(real_matrix[b,:], fft_size) # ... different output values rhythm_patterns = rhythm_patterns / 256 # why 256? # convert from complex128 to float64 (real) rp = np.abs(rhythm_patterns[:,feature_part_xaxis_rp]) # verified # MVD: Modulation Variance Descriptors if extract_mvd: mvd = calc_statistical_features(rp.transpose()) # verified mvd_list.append(mvd.flatten(FLATTEN_ORDER)) # RH: Rhythm Histograms - OPTION 1: before fluctuation_strength_weighting (as in Matlab) if extract_rh: rh = np.sum(np.abs(rhythm_patterns[:,feature_part_xaxis2]),axis=0) #without DC component # verified rh_list.append(rh.flatten(FLATTEN_ORDER)) # final steps for RP: # Fluctuation Strength weighting curve if fluctuation_strength_weighting: # modulation frequency x-axis (after 2nd FFT) # mod_freq_res = resolution of modulation frequency axis (0.17 Hz) mod_freq_res = 1 / (float(segment_size) / samplerate) # modulation frequencies along x-axis from index 0 to 256) mod_freq_axis = mod_freq_res * np.array(feature_part_xaxis_rp) # fluctuation strength curve fluct_curve = 1 / (mod_freq_axis/4 + 4/mod_freq_axis) for b in range(rp.shape[0]): rp[b,:] = rp[b,:] * fluct_curve #[feature_part_xaxis_rp] #values verified # RH: Rhythm Histograms - OPTION 2 (after Fluctuation weighting) if extract_rh2: rh2 = np.sum(rp,axis=0) #TODO: adapt to do always without DC component rh2_list.append(rh2.flatten(FLATTEN_ORDER)) # Gradient+Gauss filter #if extract_rp: # TODO Gradient+Gauss filter #for i in range(1,rp.shape[1]): # rp[:,i-1] = np.abs(rp[:,i] - rp[:,i-1]); # #rp = blur1 * rp * blur2; rp_list.append(rp.flatten(FLATTEN_ORDER)) seg_pos = seg_pos + segment_size * step_width if extract_rp: if return_segment_features: features["rp"] = np.array(rp_list) else: features["rp"] = np.median(np.asarray(rp_list), axis=0) if extract_ssd: if return_segment_features: features["ssd"] = np.array(ssd_list) else: features["ssd"] = np.mean(np.asarray(ssd_list), axis=0) if extract_rh: if return_segment_features: features["rh"] = np.array(rh_list) else: features["rh"] = np.median(np.asarray(rh_list), axis=0) if extract_mvd: if return_segment_features: features["mvd"] 
= np.array(mvd_list) else: features["mvd"] = np.mean(np.asarray(mvd_list), axis=0) # NOTE: no return_segment_features for temporal features as they measure variation of features over time if extract_tssd: features["tssd"] = calc_statistical_features(np.asarray(ssd_list).transpose()).flatten(FLATTEN_ORDER) if extract_trh: features["trh"] = calc_statistical_features(np.asarray(rh_list).transpose()).flatten(FLATTEN_ORDER) if return_segment_features: # also include the segment positions in the result features["segpos"] = np.array(seg_pos_list) features["timepos"] = features["segpos"] / (samplerate * 1.0) return features # function to self test rp_extract if working properly def self_test(): import doctest #doctest.testmod() doctest.run_docstring_examples(rp_extract, globals(), verbose=True) if __name__ == '__main__': import sys from audiofile_read import * # import our library for reading wav and mp3 files # process file given on command line or default song (included) if len(sys.argv) > 1: if sys.argv[1] == '-test': # RUN DOCSTRING SELF TEST print "Doing self test. If nothing is printed, it is ok." import doctest doctest.run_docstring_examples(rp_extract, globals()) #, verbose=True) exit() # Note: no output means that everything went fine else: audiofile = sys.argv[1] else: audiofile = "music/BoxCat_Games_-_10_-_Epic_Song.mp3" # Read audio file and extract features try: samplerate, samplewidth, wavedata = audiofile_read(audiofile) np.set_printoptions(suppress=True) bark_bands = 24 # choose the number of Bark bands (2..24) mod_ampl_limit = 60 # number modulation frequencies on x-axis feat = rp_extract(wavedata, samplerate, extract_rp=True, extract_ssd=True, extract_tssd=False, extract_rh=True, n_bark_bands=bark_bands, spectral_masking=True, transform_db=True, transform_phon=True, transform_sone=True, fluctuation_strength_weighting=True, skip_leadin_fadeout=1, step_width=1, mod_ampl_limit=mod_ampl_limit) # feat is a dict containing arrays for different feature sets print "Successfully extracted features:" , feat.keys() except ValueError, e: print e exit() print "Rhythm Histogram feature vector:" print feat["rh"] # EXAMPLE on how to plot the features do_plots = False if do_plots: from rp_plot import * plotrp(feat["rp"],rows=bark_bands,cols=mod_ampl_limit) plotrh(feat["rh"]) plotssd(feat["ssd"],rows=bark_bands) # EXAMPLE on how to store RP features in CSV file<|fim▁hole|> # rp.to_csv(filename)<|fim▁end|>
# import pandas as pd # filename = "features.rp.csv" # rp = pd.DataFrame(feat["rp"].reshape([1,feat["rp"].shape[0]]))
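The commented-out CSV example at the end of that file can be made runnable as below. This is only a sketch: it assumes feat is the feature dict returned by rp_extract in the file's __main__ block and that pandas is installed; the single-row reshape mirrors the commented-out lines.

import pandas as pd

rp_frame = pd.DataFrame(feat["rp"].reshape([1, feat["rp"].shape[0]]))  # one row of Rhythm Pattern features
rp_frame.to_csv("features.rp.csv", index=False)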
<|file_name|>var2str.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- """ translate a variable to and from a formatted string representation that follows regular rules for example: raw input: v={'aa': 12345, 'bbbb': [1, 2, 3, 4, {'flag': 'vvvv||||xxxxx'}, set(['y', 'x', 'z'])]} after `var2str.var2str(v)` v_str=<aa::12345##bbbb::<1||2||3||4||<flag::vvvv|xxxxx>||<y|||x|||z>>> then reverse back: `var2str.str2var(v_str)` v_var={'aa': '12345', 'bbbb': ['1', '2', '3', '4', {'flag': 'vvvv|xxxxx'}, set(['y', 'x', 'z'])]} NOTES: 1, KEY of DICT should be a string. 2, SET and TUPLE are automatically transformed to LIST 3, INT/FLOAT/LONG etc. are automatically transformed to STRING 4, SEPARATORS are replaced with '' in character data. """ import types # TAKE note of the sequence, which has a fixed order sep_dict = { "dict_sep": "##", # separator of elements of dict "dict_k_v_sep": "::", # k::v "list_sep": "||", # list separator "set_sep": "|||", # set separator "tuple_sep": "||" # tuple separator } sep_nest = ("<", ">") # better not a repeated char, e.g. ("<-", "->") # internal operations sep_values = sep_dict.values() def erase_sep(s): for v in sep_values: s = s.replace(v, "") for v in sep_nest: s=s.replace(v, "") return s _s=sep_nest[0] _e=sep_nest[1] class var2str(object): @staticmethod def var2str(var): if not var: return "" if type(var) == types.DictType: result = [] for key,value in var.items(): v_str = var2str.var2str(value) k_str = erase_sep("{0}".format(key)) result.append("{key}{sep}{value}".format( key=k_str, sep=sep_dict["dict_k_v_sep"], value=v_str)) return _s+sep_dict["dict_sep"].join(result)+_e #return sep_dict["dict_sep"].join(result) elif type(var) == types.ListType: result = [var2str.var2str(v) for v in var] return _s+sep_dict["list_sep"].join(result)+_e #return sep_dict["list_sep"].join(result) elif type(var) == type(set([])): result = [var2str.var2str(v) for v in var] return _s+sep_dict["set_sep"].join(result)+_e #return sep_dict["set_sep"].join(result) elif type(var) == types.TupleType: result = [var2str.var2str(v) for v in var] return _s+sep_dict["tuple_sep"].join(result)+_e #return sep_dict["tuple_sep"].join(result) elif type(var) in [types.StringType, types.IntType, types.LongType, types.FloatType]: return erase_sep("{0}".format(var)) else: raise TypeError("Type is not supported. 
var: {0}, type: {1}".format( var, type(var))) @staticmethod def str2var(value): # certain the outer nested elements' type if NestType.is_nest_type(value, _s, _e): _var = NestType(value) _var.replace_nest_vars() var = _var.parse_var() if type(var) == types.DictType: for k, v in var.items(): if type(v)==NestType: var[k] = var2str.str2var(str(v)) if type(var) == types.ListType: for n, v in enumerate(var): if type(v) == NestType: var[n] = var2str.str2var(str(v)) if type(var) == type(set()): # because element in set must be hashable, so there is no meaning for # for parsing set pass return var else: return value class NestType(object): def __init__(self, s, s_tag=_s, e_tag=_e): self.value = str(s) self.s_tag = s_tag self.e_tag = e_tag self.replace_s = None @staticmethod def is_nest_type(value, s_tag, e_tag): if (not value.startswith(s_tag) or not value.endswith(e_tag)): return 0 return 1 def _get_obj_str(self, var): return "[NestType]"+str(hash(var)) def has_nest_element(self): if self.replace_s is None: self.replace_nest_vars() return self.repalce_s == self.value def _replace_nest_var(self, s, nest_dic={}): s_len = len(s)<|fim▁hole|> for i in range(s_len): if s[i:i+s_tag_len] == self.s_tag: tag_index +=1 if tag_index == 1: nest_index.append(i) if s[i:i+e_tag_len] == self.e_tag: tag_index -=1 if tag_index == 0: nest_index.append(i) if len(nest_index) == 2: break if len(nest_index) <2: return s nest_index_s = nest_index[0] nest_index_e = nest_index[1] + e_tag_len nest_str = s[nest_index_s:nest_index_e] nest_var = NestType(nest_str, s_tag=self.s_tag, e_tag = self.e_tag) nest_var_str = self._get_obj_str(nest_var) nest_dic[nest_var_str] = nest_var return s[0:nest_index_s] + nest_var_str + s[nest_index_e:] def replace_nest_vars(self): # trim sign in start and end nest_dic = {} if not NestType.is_nest_type(self.value, self.s_tag, self.e_tag): raise Exception( "[ERROR] `{0}` does not match NestType format".format(self.value)) s = _trim_tag(self.value, self.s_tag, self.e_tag) while 1: replace_s = self._replace_nest_var(s,nest_dic) if replace_s == s: break s = replace_s self.replace_s = replace_s self.nest_dic = nest_dic def parse_var(self): """string `replace_s` has no nestType at all""" s = self.replace_s var = None dict_sep = sep_dict["dict_sep"] dict_k_v_sep = sep_dict["dict_k_v_sep"] list_sep = sep_dict["list_sep"] set_sep = sep_dict["set_sep"] if dict_k_v_sep in s: # dict var = {} items = s.split(dict_sep) for item in items: if not item: continue k,v=item.split(dict_k_v_sep) var[k] = self.nest_dic.get(v, v) elif set_sep in s: var = set([self.nest_dic.get(t, t) for t in s.split(set_sep)]) elif list_sep in s: var = [self.nest_dic.get(t, t) for t in s.split(list_sep)] else: # just one string var = s return var def __str__(self): return self.value def __unicode__(self): return self.value def _trim_tag(str, s, e): """trim the `str` off start `s` and end `e`""" return str[len(s):(len(str)-len(e))] def test(): a = {"aa": 12345, "bbbb":[1,2,3,4,{'flag':"vvvv||||世界是我的"},set(['x', 'y','z'])]} #a = {} print a a_str = var2str.var2str(a) print ">>", a_str a_var = var2str.str2var(a_str) print ">>", a_var if __name__ == "__main__": test()<|fim▁end|>
tag_index = 0 s_tag_len, e_tag_len = len(self.s_tag), len(self.e_tag) nest_index =[]
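A short round-trip sketch for the var2str module above (hypothetical usage: it assumes the prompt and completion are reassembled into var2str.py and run under Python 2, since the code relies on types.DictType and related constants). As the docstring notes, numbers come back as strings and dict key order is not guaranteed.

from var2str import var2str

v = {"aa": 12345, "bbbb": [1, 2, 3]}
encoded = var2str.var2str(v)       # e.g. "<aa::12345##bbbb::<1||2||3>>" (key order may differ)
decoded = var2str.str2var(encoded)
print decoded                      # {'aa': '12345', 'bbbb': ['1', '2', '3']}, ints come back as strings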
<|file_name|>tuple_alignment.rs<|end_file_name|><|fim▁begin|>enum Foo { Bazlooooooooooong(i32, i32, i32, i32, i32, i32), } fn main() { let long_tuple: (u8, u16, u32, u64, u128, i8, i16, i32, i64, i128, f32, f64, char, bool) = (1u8, 2u16, 3u32, 4u64, 5u128, -1i8, -2i16, -3i32, -4i64, -5i128, 0.1f32, 0.2f64, 'a', true); match 1 { FooSome(1234, true,<|fim▁hole|> } }<|fim▁end|>
5678, false, 91011, "foobar") => 10
<|file_name|>WeaponCraftedInfo.tsx<|end_file_name|><|fim▁begin|>import { t } from 'app/i18next-t';<|fim▁hole|>import { DimCrafted } from 'app/inventory/item-types'; import { percent } from 'app/shell/filters'; import React from 'react'; /** * A progress bar that shows weapon crafting info like the game does. */ export function WeaponCraftedInfo({ craftInfo, className, }: { craftInfo: DimCrafted; className: string; }) { const pct = percent(craftInfo.progress || 0); const progressBarStyle = { width: pct, }; return ( <div className={className}> <div className="objective-progress"> <div className="objective-progress-bar" style={progressBarStyle} /> <div className="objective-description"> {t('MovePopup.WeaponLevel', { level: craftInfo.level })} </div> <div className="objective-text">{pct}</div> </div> </div> ); }<|fim▁end|>
<|file_name|>profile.js<|end_file_name|><|fim▁begin|>module.exports = async ({ client, configJS, Utils: { IsURL }, Constants: { Colors } }, msg, commandData) => { const handleQuit = () => { msg.reply({ embed: { color: Colors.RED, description: `You've exited the profile setup menu!`, }, }); }; if (msg.suffix === "setup") { let m = await msg.reply({ embed: { color: Colors.LIGHT_BLUE, author: { name: `Profile setup for ${msg.author.tag}`, }, title: `Let's setup your GAwesomeBot profile ~~--~~ See it by clicking here`, url: `${configJS.hostingURL}activity/users?q=${encodeURIComponent(`${msg.author.tag}`)}`, thumbnail: { url: msg.author.displayAvatarURL({ size: 64, format: "png" }), }, description: `First of all, do you want to make data such as mutual servers with me and profile fields public?`, footer: { text: msg.author.userDocument.isProfilePublic ? `It's already public now, by answering "yes" you're keeping it that way.` : `It's currently not public, by answering "yes" you're making it public.`, }, }, }); const changes = {}; let message = null; try { message = await client.awaitPMMessage(msg.channel, msg.author); } catch (err) { switch (err.code) { case "AWAIT_QUIT": return handleQuit(); case "AWAIT_EXPIRED": { m = await m.edit({ embed: { color: Colors.LIGHT_ORANGE, description: `You didn't answer in time... We'll keep your profile's publicity the way it currently is.`, footer: { text: `Changed your mind? Type "quit" and restart the process by running "profile setup"`, }, }, }); changes.isProfilePublic = msg.author.userDocument.isProfilePublic; } } } if (message && message.content) changes.isProfilePublic = configJS.yesStrings.includes(message.content.toLowerCase().trim()); m = await msg.reply({ embed: { color: Colors.LIGHT_BLUE, title: `Next, here's your current backround.`, image: { url: IsURL(msg.author.userDocument.profile_background_image) ? msg.author.userDocument.profile_background_image : ``, }, thumbnail: { url: msg.author.displayAvatarURL({ size: 64, format: "png" }), }, author: { name: `Profile setup for ${msg.author.tag}`, }, description: `Your current image URL is: \`\`\`\n${msg.author.userDocument.profile_background_image}\`\`\`\nWould you like a new one? Just paste in a URL.`, footer: { text: `Answer with "." to not change it, or "default" to reset it to the default image. | This message expires in 2 minutes`, }, }, }); try {<|fim▁hole|> case "AWAIT_QUIT": return handleQuit(); case "AWAIT_EXPIRED": { m = await m.edit({ embed: { color: Colors.LIGHT_ORANGE, description: `You didn't answer in time... We'll keep your current profile backround.`, footer: { text: `Changed your mind? Type "quit" and restart the process by running "profile setup"`, }, }, }); changes.profile_background_image = msg.author.userDocument.profile_background_image; } } } if (message) { if (message.content.toLowerCase().trim() === "default") { changes.profile_background_image = "http://i.imgur.com/8UIlbtg.jpg"; } else if (message.content === ".") { changes.profile_background_image = msg.author.userDocument.profile_background_image; } else if (message.content !== "") { changes.profile_background_image = message.content.trim(); } } m = await msg.reply({ embed: { color: Colors.LIGHT_BLUE, title: `Done! That will be your new picture. 🏖`, description: `Now, can you please tell us a little about yourself...? (max 2000 characters)`, thumbnail: { url: msg.author.displayAvatarURL({ size: 64, format: "png" }), }, author: { name: `Profile setup for ${msg.author.tag}`, }, footer: { text: `Answer with "." 
to not change your bio, or "none" to reset it | This message expires in 5 minutes`, }, }, }); try { message = await client.awaitPMMessage(msg.channel, msg.author, 300000); } catch (err) { message = undefined; switch (err.code) { case "AWAIT_QUIT": return handleQuit(); case "AWAIT_EXPIRED": { m = await m.edit({ embed: { color: Colors.LIGHT_ORANGE, description: `You didn't answer in time... We'll keep your current bio.`, footer: { text: `Changed your mind? Type "quit" and restart the process by running "profile setup"`, }, }, }); if (msg.author.userDocument.profile_fields && msg.author.userDocument.profile_fields.Bio) changes.Bio = msg.author.userDocument.profile_fields.Bio; } } } if (message && message.content) { if (message.content.trim() === ".") { if (msg.author.userDocument.profile_fields && msg.author.userDocument.profile_fields.Bio) changes.Bio = msg.author.userDocument.profile_fields.Bio; else changes.Bio = null; } else if (message.content.toLowerCase().trim() === "none") { changes.Bio = "delete"; } else { changes.Bio = message.content.trim(); } } const userQueryDocument = msg.author.userDocument.query; userQueryDocument.set("isProfilePublic", changes.isProfilePublic) .set("profile_background_image", changes.profile_background_image); if (!msg.author.userDocument.profile_fields) userQueryDocument.set("profile_fields", {}); if (changes.Bio === "delete") { userQueryDocument.remove("profile_fields.Bio"); } else if (changes.Bio) { userQueryDocument.set("profile_fields.Bio", changes.Bio); } await msg.author.userDocument.save().catch(err => { logger.warn(`Failed to save user data for profile setup.`, { usrid: msg.author.id }, err); }); msg.reply({ embed: { color: Colors.GREEN, title: `You're all set! ~~--~~ Click here to see your profile. 👀`, description: `Thanks for your input.`, url: `${configJS.hostingURL}activity/users?q=${encodeURIComponent(`${msg.author.tag}`)}`, footer: { text: `Changed your mind? Run "profile setup" once again!`, }, }, }); } };<|fim▁end|>
message = await client.awaitPMMessage(msg.channel, msg.author, 120000); } catch (err) { message = undefined; switch (err.code) {
<|file_name|>TestVariablesNames.java<|end_file_name|><|fim▁begin|>package jadx.tests.integration.debuginfo; import org.junit.jupiter.api.Test; import jadx.core.dex.nodes.ClassNode; import jadx.tests.api.SmaliTest; import static jadx.tests.api.utils.JadxMatchers.containsOne; import static org.hamcrest.MatcherAssert.assertThat; public class TestVariablesNames extends SmaliTest { // @formatter:off /* public static class TestCls {<|fim▁hole|> String s2 = "i" + i; f2(i, s2); double d = i * 5; String s3 = "d" + d; f3(d, s3); } private void f1(String s) { } private void f2(int i, String i2) { } private void f3(double d, String d2) { } } */ // @formatter:on /** * Parameter register reused in variables assign with different types and names * No variables names in debug info */ @Test public void test() { ClassNode cls = getClassNodeFromSmaliWithPath("debuginfo", "TestVariablesNames"); String code = cls.getCode().toString(); // TODO: don't use current variables naming in tests assertThat(code, containsOne("f1(str);")); assertThat(code, containsOne("f2(i2, \"i\" + i2);")); assertThat(code, containsOne("f3(d, \"d\" + d);")); } }<|fim▁end|>
public void test(String s, int k) { f1(s); int i = k + 3;
<|file_name|>Validation.js<|end_file_name|><|fim▁begin|>var App = angular.module('validationApp', []); App.controller('mainController', function ($scope) { $scope.submitForm = function (isValid) { if (isValid) { alert('Our form is Amazing'); } else { alert('Please enter the values and click submit'); } }; <|fim▁hole|><|fim▁end|>
});
<|file_name|>ComponentModel_test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python # This file is part of nexdatas - Tango Server for NeXus data writer # # Copyright (C) 2012-2017 DESY, Jan Kotanski <[email protected]> # # nexdatas is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # # nexdatas is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with nexdatas. If not, see <http://www.gnu.org/licenses/>. # \package test nexdatas # \file ComponentModelTest.py # unittests for field Tags running Tango Server # import unittest import os import sys import random import struct import binascii import time from PyQt5.QtCore import ( Qt, QAbstractItemModel, QModelIndex,) from PyQt5.QtXml import QDomDocument from nxsconfigtool.ComponentModel import ComponentModel from nxsconfigtool.ComponentItem import ComponentItem # if 64-bit machione IS64BIT = (struct.calcsize("P") == 8) if sys.version_info > (3,): long = int unicode = str # test fixture class ComponentModelTest(unittest.TestCase): # constructor # \param methodName name of the test method def __init__(self, methodName): unittest.TestCase.__init__(self, methodName) self._bint = "int64" if IS64BIT else "int32" self._buint = "uint64" if IS64BIT else "uint32" self._bfloat = "float64" if IS64BIT else "float32" # MessageBox text self.text = None # MessageBox title self.title = None # action status self.performed = False try: self.__seed = long(binascii.hexlify(os.urandom(16)), 16) except NotImplementedError: self.__seed = long(time.time() * 256) # self.__seed = 105186230414225794971485160270620812570 self.__rnd = random.Random(self.__seed) # test starter # \brief Common set up def setUp(self): print("\nsetting up...") print("SEED = %s" % self.__seed) # test closer # \brief Common tear down def tearDown(self): print("tearing down ...") # constructor test # \brief It tests default settings def test_constructor(self): fun = sys._getframe().f_code.co_name print("Run: %s.%s() " % (self.__class__.__name__, fun)) doc = QDomDocument() nname = "definition" qdn = doc.createElement(nname) doc.appendChild(qdn) nkids = self.__rnd.randint(1, 10) kds = [] for n in range(nkids): kds.append(doc.createElement("kid%s" % n)) qdn.appendChild(kds[-1]) allAttr = False cm = ComponentModel(doc, allAttr) self.assertTrue(isinstance(cm, QAbstractItemModel)) self.assertTrue(isinstance(cm.rootIndex, QModelIndex)) cd = cm.rootIndex.internalPointer() self.assertTrue(isinstance(cd, ComponentItem)) self.assertEqual(cm.rootIndex.row(), 0) self.assertEqual(cm.rootIndex.column(), 0) self.assertEqual(cd.parent, None) self.assertEqual(cd.childNumber(), 0) self.assertEqual(cd.node.nodeName(), "#document") ci = cd.child(0) self.assertEqual(ci.parent, cd) self.assertEqual(ci.node, qdn) self.assertEqual(ci.childNumber(), 0) self.assertEqual(ci.node.nodeName(), nname) for k in range(nkids): self.assertTrue(isinstance(ci.child(k), ComponentItem)) self.assertTrue(isinstance(ci.child(k).parent, ComponentItem)) self.assertEqual(ci.child(k).childNumber(), k) self.assertEqual(ci.child(k).node, kds[k]) self.assertEqual(ci.child(k).parent.node, qdn) 
self.assertEqual(ci.child(k).node.nodeName(), "kid%s" % k) self.assertEqual(ci.child(k).parent, ci) # constructor test # \brief It tests default settings def test_headerData(self): fun = sys._getframe().f_code.co_name print("Run: %s.%s() " % (self.__class__.__name__, fun)) doc = QDomDocument() nname = "definition" qdn = doc.createElement(nname) doc.appendChild(qdn) nkids = self.__rnd.randint(1, 10) kds = [] for n in range(nkids): kds.append(doc.createElement("kid%s" % n)) qdn.appendChild(kds[-1]) allAttr = False cm = ComponentModel(doc, allAttr) self.assertTrue(isinstance(cm, QAbstractItemModel)) self.assertTrue(isinstance(cm.rootIndex, QModelIndex)) cd = cm.rootIndex.internalPointer() self.assertTrue(isinstance(cd, ComponentItem)) self.assertEqual(cm.rootIndex.row(), 0) self.assertEqual(cm.rootIndex.column(), 0) self.assertEqual(cm.headerData(0, Qt.Vertical), None) hd = cm.headerData(0, Qt.Horizontal) self.assertTrue(isinstance(hd, str)) self.assertEqual(hd, 'Name') hd = cm.headerData(0, Qt.Horizontal, Qt.DisplayRole) self.assertTrue(isinstance(hd, str)) self.assertEqual(hd, 'Name') hd = cm.headerData(1, Qt.Horizontal) self.assertTrue(isinstance(hd, str)) self.assertEqual(hd, 'Type') hd = cm.headerData(1, Qt.Horizontal, Qt.DisplayRole) self.assertTrue(isinstance(hd, str)) self.assertEqual(hd, 'Type') hd = cm.headerData(2, Qt.Horizontal) self.assertTrue(isinstance(hd, str)) self.assertEqual(hd, 'Value') hd = cm.headerData(2, Qt.Horizontal, Qt.DisplayRole) self.assertTrue(isinstance(hd, str)) self.assertEqual(hd, 'Value') hd = cm.headerData(3, Qt.Horizontal) self.assertEqual(hd, None) hd = cm.headerData(3, Qt.Horizontal, Qt.DisplayRole) hd = cm.headerData(-1, Qt.Horizontal) self.assertEqual(hd, None) hd = cm.headerData(-1, Qt.Horizontal, Qt.DisplayRole) self.assertEqual(hd, None) cm.setAttributeView(True) hd = cm.headerData(1, Qt.Horizontal) self.assertTrue(isinstance(hd, str)) self.assertEqual(hd, 'Attributes') hd = cm.headerData(1, Qt.Horizontal, Qt.DisplayRole) self.assertTrue(isinstance(hd, str)) self.assertEqual(hd, 'Attributes') cm.setAttributeView(False) hd = cm.headerData(1, Qt.Horizontal) self.assertTrue(isinstance(hd, str)) self.assertEqual(hd, 'Type') hd = cm.headerData(1, Qt.Horizontal, Qt.DisplayRole) self.assertTrue(isinstance(hd, str)) self.assertEqual(hd, 'Type') allAttr = True cm = ComponentModel(doc, allAttr) hd = cm.headerData(1, Qt.Horizontal) self.assertTrue(isinstance(hd, str)) self.assertEqual(hd, 'Attributes') hd = cm.headerData(1, Qt.Horizontal, Qt.DisplayRole) self.assertTrue(isinstance(hd, str)) self.assertEqual(hd, 'Attributes') # constructor test # \brief It tests default settings def test_data(self): fun = sys._getframe().f_code.co_name print("Run: %s.%s() " % (self.__class__.__name__, fun)) doc = QDomDocument() nname = "definition" qdn = doc.createElement(nname) doc.appendChild(qdn) nkids = self.__rnd.randint(1, 10) kds = [] for n in range(nkids): kds.append(doc.createElement("kid%s" % n)) qdn.appendChild(kds[-1]) allAttr = False cm = ComponentModel(doc, allAttr) self.assertTrue(isinstance(cm, QAbstractItemModel)) self.assertTrue(isinstance(cm.rootIndex, QModelIndex)) cd = cm.rootIndex.internalPointer() self.assertTrue(isinstance(cd, ComponentItem)) self.assertEqual(cm.rootIndex.row(), 0) self.assertEqual(cm.rootIndex.column(), 0) self.assertEqual(cm.headerData(0, Qt.Vertical), None) dt = cm.data(QModelIndex()) self.assertEqual(dt, None) for role in range(1, 5): dt = cm.data(cm.rootIndex, role) self.assertEqual(dt, None) dt = cm.data(cm.rootIndex) 
self.assertTrue(isinstance(dt, (unicode, str))) self.assertEqual(dt, '#document') dt = cm.data(cm.rootIndex, Qt.DisplayRole) self.assertTrue(isinstance(dt, (unicode, str))) self.assertEqual(dt, '#document') # constructor test # \brief It tests default settings def test_data_name(self): fun = sys._getframe().f_code.co_name print("Run: %s.%s() " % (self.__class__.__name__, fun)) doc = QDomDocument() nname = "definition" qdn = doc.createElement(nname) doc.appendChild(qdn) nkids = self.__rnd.randint(1, 10) kds = [] tkds = [] for n in range(nkids): kds.append(doc.createElement("kid%s" % n)) kds[-1].setAttribute("name", "myname%s" % n) kds[-1].setAttribute("type", "mytype%s" % n) kds[-1].setAttribute("units", "myunits%s" % n) qdn.appendChild(kds[-1]) tkds.append(doc.createTextNode("\nText\n %s\n" % n)) kds[-1].appendChild(tkds[-1]) # print doc allAttr = False cm = ComponentModel(doc, allAttr) self.assertTrue(isinstance(cm, QAbstractItemModel)) self.assertTrue(isinstance(cm.rootIndex, QModelIndex)) cd = cm.rootIndex.internalPointer() self.assertTrue(isinstance(cd, ComponentItem)) self.assertEqual(cm.rootIndex.row(), 0) self.assertEqual(cm.rootIndex.column(), 0) self.assertEqual(cm.headerData(0, Qt.Vertical), None) ri = cm.rootIndex di = cm.index(0, 0, ri) ci = cd.child(0) for n in range(nkids): # kd = ci.child(n) ki0 = cm.index(n, 0, di) dt = cm.data(ki0) self.assertTrue(isinstance(dt, (unicode, str))) self.assertEqual(dt, 'kid%s: myname%s' % (n, n)) ki1 = cm.index(n, 1, di) dt = cm.data(ki1) self.assertTrue(isinstance(dt, (unicode, str))) self.assertEqual(str(dt).strip(), 'mytype%s' % n) ki2 = cm.index(n, 2, di) dt = cm.data(ki2) self.assertTrue(isinstance(dt, (unicode, str))) self.assertEqual(str(dt).strip(), '') ki2 = cm.index(n, -1, di) dt = cm.data(ki2) # self.assertTrue(isinstance(dt, (unicode, str))) self.assertEqual(dt, None) ki2 = cm.index(n, 3, di) dt = cm.data(ki2) # self.assertTrue(isinstance(dt, (unicode, str))) self.assertEqual(dt, None) def test_data_name_attr(self): fun = sys._getframe().f_code.co_name print("Run: %s.%s() " % (self.__class__.__name__, fun)) doc = QDomDocument() nname = "definition" qdn = doc.createElement(nname) doc.appendChild(qdn) nkids = self.__rnd.randint(1, 10) kds = [] tkds = [] for n in range(nkids): kds.append(doc.createElement("kid%s" % n)) kds[-1].setAttribute("name", "myname%s" % n) kds[-1].setAttribute("type", "mytype%s" % n) kds[-1].setAttribute("units", "myunits%s" % n) qdn.appendChild(kds[-1]) tkds.append(doc.createTextNode("\nText\n %s\n" % n)) kds[-1].appendChild(tkds[-1]) # print doc allAttr = False cm = ComponentModel(doc, allAttr) self.assertTrue(isinstance(cm, QAbstractItemModel)) self.assertTrue(isinstance(cm.rootIndex, QModelIndex)) cd = cm.rootIndex.internalPointer() self.assertTrue(isinstance(cd, ComponentItem)) self.assertEqual(cm.rootIndex.row(), 0) self.assertEqual(cm.rootIndex.column(), 0) self.assertEqual(cm.headerData(0, Qt.Vertical), None) ri = cm.rootIndex di = cm.index(0, 0, ri) ci = cd.child(0) for n in range(nkids): # kd = ci.child(n) cm.setAttributeView(False) ki0 = cm.index(n, 0, di) dt = cm.data(ki0) self.assertTrue(isinstance(dt, (unicode, str))) self.assertEqual(dt, 'kid%s: myname%s' % (n, n)) ki1 = cm.index(n, 1, di) dt = cm.data(ki1) self.assertTrue(isinstance(dt, (unicode, str))) self.assertEqual(str(dt).strip(), 'mytype%s' % n) ki2 = cm.index(n, 2, di) dt = cm.data(ki2) self.assertTrue(isinstance(dt, (unicode, str))) self.assertEqual(str(dt).strip(), '') ki2 = cm.index(n, -1, di) dt = cm.data(ki2) # 
self.assertTrue(isinstance(dt, (unicode, str))) self.assertEqual(dt, None) ki2 = cm.index(n, 3, di) dt = cm.data(ki2) # self.assertTrue(isinstance(dt, (unicode, str))) self.assertEqual(dt, None) cm.setAttributeView(True) ki0 = cm.index(n, 0, di) dt = cm.data(ki0) self.assertTrue(isinstance(dt, (unicode, str))) self.assertEqual(dt, 'kid%s: myname%s' % (n, n)) ki1 = cm.index(n, 1, di) dt = cm.data(ki1) self.assertTrue(isinstance(dt, (unicode, str))) s1 = set(str(dt).strip().split(" ")) s2 = set(('units="myunits%s" type="mytype%s" name="myname%s"' % (n, n, n)).split(" ")) self.assertEqual(s1, s2) ki2 = cm.index(n, 2, di) dt = cm.data(ki2) self.assertTrue(isinstance(dt, (unicode, str))) self.assertEqual(str(dt).strip(), '') def test_data_name_attr_true(self): fun = sys._getframe().f_code.co_name print("Run: %s.%s() " % (self.__class__.__name__, fun)) doc = QDomDocument() nname = "definition" qdn = doc.createElement(nname) doc.appendChild(qdn) nkids = self.__rnd.randint(1, 10) kds = [] tkds = [] for n in range(nkids): kds.append(doc.createElement("kid%s" % n)) kds[-1].setAttribute("name", "myname%s" % n) kds[-1].setAttribute("type", "mytype%s" % n) kds[-1].setAttribute("units", "myunits%s" % n) qdn.appendChild(kds[-1]) tkds.append(doc.createTextNode("\nText\n %s\n" % n)) kds[-1].appendChild(tkds[-1]) # print doc allAttr = True cm = ComponentModel(doc, allAttr) self.assertTrue(isinstance(cm, QAbstractItemModel)) self.assertTrue(isinstance(cm.rootIndex, QModelIndex)) cd = cm.rootIndex.internalPointer() self.assertTrue(isinstance(cd, ComponentItem)) self.assertEqual(cm.rootIndex.row(), 0) self.assertEqual(cm.rootIndex.column(), 0) self.assertEqual(cm.headerData(0, Qt.Vertical), None) ri = cm.rootIndex di = cm.index(0, 0, ri) ci = cd.child(0) for n in range(nkids): # kd = ci.child(n) ki0 = cm.index(n, 0, di) dt = cm.data(ki0) self.assertTrue(isinstance(dt, (unicode, str))) self.assertEqual(dt, 'kid%s: myname%s' % (n, n)) ki1 = cm.index(n, 1, di) dt = cm.data(ki1) self.assertTrue(isinstance(dt, (unicode, str))) s1 = set(str(dt).strip().split(" ")) s2 = set(('units="myunits%s" type="mytype%s" name="myname%s"' % (n, n, n)).split(" ")) self.assertEqual(s1, s2) ki2 = cm.index(n, 2, di) dt = cm.data(ki2) self.assertTrue(isinstance(dt, (unicode, str))) self.assertEqual(str(dt).strip(), '') def test_data_name_text(self): fun = sys._getframe().f_code.co_name print("Run: %s.%s() " % (self.__class__.__name__, fun)) doc = QDomDocument() nname = "definition" qdn = doc.createElement(nname) doc.appendChild(qdn) nkids = self.__rnd.randint(1, 10) kds = [] tkds = [] for n in range(nkids): kds.append(doc.createElement("kid%s" % n)) kds[-1].setAttribute("name", "myname%s" % n) kds[-1].setAttribute("type", "mytype%s" % n) kds[-1].setAttribute("units", "myunits%s" % n) qdn.appendChild(kds[-1]) tkds.append(doc.createTextNode("\nText\n %s\n" % n)) kds[-1].appendChild(tkds[-1]) # print doc allAttr = True cm = ComponentModel(doc, allAttr) self.assertTrue(isinstance(cm, QAbstractItemModel)) self.assertTrue(isinstance(cm.rootIndex, QModelIndex)) cd = cm.rootIndex.internalPointer() self.assertTrue(isinstance(cd, ComponentItem)) self.assertEqual(cm.rootIndex.row(), 0) self.assertEqual(cm.rootIndex.column(), 0) self.assertEqual(cm.headerData(0, Qt.Vertical), None) ri = cm.rootIndex di = cm.index(0, 0, ri) # ci = cd.child(0) for n in range(nkids): allAttr = not allAttr cm.setAttributeView(allAttr) ki = cm.index(n, 0, di) ti = cm.index(0, 0, ki) dt = cm.data(ti) self.assertTrue(isinstance(dt, (unicode, str))) self.assertEqual(dt, 
'#text') ti = cm.index(0, 1, ki) dt = cm.data(ti) self.assertTrue(isinstance(dt, (unicode, str))) self.assertEqual(str(dt).strip(), '') ti = cm.index(0, 2, ki) dt = cm.data(ti) self.assertTrue(isinstance(dt, (unicode, str))) self.assertEqual(str(dt).strip(), 'Text %s' % n) def test_flags(self): fun = sys._getframe().f_code.co_name print("Run: %s.%s() " % (self.__class__.__name__, fun)) doc = QDomDocument() nname = "definition" qdn = doc.createElement(nname) doc.appendChild(qdn) nkids = self.__rnd.randint(1, 10) kds = [] tkds = [] for n in range(nkids): kds.append(doc.createElement("kid%s" % n)) kds[-1].setAttribute("name", "myname%s" % n) kds[-1].setAttribute("type", "mytype%s" % n) kds[-1].setAttribute("units", "myunits%s" % n) qdn.appendChild(kds[-1]) tkds.append(doc.createTextNode("\nText\n %s\n" % n)) kds[-1].appendChild(tkds[-1]) # print doc allAttr = True cm = ComponentModel(doc, allAttr) self.assertTrue(isinstance(cm, QAbstractItemModel)) self.assertTrue(isinstance(cm.rootIndex, QModelIndex)) cd = cm.rootIndex.internalPointer() self.assertTrue(isinstance(cd, ComponentItem)) self.assertEqual(cm.rootIndex.row(), 0) self.assertEqual(cm.rootIndex.column(), 0) self.assertEqual(cm.flags(QModelIndex()), Qt.ItemIsEnabled) ri = cm.rootIndex self.assertEqual( cm.flags(ri), Qt.ItemFlags(QAbstractItemModel.flags(cm, ri) | Qt.ItemIsEnabled | Qt.ItemIsSelectable)) di = cm.index(0, 0, ri) self.assertEqual( cm.flags(di), Qt.ItemFlags(QAbstractItemModel.flags(cm, di) | Qt.ItemIsEnabled | Qt.ItemIsSelectable)) for n in range(nkids): allAttr = not allAttr cm.setAttributeView(allAttr) ki = cm.index(n, 0, di) self.assertEqual( cm.flags(ki), Qt.ItemFlags(QAbstractItemModel.flags(cm, ki) | Qt.ItemIsEnabled | Qt.ItemIsSelectable)) ki = cm.index(n, 1, di) self.assertEqual( cm.flags(ki), Qt.ItemFlags(QAbstractItemModel.flags(cm, ki) | Qt.ItemIsEnabled | Qt.ItemIsSelectable)) ki = cm.index(n, 2, di) self.assertEqual( cm.flags(ki), Qt.ItemFlags(QAbstractItemModel.flags(cm, ki) | Qt.ItemIsEnabled | Qt.ItemIsSelectable)) ki = cm.index(n, 3, di) self.assertEqual(cm.flags(ki), Qt.ItemIsEnabled) ki = cm.index(n, 0, di) ti = cm.index(0, 0, ki) self.assertEqual( cm.flags(ti), Qt.ItemFlags(QAbstractItemModel.flags(cm, ti) | Qt.ItemIsEnabled | Qt.ItemIsSelectable)) ti = cm.index(0, 1, ki) self.assertEqual( cm.flags(ti), Qt.ItemFlags(QAbstractItemModel.flags(cm, ti) | Qt.ItemIsEnabled | Qt.ItemIsSelectable)) ti = cm.index(0, 2, ki) self.assertEqual( cm.flags(ti), Qt.ItemFlags(QAbstractItemModel.flags(cm, ti) | Qt.ItemIsEnabled | Qt.ItemIsSelectable)) ti = cm.index(0, 3, ki) self.assertEqual(cm.flags(ti), Qt.ItemIsEnabled) def test_index(self): fun = sys._getframe().f_code.co_name print("Run: %s.%s() " % (self.__class__.__name__, fun)) doc = QDomDocument() nname = "definition" qdn = doc.createElement(nname) doc.appendChild(qdn) nkids = self.__rnd.randint(1, 10) kds = [] tkds = [] for n in range(nkids): kds.append(doc.createElement("kid%s" % n)) kds[-1].setAttribute("name", "myname%s" % n) kds[-1].setAttribute("type", "mytype%s" % n) kds[-1].setAttribute("units", "myunits%s" % n) qdn.appendChild(kds[-1]) tkds.append(doc.createTextNode("\nText\n %s\n" % n)) kds[-1].appendChild(tkds[-1]) # print doc allAttr = True cm = ComponentModel(doc, allAttr) self.assertTrue(isinstance(cm, QAbstractItemModel)) self.assertTrue(isinstance(cm.rootIndex, QModelIndex)) cd = cm.rootIndex.internalPointer() self.assertTrue(isinstance(cd, ComponentItem)) self.assertEqual(cm.rootIndex.row(), 0) self.assertEqual(cm.rootIndex.column(), 0) ri = 
cm.rootIndex di = cm.index(0, 0, ri) self.assertTrue(isinstance(di, QModelIndex)) self.assertEqual(di.row(), 0) self.assertEqual(di.column(), 0) self.assertEqual(di.internalPointer().node, qdn) self.assertEqual(di.internalPointer().parent.node, doc) iv = cm.index(0, 0) self.assertTrue(isinstance(iv, QModelIndex)) self.assertEqual(iv.row(), 0) self.assertEqual(iv.column(), 0) self.assertEqual(iv, di) self.assertEqual(iv.internalPointer(), di.internalPointer()) iv = cm.index(0, 0, QModelIndex()) self.assertTrue(isinstance(iv, QModelIndex)) self.assertEqual(iv.row(), 0) self.assertEqual(iv.column(), 0) self.assertEqual(iv, di) self.assertEqual(iv.internalPointer(), di.internalPointer()) for n in range(nkids): allAttr = not allAttr cm.setAttributeView(allAttr) ki = cm.index(n, 0, di) self.assertTrue(isinstance(ki, QModelIndex)) self.assertEqual(ki.row(), n) self.assertEqual(ki.column(), 0) self.assertEqual(ki.internalPointer().node, kds[n]) self.assertEqual(ki.internalPointer().parent.node, qdn) ki = cm.index(n, 1, di) self.assertTrue(isinstance(ki, QModelIndex)) self.assertEqual(ki.row(), n) self.assertEqual(ki.column(), 1) self.assertEqual(ki.internalPointer().node, kds[n]) self.assertEqual(ki.internalPointer().parent.node, qdn) ki = cm.index(n, 2, di) self.assertTrue(isinstance(ki, QModelIndex)) self.assertEqual(ki.row(), n) self.assertEqual(ki.column(), 2) self.assertEqual(ki.internalPointer().node, kds[n]) self.assertEqual(ki.internalPointer().parent.node, qdn) ki = cm.index(n, 3, di) self.assertTrue(isinstance(ki, QModelIndex)) self.assertEqual(ki.row(), -1) self.assertEqual(ki.column(), -1) self.assertEqual(ki.internalPointer(), None) ki = cm.index(n, 0, di) ti = cm.index(0, 0, ki) self.assertTrue(isinstance(ti, QModelIndex)) self.assertEqual(ti.row(), 0) self.assertEqual(ti.column(), 0) self.assertEqual(ti.internalPointer().node, tkds[n]) self.assertEqual(ti.internalPointer().parent.node, kds[n]) ti = cm.index(0, 1, ki) self.assertTrue(isinstance(ti, QModelIndex)) self.assertEqual(ti.row(), 0) self.assertEqual(ti.column(), 1) self.assertEqual(ti.internalPointer().node, tkds[n]) self.assertEqual(ti.internalPointer().parent.node, kds[n]) ti = cm.index(0, 2, ki) self.assertTrue(isinstance(ti, QModelIndex)) self.assertEqual(ti.row(), 0) self.assertEqual(ti.column(), 2) self.assertEqual(ti.internalPointer().node, tkds[n]) self.assertEqual(ti.internalPointer().parent.node, kds[n]) ti = cm.index(0, 3, ki) self.assertTrue(isinstance(ti, QModelIndex)) self.assertEqual(ti.row(), -1) self.assertEqual(ti.column(), -1) self.assertEqual(ti.internalPointer(), None) def test_parent(self): fun = sys._getframe().f_code.co_name print("Run: %s.%s() " % (self.__class__.__name__, fun)) doc = QDomDocument() nname = "definition" qdn = doc.createElement(nname) doc.appendChild(qdn) nkids = self.__rnd.randint(1, 10) kds = [] tkds = [] for n in range(nkids): kds.append(doc.createElement("kid%s" % n)) kds[-1].setAttribute("name", "myname%s" % n) kds[-1].setAttribute("type", "mytype%s" % n) kds[-1].setAttribute("units", "myunits%s" % n) qdn.appendChild(kds[-1]) tkds.append(doc.createTextNode("\nText\n %s\n" % n)) kds[-1].appendChild(tkds[-1]) # print doc allAttr = True cm = ComponentModel(doc, allAttr) self.assertTrue(isinstance(cm, QAbstractItemModel)) self.assertTrue(isinstance(cm.rootIndex, QModelIndex)) cd = cm.rootIndex.internalPointer() self.assertTrue(isinstance(cd, ComponentItem)) self.assertEqual(cm.rootIndex.row(), 0) self.assertEqual(cm.rootIndex.column(), 0) ri = cm.rootIndex pri = cm.parent(ri) 
self.assertTrue(isinstance(pri, QModelIndex)) self.assertEqual(pri.row(), -1) self.assertEqual(pri.column(), -1) self.assertEqual(pri.internalPointer(), None) # avoids showing #document di = cm.index(0, 0, ri) pdi = cm.parent(di) self.assertTrue(isinstance(pdi, QModelIndex)) self.assertEqual(pdi.row(), -1) self.assertEqual(pdi.column(), -1) self.assertEqual(pdi.internalPointer(), None) iv = cm.index(0, 0) piv = cm.parent(iv) self.assertTrue(isinstance(piv, QModelIndex)) self.assertEqual(pdi.row(), -1) self.assertEqual(pdi.column(), -1) self.assertEqual(pdi.internalPointer(), None) for n in range(nkids): allAttr = not allAttr cm.setAttributeView(allAttr) ki = cm.index(n, 0, di) pki = cm.parent(ki) self.assertEqual(pki, di) ki = cm.index(n, 1, di) pki = cm.parent(ki) self.assertEqual(pki, di) ki = cm.index(n, 2, di) pki = cm.parent(ki) self.assertEqual(pki, di) ki = cm.index(n, 3, di) pki = cm.parent(ki) self.assertTrue(isinstance(pki, QModelIndex)) self.assertEqual(pki.row(), -1) self.assertEqual(pki.column(), -1) self.assertEqual(pki.internalPointer(), None) ki = cm.index(n, 0, di) ti = cm.index(0, 0, ki) pti = cm.parent(ti) self.assertEqual(pti, ki) ti = cm.index(0, 1, ki) pti = cm.parent(ti) self.assertEqual(pti, ki) ti = cm.index(0, 2, ki) pti = cm.parent(ti) self.assertEqual(pti, ki) ti = cm.index(0, 3, ki) pti = cm.parent(ti) self.assertTrue(isinstance(pti, QModelIndex)) self.assertEqual(pti.row(), -1) self.assertEqual(pti.column(), -1) self.assertEqual(pti.internalPointer(), None) def test_rowCount(self): fun = sys._getframe().f_code.co_name print("Run: %s.%s() " % (self.__class__.__name__, fun)) doc = QDomDocument() nname = "definition" qdn = doc.createElement(nname) doc.appendChild(qdn) nkids = self.__rnd.randint(1, 10) kds = [] tkds = [] for n in range(nkids): kds.append(doc.createElement("kid%s" % n)) kds[-1].setAttribute("name", "myname%s" % n) kds[-1].setAttribute("type", "mytype%s" % n) kds[-1].setAttribute("units", "myunits%s" % n) qdn.appendChild(kds[-1]) tkds.append(doc.createTextNode("\nText\n %s\n" % n)) kds[-1].appendChild(tkds[-1]) # print doc allAttr = True cm = ComponentModel(doc, allAttr) self.assertTrue(isinstance(cm, QAbstractItemModel)) self.assertTrue(isinstance(cm.rootIndex, QModelIndex)) cd = cm.rootIndex.internalPointer() self.assertTrue(isinstance(cd, ComponentItem)) self.assertEqual(cm.rootIndex.row(), 0) self.assertEqual(cm.rootIndex.column(), 0) ri = cm.rootIndex self.assertEqual(cm.rowCount(ri), 1) # avoids showing #document di = cm.index(0, 0, ri) self.assertEqual(cm.rowCount(di), nkids) iv = cm.index(0, 0) self.assertEqual(cm.rowCount(iv), nkids) for n in range(nkids): allAttr = not allAttr cm.setAttributeView(allAttr) ki = cm.index(n, 0, di) self.assertEqual(cm.rowCount(ki), 1) ki = cm.index(n, 1, di) self.assertEqual(cm.rowCount(ki), 0) ki = cm.index(n, 2, di) self.assertEqual(cm.rowCount(ki), 0) # invalid index ki = cm.index(n, 3, di) self.assertEqual(cm.rowCount(ki), 1) ki = cm.index(n, 0, di) ti = cm.index(0, 0, ki) self.assertEqual(cm.rowCount(ti), 0) ti = cm.index(0, 1, ki) self.assertEqual(cm.rowCount(ti), 0) ti = cm.index(0, 2, ki) self.assertEqual(cm.rowCount(ti), 0) ti = cm.index(0, 3, ki) self.assertEqual(cm.rowCount(ti), 1) def test_columnCount(self): fun = sys._getframe().f_code.co_name print("Run: %s.%s() " % (self.__class__.__name__, fun)) doc = QDomDocument() nname = "definition" qdn = doc.createElement(nname) doc.appendChild(qdn) nkids = self.__rnd.randint(1, 10) kds = [] tkds = [] for n in range(nkids): 
kds.append(doc.createElement("kid%s" % n)) kds[-1].setAttribute("name", "myname%s" % n) kds[-1].setAttribute("type", "mytype%s" % n) kds[-1].setAttribute("units", "myunits%s" % n) qdn.appendChild(kds[-1]) tkds.append(doc.createTextNode("\nText\n %s\n" % n)) kds[-1].appendChild(tkds[-1]) # print doc allAttr = True cm = ComponentModel(doc, allAttr) self.assertTrue(isinstance(cm, QAbstractItemModel)) self.assertTrue(isinstance(cm.rootIndex, QModelIndex)) cd = cm.rootIndex.internalPointer() self.assertTrue(isinstance(cd, ComponentItem)) self.assertEqual(cm.rootIndex.row(), 0) self.assertEqual(cm.rootIndex.column(), 0) ri = cm.rootIndex self.assertEqual(cm.columnCount(ri), 3) # avoids showing #document di = cm.index(0, 0, ri) self.assertEqual(cm.columnCount(di), 3) iv = cm.index(0, 0) self.assertEqual(cm.columnCount(iv), 3) for n in range(nkids): allAttr = not allAttr cm.setAttributeView(allAttr) ki = cm.index(n, 0, di) self.assertEqual(cm.columnCount(ki), 3) ki = cm.index(n, 1, di) self.assertEqual(cm.columnCount(ki), 3) ki = cm.index(n, 2, di) self.assertEqual(cm.columnCount(ki), 3) # invalid index ki = cm.index(n, 3, di) self.assertEqual(cm.columnCount(ki), 3) ki = cm.index(n, 0, di) ti = cm.index(0, 0, ki) self.assertEqual(cm.columnCount(ti), 3) ti = cm.index(0, 1, ki) self.assertEqual(cm.columnCount(ti), 3) ti = cm.index(0, 2, ki) self.assertEqual(cm.columnCount(ti), 3) ti = cm.index(0, 3, ki) self.assertEqual(cm.columnCount(ti), 3) def test_insertItem(self): fun = sys._getframe().f_code.co_name print("Run: %s.%s() " % (self.__class__.__name__, fun)) doc = QDomDocument() nname = "definition" qdn = doc.createElement(nname) doc.appendChild(qdn) nkids = self.__rnd.randint(1, 10) kds = [] tkds = [] for n in range(nkids): kds.append(doc.createElement("kid%s" % n)) kds[-1].setAttribute("name", "myname%s" % n) kds[-1].setAttribute("type", "mytype%s" % n) kds[-1].setAttribute("units", "myunits%s" % n) qdn.appendChild(kds[-1]) tkds.append(doc.createTextNode("\nText\n %s\n" % n)) kds[-1].appendChild(tkds[-1]) # print doc allAttr = True cm = ComponentModel(doc, allAttr) self.assertTrue(isinstance(cm, QAbstractItemModel)) self.assertTrue(isinstance(cm.rootIndex, QModelIndex)) cd = cm.rootIndex.internalPointer() self.assertTrue(isinstance(cd, ComponentItem)) self.assertEqual(cm.rootIndex.row(), 0) self.assertEqual(cm.rootIndex.column(), 0) ri = cm.rootIndex self.assertEqual(cm.columnCount(ri), 3) # avoids showing #document di = cm.index(0, 0, ri) self.assertEqual(cm.columnCount(di), 3) iv = cm.index(0, 0) self.assertEqual(cm.columnCount(iv), 3) ci = di.internalPointer() self.assertEqual(ci.node, qdn) self.assertEqual(ci.childNumber(), 0) self.assertEqual(ci.node.nodeName(), nname) for k in range(nkids): ks = ci.child(k) self.assertTrue(isinstance(ks, ComponentItem)) self.assertTrue(isinstance(ks.parent, ComponentItem)) self.assertEqual(ks.childNumber(), k) self.assertEqual(ks.node, kds[k]) self.assertEqual(ks.parent.node, qdn) self.assertEqual(ks.node.nodeName(), "kid%s" % k) self.assertEqual(ks.parent, ci) self.assertTrue(isinstance(ks.child(0), ComponentItem)) self.assertTrue(isinstance(ks.child(0).parent, ComponentItem)) self.assertEqual(ks.child(0).childNumber(), 0) self.assertEqual(ks.child(0).node, tkds[k]) self.assertEqual(ks.child(0).parent.node, ks.node) self.assertEqual(ks.child(0).node.nodeName(), "#text") self.assertEqual( ks.child(0).node.toText().data(), '\nText\n %s\n' % k) self.assertEqual(ks.child(0).parent, ks) insd = self.__rnd.randint(0, nkids - 1) inkd = 
doc.createElement("insertedkid") self.assertTrue(not cm.insertItem(insd, inkd, QModelIndex())) self.assertTrue(cm.insertItem(insd, inkd, di)) for k in range(nkids+1): ks = ci.child(k) if k == insd: self.assertTrue(isinstance(ks, ComponentItem)) self.assertTrue(isinstance(ks.parent, ComponentItem)) self.assertEqual(ks.childNumber(), k) self.assertEqual(ks.node, inkd) self.assertEqual(ks.parent.node, qdn) self.assertEqual(ks.node.nodeName(), "insertedkid") self.assertEqual(ks.parent, ci) continue kk = k if k < insd else k - 1 self.assertTrue(isinstance(ks, ComponentItem)) self.assertTrue(isinstance(ks.parent, ComponentItem)) self.assertEqual(ks.childNumber(), k) self.assertEqual(ks.node, kds[kk]) self.assertEqual(ks.parent.node, qdn) self.assertEqual(ks.node.nodeName(), "kid%s" % kk) self.assertEqual(ks.parent, ci) self.assertTrue(isinstance(ks.child(0), ComponentItem)) self.assertTrue(isinstance(ks.child(0).parent, ComponentItem)) self.assertEqual(ks.child(0).childNumber(), 0) self.assertEqual(ks.child(0).node, tkds[kk]) self.assertEqual(ks.child(0).parent.node, ks.node) self.assertEqual(ks.child(0).node.nodeName(), "#text") self.assertEqual( ks.child(0).node.toText().data(), '\nText\n %s\n' % kk) self.assertEqual(ks.child(0).parent, ks) def test_appendItem(self): fun = sys._getframe().f_code.co_name print("Run: %s.%s() " % (self.__class__.__name__, fun)) doc = QDomDocument() nname = "definition" qdn = doc.createElement(nname) doc.appendChild(qdn) nkids = self.__rnd.randint(1, 10) kds = [] tkds = [] for n in range(nkids): kds.append(doc.createElement("kid%s" % n)) kds[-1].setAttribute("name", "myname%s" % n) kds[-1].setAttribute("type", "mytype%s" % n) kds[-1].setAttribute("units", "myunits%s" % n) qdn.appendChild(kds[-1]) tkds.append(doc.createTextNode("\nText\n %s\n" % n)) kds[-1].appendChild(tkds[-1]) # print doc allAttr = True cm = ComponentModel(doc, allAttr) self.assertTrue(isinstance(cm, QAbstractItemModel)) self.assertTrue(isinstance(cm.rootIndex, QModelIndex)) cd = cm.rootIndex.internalPointer() self.assertTrue(isinstance(cd, ComponentItem)) self.assertEqual(cm.rootIndex.row(), 0) self.assertEqual(cm.rootIndex.column(), 0) ri = cm.rootIndex self.assertEqual(cm.columnCount(ri), 3) # avoids showing #document di = cm.index(0, 0, ri) self.assertEqual(cm.columnCount(di), 3) iv = cm.index(0, 0) self.assertEqual(cm.columnCount(iv), 3) ci = di.internalPointer() self.assertEqual(ci.node, qdn) self.assertEqual(ci.childNumber(), 0) self.assertEqual(ci.node.nodeName(), nname) for k in range(nkids): ks = ci.child(k) self.assertTrue(isinstance(ks, ComponentItem)) self.assertTrue(isinstance(ks.parent, ComponentItem)) self.assertEqual(ks.childNumber(), k) self.assertEqual(ks.node, kds[k]) self.assertEqual(ks.parent.node, qdn) self.assertEqual(ks.node.nodeName(), "kid%s" % k) self.assertEqual(ks.parent, ci) self.assertTrue(isinstance(ks.child(0), ComponentItem)) self.assertTrue(isinstance(ks.child(0).parent, ComponentItem)) self.assertEqual(ks.child(0).childNumber(), 0) self.assertEqual(ks.child(0).node, tkds[k]) self.assertEqual(ks.child(0).parent.node, ks.node) self.assertEqual(ks.child(0).node.nodeName(), "#text") self.assertEqual( ks.child(0).node.toText().data(), '\nText\n %s\n' % k) self.assertEqual(ks.child(0).parent, ks) inkd = doc.createElement("insertedkid") self.assertTrue(not cm.appendItem(inkd, QModelIndex())) self.assertTrue(cm.appendItem(inkd, di)) for k in range(nkids): ks = ci.child(k) self.assertTrue(isinstance(ks, ComponentItem)) self.assertTrue(isinstance(ks.parent, 
ComponentItem)) self.assertEqual(ks.childNumber(), k) self.assertEqual(ks.node, kds[k]) self.assertEqual(ks.parent.node, qdn) self.assertEqual(ks.node.nodeName(), "kid%s" % k) self.assertEqual(ks.parent, ci) self.assertTrue(isinstance(ks.child(0), ComponentItem)) self.assertTrue(isinstance(ks.child(0).parent, ComponentItem)) self.assertEqual(ks.child(0).childNumber(), 0) self.assertEqual(ks.child(0).node, tkds[k]) self.assertEqual(ks.child(0).parent.node, ks.node) self.assertEqual(ks.child(0).node.nodeName(), "#text") self.assertEqual( ks.child(0).node.toText().data(), '\nText\n %s\n' % k) self.assertEqual(ks.child(0).parent, ks) # print k, ks.childNumber() k = nkids ks = ci.child(k) self.assertTrue(isinstance(ks, ComponentItem)) self.assertTrue(isinstance(ks.parent, ComponentItem)) self.assertEqual(ks.childNumber(), k) self.assertEqual(ks.node, inkd) self.assertEqual(ks.parent.node, qdn) self.assertEqual(ks.node.nodeName(), "insertedkid") self.assertEqual(ks.parent, ci) def test_removeItem(self): fun = sys._getframe().f_code.co_name print("Run: %s.%s() " % (self.__class__.__name__, fun)) doc = QDomDocument()<|fim▁hole|> nkids = self.__rnd.randint(1, 10) kds = [] tkds = [] for n in range(nkids): kds.append(doc.createElement("kid%s" % n)) kds[-1].setAttribute("name", "myname%s" % n) kds[-1].setAttribute("type", "mytype%s" % n) kds[-1].setAttribute("units", "myunits%s" % n) qdn.appendChild(kds[-1]) tkds.append(doc.createTextNode("\nText\n %s\n" % n)) kds[-1].appendChild(tkds[-1]) # print doc allAttr = True cm = ComponentModel(doc, allAttr) self.assertTrue(isinstance(cm, QAbstractItemModel)) self.assertTrue(isinstance(cm.rootIndex, QModelIndex)) cd = cm.rootIndex.internalPointer() self.assertTrue(isinstance(cd, ComponentItem)) self.assertEqual(cm.rootIndex.row(), 0) self.assertEqual(cm.rootIndex.column(), 0) ri = cm.rootIndex self.assertEqual(cm.columnCount(ri), 3) # avoids showing #document di = cm.index(0, 0, ri) self.assertEqual(cm.columnCount(di), 3) iv = cm.index(0, 0) self.assertEqual(cm.columnCount(iv), 3) ci = di.internalPointer() self.assertEqual(ci.node, qdn) self.assertEqual(ci.childNumber(), 0) self.assertEqual(ci.node.nodeName(), nname) for k in range(nkids): ks = ci.child(k) self.assertTrue(isinstance(ks, ComponentItem)) self.assertTrue(isinstance(ks.parent, ComponentItem)) self.assertEqual(ks.childNumber(), k) self.assertEqual(ks.node, kds[k]) self.assertEqual(ks.parent.node, qdn) self.assertEqual(ks.node.nodeName(), "kid%s" % k) self.assertEqual(ks.parent, ci) self.assertTrue(isinstance(ks.child(0), ComponentItem)) self.assertTrue(isinstance(ks.child(0).parent, ComponentItem)) self.assertEqual(ks.child(0).childNumber(), 0) self.assertEqual(ks.child(0).node, tkds[k]) self.assertEqual(ks.child(0).parent.node, ks.node) self.assertEqual(ks.child(0).node.nodeName(), "#text") self.assertEqual( ks.child(0).node.toText().data(), '\nText\n %s\n' % k) self.assertEqual(ks.child(0).parent, ks) rmvd = self.__rnd.randint(0, nkids - 1) # rmkd = ci.child(rmvd) self.assertTrue(not cm.removeItem(rmvd, QModelIndex())) self.assertTrue(cm.removeItem(rmvd, di)) for k in range(nkids): if k == rmvd: continue kk = k if k < rmvd else k - 1 ks = ci.child(kk) self.assertTrue(isinstance(ks, ComponentItem)) self.assertTrue(isinstance(ks.parent, ComponentItem)) self.assertEqual(ks.childNumber(), kk) self.assertEqual(ks.node, kds[k]) self.assertEqual(ks.parent.node, qdn) self.assertEqual(ks.node.nodeName(), "kid%s" % k) self.assertEqual(ks.parent, ci) self.assertTrue(isinstance(ks.child(0), ComponentItem)) 
self.assertTrue(isinstance(ks.child(0).parent, ComponentItem)) self.assertEqual(ks.child(0).childNumber(), 0) self.assertEqual(ks.child(0).node, tkds[k]) self.assertEqual(ks.child(0).parent.node, ks.node) self.assertEqual(ks.child(0).node.nodeName(), "#text") self.assertEqual( ks.child(0).node.toText().data(), '\nText\n %s\n' % k) self.assertEqual(ks.child(0).parent, ks) if __name__ == '__main__': unittest.main()<|fim▁end|>
nname = "definition" qdn = doc.createElement(nname) doc.appendChild(qdn)
<|file_name|>end.py<|end_file_name|><|fim▁begin|>import pygame from pygame.locals import * from math import sin import states class EndEvent(object): text = [ "Ah, hello there. Welcome to the center of the moon!", "Oh, me? I'm just the man in the moon. I live here.", "Don't act so shocked! It's rude you know.", "I don't get a lot of visitors down here, what with the moon rabbits.", "How did you befriend them? . . . You did befriend them, didn't you?", "I really don't want to have to clean up another set of blood stains.", "Hey, I think I hear them coming. They must really like you!" ] texture = None font = None def __init__(self, pos): self.pos = pos self.start_time = None self.time = 0.0 self.fade = None def update(self, delta, pos, player_pos): self.time += delta pos = (pos[0] + self.pos[0], pos[1] + self.pos[1]) distance = abs(player_pos[0] - pos[0]) + abs(player_pos[1] - pos[1]) if not self.start_time and distance < 5.0: self.start_time = self.time if self.fade != None: self.fade += delta / 4.0 if self.fade > 1.0: raise states.StateChange(states.MainMenuState()) elif self.start_time: count = int((self.time - self.start_time) / 0.05) i = 0 while count > len(EndEvent.text[i]) + 50: count -= len(EndEvent.text[i]) + 50 i += 1 if i >= len(EndEvent.text): self.fade = 0.0 break def render(self, screen, camera, pos): if not EndEvent.texture: EndEvent.texture = pygame.image.load("data/art/maninthemoon.png") EndEvent.texture.set_colorkey((255, 0, 255)) pos = (pos[0] + self.pos[0], pos[1] + self.pos[1]) spos = (pos[0], pos[1] + sin(self.time * 8) / 8) spos = camera.screen_pos(spos) spos = ( spos[0] - EndEvent.texture.get_width() / 2, spos[1] - EndEvent.texture.get_height() / 2 ) screen.blit(self.texture, spos) if self.start_time: if not EndEvent.font: EndEvent.font = pygame.font.Font("data/fonts/Prototype.ttf", 12) count = int((self.time - self.start_time) / 0.05) i = 0 while count > len(EndEvent.text[i]) + 50 and i < len(EndEvent.text) - 1: count -= len(EndEvent.text[i]) + 50 i += 1 words = EndEvent.text[i][:count].split() lines = [""] for word in words: if len(lines[-1]) > 32: lines.append(word) else: lines[-1] += " " + word for i in range(len(lines)): texture = EndEvent.font.render(lines[i], 1, (255, 255, 255)) spos = camera.screen_pos(pos) spos = ( spos[0] - texture.get_width() / 2,<|fim▁hole|> screen.blit(texture, spos) if self.fade != None: a = 255.0 - self.fade * 255.0 screen.fill((a, a, a), special_flags=BLEND_MULT)<|fim▁end|>
spos[1] - texture.get_height() / 2 + i * 20 - 40 )
<|file_name|>PromptSummarizeScores.java<|end_file_name|><|fim▁begin|>/* * Copyright (c) 2015 High Tech Kids. All rights reserved * HighTechKids is on the web at: http://www.hightechkids.org * This code is released under GPL; see LICENSE.txt for details. */ <|fim▁hole|>import java.sql.SQLException; import java.util.Set; import jakarta.servlet.ServletContext; import jakarta.servlet.ServletException; import jakarta.servlet.annotation.WebServlet; import jakarta.servlet.http.HttpServletRequest; import jakarta.servlet.http.HttpServletResponse; import jakarta.servlet.http.HttpSession; import javax.sql.DataSource; import fll.Tournament; import fll.db.Queries; import fll.web.ApplicationAttributes; import fll.web.AuthenticationContext; import fll.web.BaseFLLServlet; import fll.web.SessionAttributes; import fll.web.UserRole; import fll.web.WebUtils; /** * Support for and handle the result from promptSummarizeScores.jsp. */ @WebServlet("/report/PromptSummarizeScores") public class PromptSummarizeScores extends BaseFLLServlet { private static final org.apache.logging.log4j.Logger LOGGER = org.apache.logging.log4j.LogManager.getLogger(); /** * Session variable key for the URL to redirect to after score summarization. */ public static final String SUMMARY_REDIRECT_KEY = "summary_redirect"; @Override protected void processRequest(final HttpServletRequest request, final HttpServletResponse response, final ServletContext application, final HttpSession session) throws IOException, ServletException { final AuthenticationContext auth = SessionAttributes.getAuthentication(session); if (!auth.requireRoles(request, response, session, Set.of(UserRole.HEAD_JUDGE), false)) { return; } if (null != request.getParameter("recompute")) { WebUtils.sendRedirect(application, response, "summarizePhase1.jsp"); } else { final String url = SessionAttributes.getAttribute(session, SUMMARY_REDIRECT_KEY, String.class); LOGGER.debug("redirect is {}", url); if (null == url) { WebUtils.sendRedirect(application, response, "index.jsp"); } else { WebUtils.sendRedirect(application, response, url); } } } /** * Check if summary scores need to be updated. If they do, redirect and set * the session variable SUMMARY_REDIRECT to point to * redirect. * * @param response used to send a redirect * @param application the application context * @param session the session context * @param redirect the page to visit once the scores have been summarized * @return if the summary scores need to be updated, the calling method should * return immediately if this is true as a redirect has been executed. 
*/ public static boolean checkIfSummaryUpdated(final HttpServletResponse response, final ServletContext application, final HttpSession session, final String redirect) { if (LOGGER.isTraceEnabled()) { LOGGER.trace("top check if summary updated"); } final AuthenticationContext auth = SessionAttributes.getAuthentication(session); if (!auth.isHeadJudge()) { // only the head judge can summarize scores, so don't prompt others to summarize // the scores return false; } final DataSource datasource = ApplicationAttributes.getDataSource(application); try (Connection connection = datasource.getConnection()) { final int tournamentId = Queries.getCurrentTournament(connection); final Tournament tournament = Tournament.findTournamentByID(connection, tournamentId); if (tournament.checkTournamentNeedsSummaryUpdate(connection)) { if (LOGGER.isTraceEnabled()) { LOGGER.trace("Needs summary update"); } if (null != session.getAttribute(SUMMARY_REDIRECT_KEY)) { LOGGER.debug("redirect has already been set, it must be the case that the user is skipping summarization, allow it"); return false; } else { session.setAttribute(SUMMARY_REDIRECT_KEY, redirect); WebUtils.sendRedirect(application, response, "/report/promptSummarizeScores.jsp"); return true; } } else { if (LOGGER.isTraceEnabled()) { LOGGER.trace("No updated needed"); } return false; } } catch (final SQLException e) { LOGGER.error(e, e); throw new RuntimeException(e); } catch (final IOException e) { LOGGER.error(e, e); throw new RuntimeException(e); } } }<|fim▁end|>
package fll.web.report; import java.io.IOException; import java.sql.Connection;
<|file_name|>ApplicationStartHandler.java<|end_file_name|><|fim▁begin|>package org.dominokit.domino.api.client;<|fim▁hole|>public interface ApplicationStartHandler { void onApplicationStarted(); }<|fim▁end|>
@FunctionalInterface
<|file_name|>RewritingMergingEntryIterator.java<|end_file_name|><|fim▁begin|>/** * Copyright (c) 2013-2019 Contributors to the Eclipse Foundation * * <p> See the NOTICE file distributed with this work for additional information regarding copyright * ownership. All rights reserved. This program and the accompanying materials are made available * under the terms of the Apache License, Version 2.0 which accompanies this distribution and is * available at http://www.apache.org/licenses/LICENSE-2.0.txt */ package org.locationtech.geowave.core.store.util; import java.util.Iterator; import java.util.Map; import org.locationtech.geowave.core.store.adapter.PersistentAdapterStore; import org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter;<|fim▁hole|>import org.locationtech.geowave.core.store.operations.RowDeleter; import org.locationtech.geowave.core.store.operations.RowWriter; import org.slf4j.Logger; import org.slf4j.LoggerFactory; public class RewritingMergingEntryIterator<T> extends MergingEntryIterator<T> { private static final Logger LOGGER = LoggerFactory.getLogger(RewritingMergingEntryIterator.class); private final RowWriter writer; private final RowDeleter deleter; public RewritingMergingEntryIterator( final PersistentAdapterStore adapterStore, final Index index, final Iterator<GeoWaveRow> scannerIt, final Map<Short, RowMergingDataAdapter> mergingAdapters, final RowWriter writer, final RowDeleter deleter) { super(adapterStore, index, scannerIt, null, null, mergingAdapters, null, null); this.writer = writer; this.deleter = deleter; } @Override protected GeoWaveRow mergeSingleRowValues( final GeoWaveRow singleRow, final RowTransform rowTransform) { if (singleRow.getFieldValues().length < 2) { return singleRow; } deleter.delete(singleRow); deleter.flush(); final GeoWaveRow merged = super.mergeSingleRowValues(singleRow, rowTransform); writer.write(merged); return merged; } }<|fim▁end|>
import org.locationtech.geowave.core.store.adapter.RowMergingDataAdapter.RowTransform; import org.locationtech.geowave.core.store.api.Index; import org.locationtech.geowave.core.store.entities.GeoWaveRow;
<|file_name|>raft.go<|end_file_name|><|fim▁begin|>// Copyright 2015 CoreOS, Inc. // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // http://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. package etcdserver import ( "encoding/json" "expvar" "os" "sort" "sync/atomic" "time" pb "github.com/coreos/etcd/etcdserver/etcdserverpb" "github.com/coreos/etcd/pkg/pbutil" "github.com/coreos/etcd/pkg/types" "github.com/coreos/etcd/raft" "github.com/coreos/etcd/raft/raftpb" "github.com/coreos/etcd/rafthttp" "github.com/coreos/etcd/wal" "github.com/coreos/etcd/wal/walpb" "github.com/coreos/etcd/Godeps/_workspace/src/github.com/coreos/pkg/capnslog" ) const ( // Number of entries for slow follower to catch-up after compacting // the raft storage entries. // We expect the follower has a millisecond level latency with the leader. // The max throughput is around 10K. Keep a 5K entries is enough for helping // follower to catch up. numberOfCatchUpEntries = 5000 // The max throughput of etcd will not exceed 100MB/s (100K * 1KB value). // Assuming the RTT is around 10ms, 1MB max size is large enough. maxSizePerMsg = 1 * 1024 * 1024 // Never overflow the rafthttp buffer, which is 4096. // TODO: a better const? maxInflightMsgs = 4096 / 8 ) var ( // indirection for expvar func interface // expvar panics when publishing duplicate name // expvar does not support remove a registered name // so only register a func that calls raftStatus // and change raftStatus as we need. raftStatus func() raft.Status ) func init() { raft.SetLogger(capnslog.NewPackageLogger("github.com/coreos/etcd", "raft")) expvar.Publish("raft.status", expvar.Func(func() interface{} { return raftStatus() })) } type RaftTimer interface { Index() uint64 Term() uint64 } // apply contains entries, snapshot be applied. // After applied all the items, the application needs // to send notification to done chan. type apply struct { entries []raftpb.Entry snapshot raftpb.Snapshot done chan struct{} } type raftNode struct { raft.Node // a chan to send out apply applyc chan apply // TODO: remove the etcdserver related logic from raftNode // TODO: add a state machine interface to apply the commit entries // and do snapshot/recover s *EtcdServer // utility ticker <-chan time.Time raftStorage *raft.MemoryStorage storage Storage // transport specifies the transport to send and receive msgs to members. // Sending messages MUST NOT block. It is okay to drop messages, since // clients should timeout and reissue their messages. // If transport is nil, server will panic. 
transport rafthttp.Transporter // Cache of the latest raft index and raft term the server has seen index uint64 term uint64 lead uint64 stopped chan struct{} done chan struct{} } func (r *raftNode) run() { r.stopped = make(chan struct{}) r.done = make(chan struct{}) var syncC <-chan time.Time defer r.stop() for { select { case <-r.ticker: r.Tick() case rd := <-r.Ready(): if rd.SoftState != nil { atomic.StoreUint64(&r.lead, rd.SoftState.Lead) if rd.RaftState == raft.StateLeader { syncC = r.s.SyncTicker // TODO: remove the nil checking // current test utility does not provide the stats if r.s.stats != nil { r.s.stats.BecomeLeader() } } else { syncC = nil } } apply := apply{ entries: rd.CommittedEntries, snapshot: rd.Snapshot, done: make(chan struct{}), } select { case r.applyc <- apply: case <-r.stopped: return } if !raft.IsEmptySnap(rd.Snapshot) { if err := r.storage.SaveSnap(rd.Snapshot); err != nil { plog.Fatalf("raft save snapshot error: %v", err) } r.raftStorage.ApplySnapshot(rd.Snapshot) plog.Infof("raft applied incoming snapshot at index %d", rd.Snapshot.Metadata.Index) } if err := r.storage.Save(rd.HardState, rd.Entries); err != nil { plog.Fatalf("raft save state and entries error: %v", err) } r.raftStorage.Append(rd.Entries) r.s.send(rd.Messages) select { case <-apply.done: case <-r.stopped: return } r.Advance() case <-syncC: r.s.sync(defaultSyncTimeout) case <-r.stopped: return } } } func (r *raftNode) apply() chan apply { return r.applyc } func (r *raftNode) stop() { r.Stop() r.transport.Stop() if err := r.storage.Close(); err != nil { plog.Panicf("raft close storage error: %v", err) } close(r.done) } // for testing func (r *raftNode) pauseSending() { p := r.transport.(rafthttp.Pausable) p.Pause() } func (r *raftNode) resumeSending() { p := r.transport.(rafthttp.Pausable) p.Resume() } func startNode(cfg *ServerConfig, cl *cluster, ids []types.ID) (id types.ID, n raft.Node, s *raft.MemoryStorage, w *wal.WAL) { var err error member := cl.MemberByName(cfg.Name) metadata := pbutil.MustMarshal( &pb.Metadata{ NodeID: uint64(member.ID), ClusterID: uint64(cl.ID()), }, ) if err := os.MkdirAll(cfg.SnapDir(), privateDirMode); err != nil { plog.Fatalf("create snapshot directory error: %v", err) } if w, err = wal.Create(cfg.WALDir(), metadata); err != nil { plog.Fatalf("create wal error: %v", err) } peers := make([]raft.Peer, len(ids)) for i, id := range ids { ctx, err := json.Marshal((*cl).Member(id)) if err != nil { plog.Panicf("marshal member should never fail: %v", err) } peers[i] = raft.Peer{ID: uint64(id), Context: ctx} } id = member.ID plog.Infof("starting member %s in cluster %s", id, cl.ID()) s = raft.NewMemoryStorage() c := &raft.Config{ ID: uint64(id), ElectionTick: cfg.ElectionTicks, HeartbeatTick: 1, Storage: s, MaxSizePerMsg: maxSizePerMsg, MaxInflightMsgs: maxInflightMsgs, } n = raft.StartNode(c, peers) raftStatus = n.Status return } func restartNode(cfg *ServerConfig, snapshot *raftpb.Snapshot) (types.ID, *cluster, raft.Node, *raft.MemoryStorage, *wal.WAL) { var walsnap walpb.Snapshot if snapshot != nil { walsnap.Index, walsnap.Term = snapshot.Metadata.Index, snapshot.Metadata.Term } w, id, cid, st, ents := readWAL(cfg.WALDir(), walsnap) plog.Infof("restarting member %s in cluster %s at commit index %d", id, cid, st.Commit) cl := newCluster("") cl.SetID(cid) s := raft.NewMemoryStorage() if snapshot != nil { s.ApplySnapshot(*snapshot) } s.SetHardState(st) s.Append(ents) c := &raft.Config{ ID: uint64(id), ElectionTick: cfg.ElectionTicks, HeartbeatTick: 1, Storage: s, 
MaxSizePerMsg: maxSizePerMsg, MaxInflightMsgs: maxInflightMsgs, } n := raft.RestartNode(c) raftStatus = n.Status return id, cl, n, s, w } func restartAsStandaloneNode(cfg *ServerConfig, snapshot *raftpb.Snapshot) (types.ID, *cluster, raft.Node, *raft.MemoryStorage, *wal.WAL) { var walsnap walpb.Snapshot if snapshot != nil { walsnap.Index, walsnap.Term = snapshot.Metadata.Index, snapshot.Metadata.Term } w, id, cid, st, ents := readWAL(cfg.WALDir(), walsnap) // discard the previously uncommitted entries for i, ent := range ents { if ent.Index > st.Commit { plog.Infof("discarding %d uncommitted WAL entries ", len(ents)-i) ents = ents[:i] break } } // force append the configuration change entries toAppEnts := createConfigChangeEnts(getIDs(snapshot, ents), uint64(id), st.Term, st.Commit) ents = append(ents, toAppEnts...) // force commit newly appended entries err := w.Save(raftpb.HardState{}, toAppEnts) if err != nil { plog.Fatalf("%v", err) } if len(ents) != 0 { st.Commit = ents[len(ents)-1].Index } plog.Printf("forcing restart of member %s in cluster %s at commit index %d", id, cid, st.Commit) cl := newCluster("") cl.SetID(cid) s := raft.NewMemoryStorage() if snapshot != nil { s.ApplySnapshot(*snapshot) } s.SetHardState(st) s.Append(ents) c := &raft.Config{ ID: uint64(id), ElectionTick: cfg.ElectionTicks, HeartbeatTick: 1, Storage: s, MaxSizePerMsg: maxSizePerMsg, MaxInflightMsgs: maxInflightMsgs, } n := raft.RestartNode(c) raftStatus = n.Status return id, cl, n, s, w } // getIDs returns an ordered set of IDs included in the given snapshot and // the entries. The given snapshot/entries can contain two kinds of // ID-related entry: // - ConfChangeAddNode, in which case the contained ID will be added into the set. // - ConfChangeAddRemove, in which case the contained ID will be removed from the set. func getIDs(snap *raftpb.Snapshot, ents []raftpb.Entry) []uint64 { ids := make(map[uint64]bool) if snap != nil { for _, id := range snap.Metadata.ConfState.Nodes { ids[id] = true } } for _, e := range ents { if e.Type != raftpb.EntryConfChange { continue } var cc raftpb.ConfChange pbutil.MustUnmarshal(&cc, e.Data) switch cc.Type {<|fim▁hole|> case raftpb.ConfChangeAddNode: ids[cc.NodeID] = true case raftpb.ConfChangeRemoveNode: delete(ids, cc.NodeID) default: plog.Panicf("ConfChange Type should be either ConfChangeAddNode or ConfChangeRemoveNode!") } } sids := make(types.Uint64Slice, 0) for id := range ids { sids = append(sids, id) } sort.Sort(sids) return []uint64(sids) } // createConfigChangeEnts creates a series of Raft entries (i.e. // EntryConfChange) to remove the set of given IDs from the cluster. The ID // `self` is _not_ removed, even if present in the set. // If `self` is not inside the given ids, it creates a Raft entry to add a // default member with the given `self`. 
func createConfigChangeEnts(ids []uint64, self uint64, term, index uint64) []raftpb.Entry { ents := make([]raftpb.Entry, 0) next := index + 1 found := false for _, id := range ids { if id == self { found = true continue } cc := &raftpb.ConfChange{ Type: raftpb.ConfChangeRemoveNode, NodeID: id, } e := raftpb.Entry{ Type: raftpb.EntryConfChange, Data: pbutil.MustMarshal(cc), Term: term, Index: next, } ents = append(ents, e) next++ } if !found { m := Member{ ID: types.ID(self), RaftAttributes: RaftAttributes{PeerURLs: []string{"http://localhost:7001", "http://localhost:2380"}}, } ctx, err := json.Marshal(m) if err != nil { plog.Panicf("marshal member should never fail: %v", err) } cc := &raftpb.ConfChange{ Type: raftpb.ConfChangeAddNode, NodeID: self, Context: ctx, } e := raftpb.Entry{ Type: raftpb.EntryConfChange, Data: pbutil.MustMarshal(cc), Term: term, Index: next, } ents = append(ents, e) } return ents }<|fim▁end|>
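// A hypothetical usage sketch, not part of the etcd source row above: it shows how
// getIDs and createConfigChangeEnts fit together when a member is forced to restart
// standalone. It assumes the same etcdserver package context and the raftpb/pbutil
// imports the file already uses; the function name and sample values are invented.
func sketchForceStandalone() {
	// The WAL says members 2 and 3 were added by conf-change entries; member 1
	// (self) wants to continue as a one-node cluster.
	ents := []raftpb.Entry{
		{Type: raftpb.EntryConfChange, Term: 2, Index: 5,
			Data: pbutil.MustMarshal(&raftpb.ConfChange{Type: raftpb.ConfChangeAddNode, NodeID: 2})},
		{Type: raftpb.EntryConfChange, Term: 2, Index: 6,
			Data: pbutil.MustMarshal(&raftpb.ConfChange{Type: raftpb.ConfChangeAddNode, NodeID: 3})},
	}
	ids := getIDs(nil, ents) // sorted set of member IDs seen so far: [2 3]

	// Build entries that remove every other member; since self (1) is not in ids,
	// a ConfChangeAddNode entry re-adding member 1 is appended as well.
	forced := createConfigChangeEnts(ids, 1, 2 /*term*/, 6 /*last index*/)
	// forced now holds remove entries for 2 and 3 at indexes 7-8 and an add entry
	// for member 1 at index 9, ready to be force-appended to the WAL.
	_ = forced
}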
<|file_name|>DataInfo.java<|end_file_name|><|fim▁begin|>package org.apache.velocity.tools.view; /* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ /** * <p>ToolInfo implementation to handle "primitive" data types. * It currently supports String, Number, and Boolean data.</p> * * <p>An example of data elements specified in your toolbox.xml * might be: * <pre> * &lt;data type="string"&gt; * &lt;key&gt;app_name&lt;/key&gt; * &lt;value&gt;FooWeb Deluxe&lt;/value&gt; * &lt;/data&gt; * &lt;data type="number"&gt; * &lt;key&gt;app_version&lt;/key&gt; * &lt;value&gt;4.2&lt;/value&gt; * &lt;/data&gt; * &lt;data type="boolean"&gt; * &lt;key&gt;debug&lt;/key&gt; * &lt;value&gt;true&lt;/value&gt; * &lt;/data&gt; * &lt;data type="number"&gt; * &lt;key&gt;screen_width&lt;/key&gt; * &lt;value&gt;400&lt;/value&gt; * &lt;/data&gt; * </pre></p> * * @author Nathan Bubna * @deprecated Use {@link org.apache.velocity.tools.config.Data} * @version $Id: DataInfo.java 651469 2008-04-25 00:46:13Z nbubna $ */ @Deprecated public class DataInfo implements ToolInfo { public static final String TYPE_STRING = "string"; public static final String TYPE_NUMBER = "number"; public static final String TYPE_BOOLEAN = "boolean"; private static final int TYPE_ID_STRING = 0; private static final int TYPE_ID_NUMBER = 1; private static final int TYPE_ID_BOOLEAN = 2; private String key = null; private int type_id = TYPE_ID_STRING; private Object data = null; <|fim▁hole|> public DataInfo() {} /*********************** Mutators *************************/ public void setKey(String key) { this.key = key; } public void setType(String type) { if (TYPE_BOOLEAN.equalsIgnoreCase(type)) { this.type_id = TYPE_ID_BOOLEAN; } else if (TYPE_NUMBER.equalsIgnoreCase(type)) { this.type_id = TYPE_ID_NUMBER; } else /* if no type or type="string" */ { this.type_id = TYPE_ID_STRING; } } public void setValue(String value) { if (type_id == TYPE_ID_BOOLEAN) { this.data = Boolean.valueOf(value); } else if (type_id == TYPE_ID_NUMBER) { if (value.indexOf('.') >= 0) { this.data = new Double(value); } else { this.data = new Integer(value); } } else /* type is "string" */ { this.data = value; } } /*********************** Accessors *************************/ public String getKey() { return key; } public String getClassname() { return data != null ? data.getClass().getName() : null; } /** * Returns the data. Always returns the same * object since the data is a constant. Initialization * data is ignored. */ public Object getInstance(Object initData) { return data; } }<|fim▁end|>
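// A hypothetical usage sketch, not part of the Velocity source row above: it shows how
// one toolbox.xml <data> element maps onto DataInfo calls. The demo class name is
// invented and it assumes it sits in the same org.apache.velocity.tools.view package.
class DataInfoSketch {
    static Object numberExample() {
        DataInfo info = new DataInfo();
        info.setKey("app_version");   // <key>app_version</key>
        info.setType("number");       // type="number" selects TYPE_ID_NUMBER
        info.setValue("4.2");         // contains '.', so the value is stored as a Double
        return info.getInstance(null); // initialization data is ignored; returns 4.2
    }
}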
<|file_name|>SetBkColor.cpp<|end_file_name|><|fim▁begin|>// Header includes
#include <windows.h> // standard Windows API
#include <tchar.h>   // TCHAR type
#include <string.h>  // C string handling

// Prototype of the callback that handles window messages for this application.
LRESULT CALLBACK WindowProc(HWND hwnd, UINT uMsg, WPARAM wParam, LPARAM lParam);

// Definition of _tWinMain
int WINAPI _tWinMain(HINSTANCE hInstance, HINSTANCE hPrevInstance, LPTSTR lpCmdLine, int nShowCmd)
{
    // Variable declarations
    HWND hWnd;   // window handle returned by CreateWindow
    MSG msg;     // window message data
    WNDCLASS wc; // window class description

    // Window class setup
    wc.lpszClassName = _T("SetBkColor");                    // class name is "SetBkColor"
    wc.style = CS_HREDRAW | CS_VREDRAW;                     // redraw on horizontal/vertical resize
    wc.lpfnWndProc = WindowProc;                            // our window procedure
    wc.hInstance = hInstance;                               // instance handle from _tWinMain
    wc.hIcon = LoadIcon(NULL, IDI_APPLICATION);             // default application icon
    wc.hCursor = LoadCursor(NULL, IDC_ARROW);               // arrow cursor
    wc.hbrBackground = (HBRUSH)GetStockObject(WHITE_BRUSH); // white background brush
    wc.lpszMenuName = NULL;                                 // no menu
    wc.cbClsExtra = 0;
    wc.cbWndExtra = 0;

    // Register the window class
    if (!RegisterClass(&wc)) {
        // Error handling
        MessageBox(NULL, _T("RegisterClass failed!"), _T("SetBkColor"), MB_OK | MB_ICONHAND);
        return -1; // abnormal exit (1)
    }

    // Create a window of the "SetBkColor" class registered above
    hWnd = CreateWindow(_T("SetBkColor"), _T("SetBkColor"), WS_OVERLAPPEDWINDOW, CW_USEDEFAULT, CW_USEDEFAULT, CW_USEDEFAULT, CW_USEDEFAULT, NULL, NULL, hInstance, NULL);
    if (hWnd == NULL) {
        // Error handling
        MessageBox(NULL, _T("CreateWindow failed!"), _T("SetBkColor"), MB_OK | MB_ICONHAND);
        return -2; // abnormal exit (2)
    }

    // Show the window
    ShowWindow(hWnd, SW_SHOW);

    // Message loop: keep running while GetMessage returns a value greater than 0
    while (GetMessage(&msg, NULL, 0, 0) > 0) {
        // Dispatch the message to the window procedure (WindowProc)
        DispatchMessage(&msg);
    }

    // Program exit: return the exit code (msg.wParam)
    return (int)msg.wParam;
}

// Definition of WindowProc: window procedure with custom handling for selected messages
LRESULT CALLBACK WindowProc(HWND hwnd, UINT uMsg, WPARAM wParam, LPARAM lParam)
{
    switch (uMsg) {
        // The window is being created
        case WM_CREATE:
        {
            return 0; // report successful creation
        }
        break; // fall through to the default handling (DefWindowProc)

        // The window has been destroyed
        case WM_DESTROY:
        {
            // Post WM_QUIT with exit code 0 so GetMessage returns 0 and the message loop ends
            PostQuitMessage(0);
        }
        break; // fall through to the default handling (DefWindowProc)

        // Painting of the client area was requested
        case WM_PAINT:
        {
            HDC hDC;                       // device context handle
            PAINTSTRUCT ps;                // paint information
            TCHAR tszText[] = _T("ABCDE"); // text to draw, initialized to "ABCDE"
            size_t uiLen = 0;              // length of tszText

            // Begin painting; BeginPaint returns the device context handle
            hDC = BeginPaint(hwnd, &ps);

            // Set the background color to blue
            SetBkColor(hDC, RGB(0x00, 0x00, 0xff));

            // Set the text color to red
            SetTextColor(hDC, RGB(0xff, 0x00, 0x00));

            // Draw the string<|fim▁hole|>
            uiLen = _tcslen(tszText);                  // get the length of tszText
            TextOut(hDC, 50, 50, tszText, (int)uiLen); // draw tszText at (50, 50) in the window

            // End painting for this window
            EndPaint(hwnd, &ps);
        }
        break; // fall through to the default handling (DefWindowProc)

        // Any other message
        default:
            break; // fall through to the default handling (DefWindowProc)
    }

    // Leave everything else, including the return value, to DefWindowProc
    return DefWindowProc(hwnd, uMsg, wParam, lParam);
}<|fim▁end|>
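// A hypothetical variation, not part of the tutorial file above: SetBkColor only shows
// behind the text while the background mode is OPAQUE (the default). To draw the red
// text without the blue box, the WM_PAINT handler could instead use:
//
//     SetBkMode(hDC, TRANSPARENT);              // ignore the background color entirely
//     SetTextColor(hDC, RGB(0xff, 0x00, 0x00)); // red text
//     TextOut(hDC, 50, 80, tszText, (int)_tcslen(tszText));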
<|file_name|>test_connect_helpers.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*- # # test_connect_helpers.py # # This file is part of NEST. # # Copyright (C) 2004 The NEST Initiative # # NEST is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 2 of the License, or # (at your option) any later version. #<|fim▁hole|># GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with NEST. If not, see <http://www.gnu.org/licenses/>. import numpy as np import scipy.stats import nest from scipy.stats import truncexpon try: from mpi4py import MPI haveMPI4Py = True except ImportError: haveMPI4Py = False def gather_data(data_array): ''' Gathers data from all mpi processes by collecting all element in a list if data is a list and summing all elements to one numpy-array if data is one numpy-array. Returns gathered data if rank of current mpi node is zero and None otherwise. ''' if haveMPI4Py: data_array_list = MPI.COMM_WORLD.gather(data_array, root=0) if MPI.COMM_WORLD.Get_rank() == 0: if isinstance(data_array, list): gathered_data = [ item for sublist in data_array_list for item in sublist] else: gathered_data = sum(data_array_list) return gathered_data else: return None else: return data_array def bcast_data(data): """ Broadcasts data from the root MPI node to all other nodes. """ if haveMPI4Py: data = MPI.COMM_WORLD.bcast(data, root=0) return data def is_array(data): ''' Returns True if data is a list or numpy-array and False otherwise. ''' return isinstance(data, (list, np.ndarray, np.generic)) def mpi_barrier(): if haveMPI4Py: MPI.COMM_WORLD.Barrier() def mpi_assert(data_original, data_test, TestCase): ''' Compares data_original and data_test using assertTrue from the TestCase. ''' data_original = gather_data(data_original) # only test if on rank 0 if data_original is not None: if isinstance(data_original, (np.ndarray, np.generic)) \ and isinstance(data_test, (np.ndarray, np.generic)): TestCase.assertTrue(np.allclose(data_original, data_test)) else: TestCase.assertTrue(data_original == data_test) def all_equal(x): ''' Tests if all elements in a list are equal. Returns True or False ''' return x.count(x[0]) == len(x) def get_connectivity_matrix(pop1, pop2): ''' Returns a connectivity matrix describing all connections from pop1 to pop2 such that M_ij describes the connection between the jth neuron in pop1 to the ith neuron in pop2. ''' M = np.zeros((len(pop2), len(pop1))) connections = nest.GetConnections(pop1, pop2) index_dic = {} for count, node in enumerate(pop1): index_dic[node.get('global_id')] = count for count, node in enumerate(pop2): index_dic[node.get('global_id')] = count for source, target in zip(connections.sources(), connections.targets()): M[index_dic[target]][index_dic[source]] += 1 return M def get_weighted_connectivity_matrix(pop1, pop2, label): ''' Returns a weighted connectivity matrix describing all connections from pop1 to pop2 such that M_ij describes the connection between the jth neuron in pop1 to the ith neuron in pop2. Only works without multapses. 
''' M = np.zeros((len(pop2), len(pop1))) connections = nest.GetConnections(pop1, pop2) sources = connections.get('source') targets = connections.get('target') weights = connections.get(label) index_dic = {} for count, node in enumerate(pop1): index_dic[node.get('global_id')] = count for count, node in enumerate(pop2): index_dic[node.get('global_id')] = count for counter, weight in enumerate(weights): source_id = sources[counter] target_id = targets[counter] M[index_dic[target_id]][index_dic[source_id]] += weight return M def check_synapse(params, values, syn_params, TestCase): for i, param in enumerate(params): syn_params[param] = values[i] TestCase.setUpNetwork(TestCase.conn_dict, syn_params) for i, param in enumerate(params): conns = nest.GetConnections(TestCase.pop1, TestCase.pop2) conn_params = conns.get(param) TestCase.assertTrue(all_equal(conn_params)) TestCase.assertTrue(conn_params[0] == values[i]) # copied from Masterthesis, Daniel Hjertholm def counter(x, fan, source_pop, target_pop): ''' Count similar elements in list. Parameters ---------- x: Any list. Return values ------------- list containing counts of similar elements. ''' N_p = len(source_pop) if fan == 'in' else len(target_pop) # of pool nodes. start = min(x) counts = [0] * N_p for elem in x: counts[elem - start] += 1 return counts def get_degrees(fan, pop1, pop2): M = get_connectivity_matrix(pop1, pop2) if fan == 'in': degrees = np.sum(M, axis=1) elif fan == 'out': degrees = np.sum(M, axis=0) return degrees # adapted from Masterthesis, Daniel Hjertholm def get_expected_degrees_fixedDegrees(N, fan, len_source_pop, len_target_pop): N_d = len_target_pop if fan == 'in' else len_source_pop # of driver nodes. N_p = len_source_pop if fan == 'in' else len_target_pop # of pool nodes. expected_degree = N_d * N / float(N_p) expected = [expected_degree] * N_p return expected # adapted from Masterthesis, Daniel Hjertholm def get_expected_degrees_totalNumber(N, fan, len_source_pop, len_target_pop): expected_indegree = [N / float(len_target_pop)] * len_target_pop expected_outdegree = [N / float(len_source_pop)] * len_source_pop if fan == 'in': return expected_indegree elif fan == 'out': return expected_outdegree # copied from Masterthesis, Daniel Hjertholm def get_expected_degrees_bernoulli(p, fan, len_source_pop, len_target_pop): ''' Calculate expected degree distribution. Degrees with expected number of observations below e_min are combined into larger bins. Return values ------------- 2D array. The four columns contain degree, expected number of observation, actual number observations, and the number of bins combined. ''' n = len_source_pop if fan == 'in' else len_target_pop n_p = len_target_pop if fan == 'in' else len_source_pop mid = int(round(n * p)) e_min = 5 # Combine from front. data_front = [] cumexp = 0.0 bins_combined = 0 for degree in range(mid): cumexp += scipy.stats.binom.pmf(degree, n, p) * n_p bins_combined += 1 if cumexp < e_min: if degree == mid - 1: if len(data_front) == 0: raise RuntimeWarning('Not enough data') deg, exp, obs, num = data_front[-1] data_front[-1] = (deg, exp + cumexp, obs, num + bins_combined) else: continue else: data_front.append((degree - bins_combined + 1, cumexp, 0, bins_combined)) cumexp = 0.0 bins_combined = 0 # Combine from back. 
data_back = [] cumexp = 0.0 bins_combined = 0 for degree in reversed(range(mid, n + 1)): cumexp += scipy.stats.binom.pmf(degree, n, p) * n_p bins_combined += 1 if cumexp < e_min: if degree == mid: if len(data_back) == 0: raise RuntimeWarning('Not enough data') deg, exp, obs, num = data_back[-1] data_back[-1] = (degree, exp + cumexp, obs, num + bins_combined) else: continue else: data_back.append((degree, cumexp, 0, bins_combined)) cumexp = 0.0 bins_combined = 0 data_back.reverse() expected = np.array(data_front + data_back) if fan == 'out': assert (sum(expected[:, 3]) == len_target_pop + 1) else: # , 'Something is wrong' assert (sum(expected[:, 3]) == len_source_pop + 1) # np.hstack((np.asarray(data_front)[0], np.asarray(data_back)[0])) return expected # adapted from Masterthesis, Daniel Hjertholm def reset_seed(seed, nr_threads): ''' Reset the simulator and seed the PRNGs. Parameters ---------- seed: PRNG seed value. ''' nest.ResetKernel() nest.SetKernelStatus({'local_num_threads': nr_threads, 'rng_seed': seed}) # copied from Masterthesis, Daniel Hjertholm def chi_squared_check(degrees, expected, distribution=None): ''' Create a single network and compare the resulting degree distribution with the expected distribution using Pearson's chi-squared GOF test. Parameters ---------- seed : PRNG seed value. control: Boolean value. If True, _generate_multinomial_degrees will be used instead of _get_degrees. Return values ------------- chi-squared statistic. p-value from chi-squared test. ''' if distribution in ('pairwise_bernoulli', 'symmetric_pairwise_bernoulli'): observed = {} for degree in degrees: if degree not in observed: observed[degree] = 1 else: observed[degree] += 1 # Add observations to data structure, combining multiple observations # where necessary. expected[:, 2] = 0.0 for row in expected: for i in range(int(row[3])): deg = int(row[0]) + i if deg in observed: row[2] += observed[deg] # ddof: adjustment to the degrees of freedom. df = k-1-ddof return scipy.stats.chisquare(np.array(expected[:, 2]), np.array(expected[:, 1])) else: # ddof: adjustment to the degrees of freedom. df = k-1-ddof return scipy.stats.chisquare(np.array(degrees), np.array(expected))<|fim▁end|>
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
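# A hypothetical usage sketch, not part of the NEST test file above: it strings the
# helpers together the way the connection tests do. It assumes a working NEST 3
# installation and that the module above is importable as test_connect_helpers.
import nest
import test_connect_helpers as hf

nest.ResetKernel()
source = nest.Create('iaf_psc_alpha', 10)
target = nest.Create('iaf_psc_alpha', 10)
p = 0.3
nest.Connect(source, target, {'rule': 'pairwise_bernoulli', 'p': p})

degrees = hf.get_degrees('in', source, target)  # observed in-degrees of the target pool
expected = hf.get_expected_degrees_bernoulli(p, 'in', len(source), len(target))
chi, p_value = hf.chi_squared_check(degrees, expected, 'pairwise_bernoulli')
# A p_value close to 0 would be evidence that the observed degrees do not follow
# the binomial distribution implied by pairwise_bernoulli with probability p.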
<|file_name|>Interfaz_Factura.java<|end_file_name|><|fim▁begin|>/* * To change this license header, choose License Headers in Project Properties. * To change this template file, choose Tools | Templates * and open the template in the editor. */ package modelo.formularios; import controlador.dbConnection; import java.sql.Connection; import java.sql.PreparedStatement; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import javax.swing.JOptionPane; /** * * @author Eisner López Acevedo <eisner.lopez at gmail.com> */ public class Interfaz_Factura { private final dbConnection myLink = new dbConnection();<|fim▁hole|> ResultSet rs = null; PreparedStatement pst = null; public boolean mostrarFactura(String Buscar) { String[] registro = new String[8]; querySQL = "SELECT `factura_cabina`.`factura_id`, " + "`factura_cabina`.`cant_dia`, " + "`factura_cabina`.`fecha`, " + "`factura_cabina`.`impuesto_cabina`, " + "`factura_cabina`.`precio_total_cabina`, " + "`factura_cabina`.`cabina_cabina_id`, " + "`factura_cabina`.`colaborador_empleado_id`, " + "`factura_cabina`.`numero_factura`" + "FROM `pct3`.`factura_cabina`" + "WHERE " + "`factura_cabina`.`numero_factura` = '" + Buscar + "'" + "order by `factura_cabina`.`numero_factura`;"; try { Statement st = conexion.createStatement(); rs = st.executeQuery(querySQL); while (rs.next()) { registro[0] = rs.getString(1); registro[1] = rs.getString(2); registro[2] = rs.getString(3); registro[3] = rs.getString(4); registro[4] = rs.getString(5); registro[5] = rs.getString(6); registro[6] = rs.getString(7); registro[7] = rs.getString(8); } } catch (SQLException sqle) { JOptionPane.showConfirmDialog(null, sqle); } return false; } }<|fim▁end|>
    private final Connection conexion = dbConnection.getConnection();
    private String querySQL = "";
    ResultSet rs = null;
    PreparedStatement pst = null;

    public boolean mostrarFactura(String Buscar) {
        String[] registro = new String[8];
        boolean encontrado = false;
        // Build the SELECT for the given invoice number; the keywords are separated
        // by spaces so the concatenated string is valid SQL.
        querySQL = "SELECT `factura_cabina`.`factura_id`, "
                + "`factura_cabina`.`cant_dia`, "
                + "`factura_cabina`.`fecha`, "
                + "`factura_cabina`.`impuesto_cabina`, "
                + "`factura_cabina`.`precio_total_cabina`, "
                + "`factura_cabina`.`cabina_cabina_id`, "
                + "`factura_cabina`.`colaborador_empleado_id`, "
                + "`factura_cabina`.`numero_factura` "
                + "FROM `pct3`.`factura_cabina` "
                + "WHERE `factura_cabina`.`numero_factura` = '" + Buscar + "' "
                + "ORDER BY `factura_cabina`.`numero_factura`;";
        try {
            Statement st = conexion.createStatement();
            rs = st.executeQuery(querySQL);
            while (rs.next()) {
                // Copy the current row into the local array.
                registro[0] = rs.getString(1);
                registro[1] = rs.getString(2);
                registro[2] = rs.getString(3);
                registro[3] = rs.getString(4);
                registro[4] = rs.getString(5);
                registro[5] = rs.getString(6);
                registro[6] = rs.getString(7);
                registro[7] = rs.getString(8);
                encontrado = true;
            }
        } catch (SQLException sqle) {
            JOptionPane.showConfirmDialog(null, sqle);
        }
        // Report whether at least one matching invoice was found.
        return encontrado;
    }
}<|fim▁end|>
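// A hypothetical variant, not part of the original class above: the same lookup using
// the already-declared PreparedStatement field, so the invoice number is bound as a
// parameter instead of being concatenated into the SQL string. The method name is
// invented, and the sketch assumes it lives inside Interfaz_Factura so that the
// conexion, pst and rs fields are in scope.
public boolean mostrarFacturaPreparada(String numeroFactura) {
    String sql = "SELECT `factura_id`, `cant_dia`, `fecha`, `impuesto_cabina`, "
            + "`precio_total_cabina`, `cabina_cabina_id`, `colaborador_empleado_id`, "
            + "`numero_factura` FROM `pct3`.`factura_cabina` "
            + "WHERE `numero_factura` = ? ORDER BY `numero_factura`";
    try {
        pst = conexion.prepareStatement(sql);
        pst.setString(1, numeroFactura); // the driver escapes the value for us
        rs = pst.executeQuery();
        return rs.next(); // true when the invoice exists
    } catch (SQLException sqle) {
        JOptionPane.showMessageDialog(null, sqle);
        return false;
    }
}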
<|file_name|>ItemPhaseClothHelmet.java<|end_file_name|><|fim▁begin|>/*************************************************************************** * Temporal Convergence * Copyright (C) 2017 * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2.1 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. * * You should have received a copy of the GNU Lesser General Public * License along with this library; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 * USA<|fim▁hole|> **************************************************************************/ package daxum.temporalconvergence.item; import net.minecraft.inventory.EntityEquipmentSlot; import net.minecraft.item.ItemArmor; public class ItemPhaseClothHelmet extends ItemArmor { public ItemPhaseClothHelmet() { super(ModItems.PHASE_CLOTH_ARMOR, 0, EntityEquipmentSlot.HEAD); setRegistryName("phase_cloth_helmet"); setUnlocalizedName("phase_cloth_helmet"); setCreativeTab(ModItems.TEMPCONVTAB); } }<|fim▁end|>
<|file_name|>index.js<|end_file_name|><|fim▁begin|>import React from "react"; import { connect } from "react-redux"; import { withRouter, Route } from "react-router"; import { Link } from "react-router-dom"; import { Entry } from "../../pages/entry"; class BlogCard extends React.Component { render() { return ( <div> <h2>{this.props.title}</h2> <p>{this.props.content}</p> <Link to={`/blog/${this.props.slug}`}>Read more..</Link> </div> );<|fim▁hole|><|fim▁end|>
} } export default withRouter(BlogCard);
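// A hypothetical usage sketch, not part of the file above: it shows the kind of route
// setup the unused `Route` and `Entry` imports suggest, plus how a list view might
// render BlogCard. The post fields, the import path to BlogCard and the /blog path
// are assumptions.
import React from "react";
import { Route } from "react-router";
import BlogCard from "../../components/blog-card"; // assumed path to the file above
import { Entry } from "../../pages/entry";

const BlogIndex = ({ posts }) => (
  <div>
    {posts.map(post => (
      <BlogCard key={post.slug} title={post.title} content={post.excerpt} slug={post.slug} />
    ))}
    {/* The "Read more.." links rendered by BlogCard resolve against this route */}
    <Route path="/blog/:slug" component={Entry} />
  </div>
);

export default BlogIndex;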