prompt
large_stringlengths 70
991k
| completion
large_stringlengths 0
1.02k
|
---|---|
<|file_name|>GuiSolidifier.java<|end_file_name|><|fim▁begin|>package com.teambrmodding.neotech.client.gui.machines.processors;
import com.teambr.bookshelf.client.gui.GuiColor;
import com.teambr.bookshelf.client.gui.GuiTextFormat;
import com.teambr.bookshelf.client.gui.component.control.GuiComponentItemStackButton;
import com.teambr.bookshelf.client.gui.component.display.GuiComponentColoredZone;
import com.teambr.bookshelf.client.gui.component.display.GuiComponentFluidTank;
import com.teambr.bookshelf.client.gui.component.display.GuiComponentTextureAnimated;
import com.teambr.bookshelf.network.PacketManager;
import com.teambr.bookshelf.util.ClientUtils;
import com.teambr.bookshelf.util.EnergyUtils;
import com.teambrmodding.neotech.client.gui.machines.GuiAbstractMachine;
import com.teambrmodding.neotech.collections.EnumInputOutputMode;
import com.teambrmodding.neotech.common.container.machines.processors.ContainerSolidifier;
import com.teambrmodding.neotech.common.tiles.MachineProcessor;
import com.teambrmodding.neotech.common.tiles.machines.processors.TileSolidifier;
import com.teambrmodding.neotech.lib.Reference;
import net.minecraft.entity.player.EntityPlayer;
import net.minecraft.util.EnumFacing;
import net.minecraft.util.ResourceLocation;
import net.minecraftforge.energy.CapabilityEnergy;
import javax.annotation.Nullable;
import java.awt.*;
import java.util.ArrayList;
import java.util.List;
/**
* This file was created for NeoTech
*
* NeoTech is licensed under the
* Creative Commons Attribution-NonCommercial-ShareAlike 4.0 International License:
* http://creativecommons.org/licenses/by-nc-sa/4.0/
*
* @author Paul Davis - pauljoda
* @since 2/17/2017
*/
public class GuiSolidifier extends GuiAbstractMachine<ContainerSolidifier> {
protected TileSolidifier solidifier;
public GuiSolidifier(EntityPlayer player, TileSolidifier solidifier) {
super(new ContainerSolidifier(player.inventory, solidifier), 175, 165, "neotech.electricSolidifier.title",
new ResourceLocation(Reference.MOD_ID, "textures/gui/electricSolidifier.png"), solidifier, player);
this.solidifier = solidifier;
addComponents();
}
/**
* This will be called after the GUI has been initialized and should be where you add all components.
*/
@Override
protected void addComponents() {
if(solidifier != null) {
// Progress Arrow
components.add(new GuiComponentTextureAnimated(this, 95, 35, 176, 80,
24, 17, GuiComponentTextureAnimated.ANIMATION_DIRECTION.RIGHT) {
@Override
protected int getCurrentProgress(int scale) {
return ((MachineProcessor)machine).getCookProgressScaled(24);
}
});
// Power Bar
components.add(new GuiComponentTextureAnimated(this, 16, 12, 176, 97,
16, 62, GuiComponentTextureAnimated.ANIMATION_DIRECTION.UP) {
@Override
protected int getCurrentProgress(int scale) {
return machine.getEnergyStored() * scale / machine.getMaxEnergyStored();
}
/**
* Used to determine if a dynamic tooltip is needed at runtime<|fim▁hole|> *
* @param mouseX Mouse X Pos
* @param mouseY Mouse Y Pos
* @return A list of string to display
*/
@Nullable
@Override
public List<String> getDynamicToolTip(int mouseX, int mouseY) {
List<String> toolTip = new ArrayList<>();
EnergyUtils.addToolTipInfo(machine.getCapability(CapabilityEnergy.ENERGY, null),
toolTip, machine.energyStorage.getMaxInsert(), machine.energyStorage.getMaxExtract());
return toolTip;
}
});
// Input Tanks
components.add(new GuiComponentFluidTank(this, 40, 12, 49, 62, solidifier.tanks[TileSolidifier.TANK]){
/**
* Used to determine if a dynamic tooltip is needed at runtime
*
* @param mouseX Mouse X Pos
* @param mouseY Mouse Y Pos
* @return A list of string to display
*/
@Nullable
@Override
public List<String> getDynamicToolTip(int mouseX, int mouseY) {
List<String> toolTip = new ArrayList<>();
toolTip.add(solidifier.tanks[TileSolidifier.TANK].getFluid() != null ?
GuiColor.ORANGE + solidifier.tanks[TileSolidifier.TANK].getFluid().getLocalizedName() :
GuiColor.RED + ClientUtils.translate("neotech.text.empty"));
toolTip.add(ClientUtils.formatNumber(solidifier.tanks[TileSolidifier.TANK].getFluidAmount()) + " / " +
ClientUtils.formatNumber(solidifier.tanks[TileSolidifier.TANK].getCapacity()) + " mb");
toolTip.add("");
toolTip.add(GuiColor.GRAY + "" + GuiTextFormat.ITALICS + ClientUtils.translate("neotech.text.clearTank"));
return toolTip;
}
/**
* Called when the mouse is pressed
*
* @param x Mouse X Position
* @param y Mouse Y Position
* @param button Mouse Button
*/
@Override
public void mouseDown(int x, int y, int button) {
if(ClientUtils.isCtrlPressed() && ClientUtils.isShiftPressed()) {
solidifier.tanks[TileSolidifier.TANK].setFluid(null);
PacketManager.updateTileWithClientInfo(solidifier);
}
}
});
components.add(new GuiComponentColoredZone(this, 39, 11, 51, 63, new Color(0, 0, 0, 0)){
/**
* Override this to change the color
*
* @return The color, by default the passed color
*/
@Override
protected Color getDynamicColor() {
Color color = new Color(0, 0, 0, 0);
// Checking if input is enabled
for(EnumFacing dir : EnumFacing.values()) {
if(machine.getModeForSide(dir) == EnumInputOutputMode.ALL_MODES) {
color = EnumInputOutputMode.ALL_MODES.getHighlightColor();
break;
}
else if(machine.getModeForSide(dir) == EnumInputOutputMode.INPUT_ALL)
color = EnumInputOutputMode.INPUT_ALL.getHighlightColor();
}
// Color was assigned
if(color.getAlpha() != 0)
color = new Color(color.getRed(), color.getGreen(), color.getBlue(), 80);
return color;
}
});
// Output Item
components.add(new GuiComponentColoredZone(this, 127, 29, 28, 28, new Color(0, 0, 0, 0)){
/**
* Override this to change the color
*
* @return The color, by default the passed color
*/
@Override
protected Color getDynamicColor() {
Color color = new Color(0, 0, 0, 0);
// Checking if input is enabled
for(EnumFacing dir : EnumFacing.values()) {
if(machine.getModeForSide(dir) == EnumInputOutputMode.ALL_MODES) {
color = EnumInputOutputMode.ALL_MODES.getHighlightColor();
break;
}
else if(machine.getModeForSide(dir) == EnumInputOutputMode.OUTPUT_ALL)
color = EnumInputOutputMode.OUTPUT_ALL.getHighlightColor();
}
// Color was assigned
if(color.getAlpha() != 0)
color = new Color(color.getRed(), color.getGreen(), color.getBlue(), 80);
return color;
}
});
// Item Stack Button
components.add(new GuiComponentItemStackButton(this, 96, 54, 224, 111, 22, 22,
solidifier.currentMode.getDisplayStack()) {
@Override
protected void doAction() {
solidifier.toggleMode();
solidifier.sendValueToServer(TileSolidifier.UPDATE_MODE_NBT, 0);
setDisplayStack(solidifier.currentMode.getDisplayStack());
}
});
}
}
}<|fim▁end|> | |
<|file_name|>view_environment.py<|end_file_name|><|fim▁begin|># coding: utf-8
# Copyright (c) Pymatgen Development Team.
# Distributed under the terms of the MIT License.
from __future__ import division, unicode_literals
"""
Script to visualize the model coordination environments
"""
__author__ = "David Waroquiers"
__copyright__ = "Copyright 2012, The Materials Project"
__version__ = "2.0"
__maintainer__ = "David Waroquiers"
__email__ = "[email protected]"
__date__ = "Feb 20, 2016"
from pymatgen.analysis.chemenv.coordination_environments.coordination_geometries import AllCoordinationGeometries
from pymatgen.analysis.chemenv.coordination_environments.coordination_geometries import SEPARATION_PLANE
from pymatgen.analysis.chemenv.utils.scripts_utils import visualize
from pymatgen.analysis.chemenv.utils.coordination_geometry_utils import Plane
import numpy as np
if __name__ == '__main__':
print('+-------------------------------------------------------+\n'
'| Development script of the ChemEnv utility of pymatgen |\n'
'| Visualization of the model coordination environments |\n'
'+-------------------------------------------------------+\n')
allcg = AllCoordinationGeometries()
vis = None
while True:
cg_symbol = raw_input('Enter symbol of the geometry you want to see, "l" to see the list '
'of existing geometries or "q" to quit : ')
if cg_symbol == 'q':
break
if cg_symbol == 'l':
print(allcg.pretty_print(maxcn=13, additional_info={'nb_hints': True}))
continue
try:
cg = allcg[cg_symbol]
except LookupError:
print('Wrong geometry, try again ...')
continue
print(cg.name)
for ipoint, point in enumerate(cg.points):
print('Point #{:d} : {} {} {}'.format(ipoint, repr(point[0]), repr(point[1]), repr(point[2])))
print('Algorithms used :')
for ialgo, algo in enumerate(cg.algorithms):
print('Algorithm #{:d} :'.format(ialgo))
print(algo)
print('')
# Visualize the separation plane of a given algorithm
sepplane = False<|fim▁hole|> test = raw_input('Enter index of the algorithm for which you want to visualize the plane : ')
if test != '':
try:
ialgo = int(test)
algo = cg.algorithms[ialgo]
sepplane = True
except:
print('Unable to determine the algorithm/separation_plane you want '
'to visualize for this geometry. Continues without ...')
myfactor = 3.0
if vis is None:
vis = visualize(cg=cg, zoom=1.0, myfactor=myfactor)
else:
vis = visualize(cg=cg, vis=vis, myfactor=myfactor)
cg_points = [myfactor*np.array(pp) for pp in cg.points]
cg_central_site = myfactor*np.array(cg.central_site)
if sepplane:
pts = [cg_points[ii] for ii in algo.plane_points]
if algo.minimum_number_of_points == 2:
pts.append(cg_central_site)
centre = cg_central_site
else:
centre = np.sum(pts, axis=0) / len(pts)
factor = 1.5
target_dist = max([np.dot(pp-centre, pp-centre) for pp in cg_points])
current_dist = np.dot(pts[0] - centre, pts[0] - centre)
factor = factor * target_dist / current_dist
plane = Plane.from_npoints(points=pts)
p1 = centre + factor * (pts[0] - centre)
perp = factor * np.cross(pts[0] - centre, plane.normal_vector)
p2 = centre + perp
p3 = centre - factor * (pts[0] - centre)
p4 = centre - perp
vis.add_faces([[p1, p2, p3, p4]], [1.0, 0.0, 0.0], opacity=0.5)
target_radius = 0.25
radius = 1.5 * target_radius
if algo.minimum_number_of_points == 2:
vis.add_partial_sphere(coords=cg_central_site, radius=radius,
color=[1.0, 0.0, 0.0], start=0, end=360,
opacity=0.5)
for pp in pts:
vis.add_partial_sphere(coords=pp, radius=radius,
color=[1.0, 0.0, 0.0], start=0, end=360,
opacity=0.5)
ps1 = [cg_points[ii] for ii in algo.point_groups[0]]
ps2 = [cg_points[ii] for ii in algo.point_groups[1]]
for pp in ps1:
vis.add_partial_sphere(coords=pp, radius=radius,
color=[0.0, 1.0, 0.0], start=0, end=360,
opacity=0.5)
for pp in ps2:
vis.add_partial_sphere(coords=pp, radius=radius,
color=[0.0, 0.0, 1.0], start=0, end=360,
opacity=0.5)
vis.show()<|fim▁end|> | if any([algo.algorithm_type == SEPARATION_PLANE for algo in cg.algorithms]): |
<|file_name|>EPGWindow.py<|end_file_name|><|fim▁begin|># Copyright (C) 2013 Lunatixz
#
#
# This file is part of PseudoTV.
#
# PseudoTV is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# PseudoTV is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with PseudoTV. If not, see <http://www.gnu.org/licenses/>.
import xbmc, xbmcgui, xbmcaddon
import subprocess, os
import time, threading
import datetime, traceback
import sys, re
import urllib
import urllib2
import fanarttv
from Playlist import Playlist
from Globals import *
from Channel import Channel
from ChannelList import ChannelList
from FileAccess import FileLock, FileAccess
from xml.etree import ElementTree as ET
from fanarttv import *
from Downloader import *
class EPGWindow(xbmcgui.WindowXMLDialog):
def __init__(self, *args, **kwargs):
self.focusRow = 0
self.focusIndex = 0
self.focusTime = 0
self.focusEndTime = 0
self.shownTime = 0
self.centerChannel = 0
self.rowCount = 6
self.channelButtons = [None] * self.rowCount
self.buttonCache = []
self.buttonCount = 0
self.actionSemaphore = threading.BoundedSemaphore()
self.lastActionTime = time.time()
self.channelLogos = ''
self.textcolor = "FFFFFFFF"
self.focusedcolor = "FF7d7d7d"
self.clockMode = 0
self.textfont = "font14"
self.startup = time.time()
self.showingInfo = False
self.infoOffset = 0
self.infoOffsetV = 0
self.Downloader = Downloader()
self.log('Using EPG Coloring = ' + str(REAL_SETTINGS.getSetting('EPGcolor_enabled')))
self.AltmediaPath = xbmc.translatePath(os.path.join(ADDON_INFO, 'resources', 'skins', 'default', 'media')) + '/'
#Set skin media folder, else default
if os.path.exists(xbmc.translatePath(os.path.join(ADDON_INFO, 'resources', 'skins', Skin_Select, 'media'))):
self.mediaPath = xbmc.translatePath(os.path.join(ADDON_INFO, 'resources', 'skins', Skin_Select, 'media')) + '/'
else:
self.mediaPath = self.AltmediaPath
self.log('Mediapath is ' + self.mediaPath)
# Use the given focus and non-focus textures if they exist. Otherwise use the defaults.
if os.path.exists(self.mediaPath + BUTTON_FOCUS):
self.textureButtonFocus = self.mediaPath + BUTTON_FOCUS
elif xbmc.skinHasImage(self.mediaPath + BUTTON_FOCUS):
self.textureButtonFocus = self.mediaPath + BUTTON_FOCUS
else:
self.textureButtonFocus = 'pstvlButtonFocus.png'
if os.path.exists(self.mediaPath + BUTTON_NO_FOCUS):
self.textureButtonNoFocus = self.mediaPath + BUTTON_NO_FOCUS
elif xbmc.skinHasImage(self.mediaPath + BUTTON_NO_FOCUS):
self.textureButtonNoFocus = self.mediaPath + BUTTON_NO_FOCUS
else:
self.textureButtonNoFocus = 'pstvlButtonNoFocus.png'
for i in range(self.rowCount):
self.channelButtons[i] = []
self.clockMode = ADDON_SETTINGS.getSetting("ClockMode")
self.toRemove = []
def onFocus(self, controlid):
pass
# set the time labels
def setTimeLabels(self, thetime):
self.log('setTimeLabels')
now = datetime.datetime.fromtimestamp(thetime)
self.getControl(104).setLabel(now.strftime('%A, %b %d'))
delta = datetime.timedelta(minutes=30)
for i in range(3):
if self.clockMode == "0":
self.getControl(101 + i).setLabel(now.strftime("%I:%M%p").lower())
else:
self.getControl(101 + i).setLabel(now.strftime("%H:%M"))
now = now + delta
self.log('setTimeLabels return')
self.log('thetime ' + str(now))
def log(self, msg, level = xbmc.LOGDEBUG):
log('EPGWindow: ' + msg, level)
def logDebug(self, msg, level = xbmc.LOGDEBUG):
if REAL_SETTINGS.getSetting('enable_Debug') == "true":
log('EPGWindow: ' + msg, level)
def onInit(self):
self.log('onInit')
timex, timey = self.getControl(120).getPosition()
timew = self.getControl(120).getWidth()
timeh = self.getControl(120).getHeight()
#Set timebar path, else use alt. path
if os.path.exists(xbmc.translatePath(os.path.join(ADDON_INFO, 'resources', 'skins', Skin_Select, 'media', TIME_BAR))):
self.currentTimeBar = xbmcgui.ControlImage(timex, timey, timew, timeh, self.mediaPath + TIME_BAR)
else:
self.currentTimeBar = xbmcgui.ControlImage(timex, timey, timew, timeh, self.AltmediaPath + TIME_BAR)
self.log('Mediapath Time_Bar = ' + self.mediaPath + TIME_BAR)
self.addControl(self.currentTimeBar)
### Skin labels, Set textcolor, focusedcolor and font. Rowcount todo ###
try:
textcolor = int(self.getControl(100).getLabel(), 16)
if textcolor > 0:
self.textcolor = hex(textcolor)[2:]
self.logDebug("onInit.Self.textcolor = " + str(self.textcolor))
except:
pass
try:
focusedcolor = int(self.getControl(99).getLabel(), 16)
if focusedcolor > 0:
self.focusedcolor = hex(focusedcolor)[2:]
self.logDebug("onInit.Self.focusedcolor = " + str(self.focusedcolor))
except:
pass
try:
self.textfont = self.getControl(105).getLabel()
self.logDebug("onInit.Self.textfont = " + str(self.textfont))
except:
pass
# try:
# self.rowCount = self.getControl(106).getLabel()
# self.logDebug("onInit, Self.rowCount = " + str(self.rowCount))
# except:
# pass
##################################################################
try:
if self.setChannelButtons(time.time(), self.MyOverlayWindow.currentChannel) == False:
self.log('Unable to add channel buttons')
return
curtime = time.time()
self.focusIndex = -1
basex, basey = self.getControl(113).getPosition()
baseh = self.getControl(113).getHeight()
basew = self.getControl(113).getWidth()
# set the button that corresponds to the currently playing show
for i in range(len(self.channelButtons[2])):
left, top = self.channelButtons[2][i].getPosition()
width = self.channelButtons[2][i].getWidth()
left = left - basex
starttime = self.shownTime + (left / (basew / 5400.0))
endtime = starttime + (width / (basew / 5400.0))
if curtime >= starttime and curtime <= endtime:
self.focusIndex = i
self.setFocus(self.channelButtons[2][i])
self.focusTime = int(time.time())
self.focusEndTime = endtime
break
# If nothing was highlighted, just select the first button
if self.focusIndex == -1:
self.focusIndex = 0
self.setFocus(self.channelButtons[2][0])
left, top = self.channelButtons[2][0].getPosition()
width = self.channelButtons[2][0].getWidth()
left = left - basex
starttime = self.shownTime + (left / (basew / 5400.0))
endtime = starttime + (width / (basew / 5400.0))
self.focusTime = int(starttime + 30)
self.focusEndTime = endtime
self.focusRow = 2
self.setShowInfo()
except:
self.log("Unknown EPG Initialization Exception", xbmc.LOGERROR)
self.log(traceback.format_exc(), xbmc.LOGERROR)
try:
self.close()
except:
self.log("Error closing", xbmc.LOGERROR)
self.MyOverlayWindow.sleepTimeValue = 1
self.MyOverlayWindow.startSleepTimer()
return
self.log('onInit return')
# setup all channel buttons for a given time
def setChannelButtons(self, starttime, curchannel, singlerow = -1):
self.log('setChannelButtons ' + str(starttime) + ', ' + str(curchannel))
self.centerChannel = self.MyOverlayWindow.fixChannel(curchannel)
# This is done twice to guarantee we go back 2 channels. If the previous 2 channels
# aren't valid, then doing a fix on curchannel - 2 may result in going back only
# a single valid channel.
curchannel = self.MyOverlayWindow.fixChannel(curchannel - 1, False)
curchannel = self.MyOverlayWindow.fixChannel(curchannel - 1, False)
starttime = self.roundToHalfHour(int(starttime))
self.setTimeLabels(starttime)
self.shownTime = starttime
basex, basey = self.getControl(111).getPosition()
basew = self.getControl(111).getWidth()
tmpx, tmpy = self.getControl(110 + self.rowCount).getPosition()
timex, timey = self.getControl(120).getPosition()
timew = self.getControl(120).getWidth()
timeh = self.getControl(120).getHeight()
basecur = curchannel
self.toRemove.append(self.currentTimeBar)
myadds = []
for i in range(self.rowCount):
if singlerow == -1 or singlerow == i:
self.setButtons(starttime, basecur, i)
myadds.extend(self.channelButtons[i])
basecur = self.MyOverlayWindow.fixChannel(basecur + 1)
basecur = curchannel
for i in range(self.rowCount):
self.getControl(301 + i).setLabel(self.MyOverlayWindow.channels[basecur - 1].name)
basecur = self.MyOverlayWindow.fixChannel(basecur + 1)
for i in range(self.rowCount):
try:
self.getControl(311 + i).setLabel(str(curchannel))
except:
pass
try:
if REAL_SETTINGS.getSetting("ColorEPG") == "true":
self.getControl(321 + i).setImage(self.channelLogos + self.MyOverlayWindow.channels[curchannel - 1].name + '_c.png')
else:
self.getControl(321 + i).setImage(self.channelLogos + self.MyOverlayWindow.channels[curchannel - 1].name + '.png')
except:
pass
curchannel = self.MyOverlayWindow.fixChannel(curchannel + 1)
if time.time() >= starttime and time.time() < starttime + 5400:
dif = int((starttime + 5400 - time.time()))
self.currentTimeBar.setPosition(int((basex + basew - 2) - (dif * (basew / 5400.0))), timey)
else:
if time.time() < starttime:
self.currentTimeBar.setPosition(basex + 2, timey)
else:
self.currentTimeBar.setPosition(basex + basew - 2 - timew, timey)
myadds.append(self.currentTimeBar)
try:
self.removeControls(self.toRemove)
except:
for cntrl in self.toRemove:
try:
self.removeControl(cntrl)
except:
pass
self.addControls(myadds)
self.toRemove = []
self.log('setChannelButtons return')
# round the given time down to the nearest half hour
def roundToHalfHour(self, thetime):
n = datetime.datetime.fromtimestamp(thetime)
delta = datetime.timedelta(minutes=30)
if n.minute > 29:
n = n.replace(minute=30, second=0, microsecond=0)
else:
n = n.replace(minute=0, second=0, microsecond=0)
return time.mktime(n.timetuple())
# create the buttons for the specified channel in the given row
def setButtons(self, starttime, curchannel, row):
self.log('setButtons ' + str(starttime) + ", " + str(curchannel) + ", " + str(row))
try:
curchannel = self.MyOverlayWindow.fixChannel(curchannel)
basex, basey = self.getControl(111 + row).getPosition()
baseh = self.getControl(111 + row).getHeight()
basew = self.getControl(111 + row).getWidth()
chtype = int(ADDON_SETTINGS.getSetting('Channel_' + str(curchannel) + '_type'))
self.lastExitTime = (ADDON_SETTINGS.getSetting("LastExitTime"))
self.log('chtype = ' + str(chtype))
if xbmc.Player().isPlaying() == False:
self.log('No video is playing, not adding buttons')
self.closeEPG()
return False
# Backup all of the buttons to an array
self.toRemove.extend(self.channelButtons[row])
del self.channelButtons[row][:]
# if the channel is paused, then only 1 button needed
nowDate = datetime.datetime.now()
self.log("setbuttonnowtime " + str(nowDate))
if self.MyOverlayWindow.channels[curchannel - 1].isPaused:
self.channelButtons[row].append(xbmcgui.ControlButton(basex, basey, basew, baseh, self.MyOverlayWindow.channels[curchannel - 1].getCurrentTitle() + " (paused)", focusTexture=self.textureButtonFocus, noFocusTexture=self.textureButtonNoFocus, alignment=4, textColor=self.textcolor, focusedColor=self.focusedcolor))
else:
# Find the show that was running at the given time
# Use the current time and show offset to calculate it
# At timedif time, channelShowPosition was playing at channelTimes
# The only way this isn't true is if the current channel is curchannel since
# it could have been fast forwarded or rewinded (rewound)?
if curchannel == self.MyOverlayWindow.currentChannel: #currentchannel epg
#Live TV pull date from the playlist entry
if chtype == 8:
playlistpos = int(xbmc.PlayList(xbmc.PLAYLIST_VIDEO).getposition())
#episodetitle is actually the start time of each show that the playlist gets from channellist.py
tmpDate = self.MyOverlayWindow.channels[curchannel - 1].getItemtimestamp(playlistpos)
self.log("setButtons.setbuttonnowtime2 " + str(tmpDate))
t = time.strptime(tmpDate, '%Y-%m-%d %H:%M:%S')
epochBeginDate = time.mktime(t)
#beginDate = datetime.datetime(t.tm_year, t.tm_mon, t.tm_mday, t.tm_hour, t.tm_min, t.tm_sec)
#videotime = (nowDate - beginDate).seconds
videotime = time.time() - epochBeginDate
reftime = time.time()
else:
playlistpos = int(xbmc.PlayList(xbmc.PLAYLIST_VIDEO).getposition())
videotime = xbmc.Player().getTime()
reftime = time.time()
else:
#Live TV pull date from the playlist entry
if chtype == 8:
playlistpos = self.MyOverlayWindow.channels[curchannel - 1].playlistPosition
#playlistpos = int(xbmc.PlayList(xbmc.PLAYLIST_VIDEO).getposition())
#episodetitle is actually the start time of each show that the playlist gets from channellist.py
tmpDate = self.MyOverlayWindow.channels[curchannel - 1].getItemtimestamp(playlistpos)
self.log("setButtons.setbuttonnowtime2 " + str(tmpDate))
t = time.strptime(tmpDate, '%Y-%m-%d %H:%M:%S')
epochBeginDate = time.mktime(t)
#beginDate = datetime.datetime(t.tm_year, t.tm_mon, t.tm_mday, t.tm_hour, t.tm_min, t.tm_sec)
#videotime = (nowDate - beginDate).seconds
#loop to ensure we get the current show in the playlist
while epochBeginDate + self.MyOverlayWindow.channels[curchannel - 1].getItemDuration(playlistpos) < time.time():
epochBeginDate += self.MyOverlayWindow.channels[curchannel - 1].getItemDuration(playlistpos)
playlistpos = self.MyOverlayWindow.channels[curchannel - 1].fixPlaylistIndex(playlistpos + 1)
videotime = time.time() - epochBeginDate
reftime = time.time()
else:
playlistpos = self.MyOverlayWindow.channels[curchannel - 1].playlistPosition #everyotherchannel epg
videotime = self.MyOverlayWindow.channels[curchannel - 1].showTimeOffset
reftime = self.MyOverlayWindow.channels[curchannel - 1].lastAccessTime
self.log('videotime & reftime + starttime + channel === ' + str(videotime) + ', ' + str(reftime) + ', ' + str(starttime) + ', ' + str(curchannel))
# normalize reftime to the beginning of the video
reftime -= videotime
while reftime > starttime:
playlistpos -= 1
# No need to check bounds on the playlistpos, the duration function makes sure it is correct
reftime -= self.MyOverlayWindow.channels[curchannel - 1].getItemDuration(playlistpos)
<|fim▁hole|> playlistpos += 1
# create a button for each show that runs in the next hour and a half
endtime = starttime + 5400
totaltime = 0
totalloops = 0
while reftime < endtime and totalloops < 1000:
xpos = int(basex + (totaltime * (basew / 5400.0)))
tmpdur = self.MyOverlayWindow.channels[curchannel - 1].getItemDuration(playlistpos)
shouldskip = False
# this should only happen the first time through this loop
# it shows the small portion of the show before the current one
if reftime < starttime:
tmpdur -= starttime - reftime
reftime = starttime
if tmpdur < 60 * 3:
shouldskip = True
# Don't show very short videos
if self.MyOverlayWindow.hideShortItems and shouldskip == False and chtype <= 7:
if self.MyOverlayWindow.channels[curchannel - 1].getItemDuration(playlistpos) < self.MyOverlayWindow.shortItemLength:
shouldskip = True
tmpdur = 0
else:
nextlen = self.MyOverlayWindow.channels[curchannel - 1].getItemDuration(playlistpos + 1)
prevlen = self.MyOverlayWindow.channels[curchannel - 1].getItemDuration(playlistpos - 1)
if nextlen < 60:
tmpdur += nextlen / 2
if prevlen < 60:
tmpdur += prevlen / 2
width = int((basew / 5400.0) * tmpdur)
if width < 30 and shouldskip == False:
width = 30
tmpdur = int(30.0 / (basew / 5400.0))
if width + xpos > basex + basew:
width = basex + basew - xpos
if shouldskip == False and width >= 30:
mylabel = self.MyOverlayWindow.channels[curchannel - 1].getItemTitle(playlistpos)
mygenre = self.MyOverlayWindow.channels[curchannel - 1].getItemgenre(playlistpos)
chtype = int(ADDON_SETTINGS.getSetting('Channel_' + str(curchannel) + '_type'))
self.logDebug('setButtons.mygenre = ' + str(mygenre))
if REAL_SETTINGS.getSetting('EPGcolor_enabled') == '1':
if FileAccess.exists(EPGGENRE_LOC + mygenre + '.png'):
self.textureButtonNoFocusGenre = (EPGGENRE_LOC + mygenre + '.png')
else:
self.textureButtonNoFocusGenre = (EPGGENRE_LOC + 'Unknown' + '.png')
self.channelButtons[row].append(xbmcgui.ControlButton(xpos, basey, width, baseh, mylabel, focusTexture=self.textureButtonFocus, noFocusTexture=self.textureButtonNoFocusGenre, alignment=4, font=self.textfont, textColor=self.textcolor, focusedColor=self.focusedcolor))
elif REAL_SETTINGS.getSetting('EPGcolor_enabled') == '2':
if FileAccess.exists(EPGGENRE_LOC + str(chtype) + '.png'):
self.textureButtonNoFocusChtype = (EPGGENRE_LOC + str(chtype) + '.png')
else:
self.textureButtonNoFocusGenre = (EPGGENRE_LOC + 'Unknown' + '.png')
self.channelButtons[row].append(xbmcgui.ControlButton(xpos, basey, width, baseh, mylabel, focusTexture=self.textureButtonFocus, noFocusTexture=self.textureButtonNoFocusChtype, alignment=4, font=self.textfont, textColor=self.textcolor, focusedColor=self.focusedcolor))
else:
self.channelButtons[row].append(xbmcgui.ControlButton(xpos, basey, width, baseh, mylabel, focusTexture=self.textureButtonFocus, noFocusTexture=self.textureButtonNoFocus, alignment=4, font=self.textfont, textColor=self.textcolor, focusedColor=self.focusedcolor))
totaltime += tmpdur
reftime += tmpdur
playlistpos += 1
totalloops += 1
if totalloops >= 1000:
self.log("Broken big loop, too many loops, reftime is " + str(reftime) + ", endtime is " + str(endtime))
# If there were no buttons added, show some default button
if len(self.channelButtons[row]) == 0:
self.channelButtons[row].append(xbmcgui.ControlButton(basex, basey, basew, baseh, self.MyOverlayWindow.channels[curchannel - 1].name, focusTexture=self.textureButtonFocus, noFocusTexture=self.textureButtonNoFocus, alignment=4, textColor=self.textcolor, focusedColor=self.focusedcolor))
except:
self.log("Exception in setButtons", xbmc.LOGERROR)
self.log(traceback.format_exc(), xbmc.LOGERROR)
self.log('setButtons return')
return True
def onAction(self, act):
self.log('onAction ' + str(act.getId()))
if self.actionSemaphore.acquire(False) == False:
self.log('Unable to get semaphore')
return
action = act.getId()
try:
if action in ACTION_PREVIOUS_MENU:
self.closeEPG()
if self.showingInfo:
self.infoOffset = 0
self.infoOffsetV = 0
elif action == ACTION_MOVE_DOWN:
self.GoDown()
if self.showingInfo:
self.infoOffsetV -= 1
elif action == ACTION_MOVE_UP:
self.GoUp()
if self.showingInfo:
self.infoOffsetV += 1
elif action == ACTION_MOVE_LEFT:
self.GoLeft()
if self.showingInfo:
self.infoOffset -= 1
elif action == ACTION_MOVE_RIGHT:
self.GoRight()
if self.showingInfo:
self.infoOffset += 1
elif action == ACTION_STOP:
self.closeEPG()
if self.showingInfo:
self.infoOffset = 0
self.infoOffsetV = 0
elif action == ACTION_SELECT_ITEM:
lastaction = time.time() - self.lastActionTime
if self.showingInfo:
self.infoOffset = 0
self.infoOffsetV = 0
if lastaction >= 2:
self.selectShow()
self.closeEPG()
self.lastActionTime = time.time()
elif action == ACTION_MOVE_DOWN:
self.GoDown()
if self.showingInfo:
self.infoOffsetV -= 1
elif action == ACTION_PAGEDOWN:
self.GoPgDown()
elif action == ACTION_MOVE_UP:
self.GoUp()
if self.showingInfo:
self.infoOffsetV += 1
elif action == ACTION_PAGEUP:
self.GoPgUp()
except:
self.log("Unknown EPG Exception", xbmc.LOGERROR)
self.log(traceback.format_exc(), xbmc.LOGERROR)
try:
self.close()
except:
self.log("Error closing", xbmc.LOGERROR)
self.MyOverlayWindow.sleepTimeValue = 1
self.MyOverlayWindow.startSleepTimer()
return
self.actionSemaphore.release()
self.log('onAction return')
def closeEPG(self):
self.log('closeEPG')
try:
self.removeControl(self.currentTimeBar)
self.MyOverlayWindow.startSleepTimer()
except:
pass
self.close()
def onControl(self, control):
self.log('onControl')
# Run when a show is selected, so close the epg and run the show
def onClick(self, controlid):
self.log('onClick')
if self.actionSemaphore.acquire(False) == False:
self.log('Unable to get semaphore')
return
lastaction = time.time() - self.lastActionTime
if lastaction >= 2:
try:
selectedbutton = self.getControl(controlid)
except:
self.actionSemaphore.release()
self.log('onClick unknown controlid ' + str(controlid))
return
for i in range(self.rowCount):
for x in range(len(self.channelButtons[i])):
mycontrol = 0
mycontrol = self.channelButtons[i][x]
if selectedbutton == mycontrol:
self.focusRow = i
self.focusIndex = x
self.selectShow()
self.closeEPG()
self.lastActionTime = time.time()
self.actionSemaphore.release()
self.log('onClick found button return')
return
self.lastActionTime = time.time()
self.closeEPG()
self.actionSemaphore.release()
self.log('onClick return')
def GoPgDown(self):
self.log('GoPgDown')
newchannel = self.centerChannel
for x in range(0, 6):
newchannel = self.MyOverlayWindow.fixChannel(newchannel + 1)
self.setChannelButtons(self.shownTime, self.MyOverlayWindow.fixChannel(newchannel))
self.setProperButton(0)
self.log('GoPgDown return')
def GoPgUp(self):
self.log('GoPgUp')
newchannel = self.centerChannel
for x in range(0, 6):
newchannel = self.MyOverlayWindow.fixChannel(newchannel - 1, False)
self.setChannelButtons(self.shownTime, self.MyOverlayWindow.fixChannel(newchannel))
self.setProperButton(0)
self.log('GoPgUp return')
def GoDown(self):
self.log('goDown')
# change controls to display the proper junks
if self.focusRow == self.rowCount - 1:
self.setChannelButtons(self.shownTime, self.MyOverlayWindow.fixChannel(self.centerChannel + 1))
self.focusRow = self.rowCount - 2
self.setProperButton(self.focusRow + 1)
self.log('goDown return')
def GoUp(self):
self.log('goUp')
# same as godown
# change controls to display the proper junks
if self.focusRow == 0:
self.setChannelButtons(self.shownTime, self.MyOverlayWindow.fixChannel(self.centerChannel - 1, False))
self.focusRow = 1
self.setProperButton(self.focusRow - 1)
self.log('goUp return')
def GoLeft(self):
self.log('goLeft')
basex, basey = self.getControl(111 + self.focusRow).getPosition()
basew = self.getControl(111 + self.focusRow).getWidth()
# change controls to display the proper junks
if self.focusIndex == 0:
left, top = self.channelButtons[self.focusRow][self.focusIndex].getPosition()
width = self.channelButtons[self.focusRow][self.focusIndex].getWidth()
left = left - basex
starttime = self.shownTime + (left / (basew / 5400.0))
self.setChannelButtons(self.shownTime - 1800, self.centerChannel)
curbutidx = self.findButtonAtTime(self.focusRow, starttime + 30)
if(curbutidx - 1) >= 0:
self.focusIndex = curbutidx - 1
else:
self.focusIndex = 0
else:
self.focusIndex -= 1
left, top = self.channelButtons[self.focusRow][self.focusIndex].getPosition()
width = self.channelButtons[self.focusRow][self.focusIndex].getWidth()
left = left - basex
starttime = self.shownTime + (left / (basew / 5400.0))
endtime = starttime + (width / (basew / 5400.0))
self.setFocus(self.channelButtons[self.focusRow][self.focusIndex])
self.setShowInfo()
self.focusEndTime = endtime
self.focusTime = starttime + 30
self.log('goLeft return')
def GoRight(self):
self.log('goRight')
basex, basey = self.getControl(111 + self.focusRow).getPosition()
basew = self.getControl(111 + self.focusRow).getWidth()
# change controls to display the proper junks
if self.focusIndex == len(self.channelButtons[self.focusRow]) - 1:
left, top = self.channelButtons[self.focusRow][self.focusIndex].getPosition()
width = self.channelButtons[self.focusRow][self.focusIndex].getWidth()
left = left - basex
starttime = self.shownTime + (left / (basew / 5400.0))
self.setChannelButtons(self.shownTime + 1800, self.centerChannel)
curbutidx = self.findButtonAtTime(self.focusRow, starttime + 30)
if(curbutidx + 1) < len(self.channelButtons[self.focusRow]):
self.focusIndex = curbutidx + 1
else:
self.focusIndex = len(self.channelButtons[self.focusRow]) - 1
else:
self.focusIndex += 1
left, top = self.channelButtons[self.focusRow][self.focusIndex].getPosition()
width = self.channelButtons[self.focusRow][self.focusIndex].getWidth()
left = left - basex
starttime = self.shownTime + (left / (basew / 5400.0))
endtime = starttime + (width / (basew / 5400.0))
self.setFocus(self.channelButtons[self.focusRow][self.focusIndex])
self.setShowInfo()
self.focusEndTime = endtime
self.focusTime = starttime + 30
self.log('goRight return')
def findButtonAtTime(self, row, selectedtime):
self.log('findButtonAtTime ' + str(row))
basex, basey = self.getControl(111 + row).getPosition()
baseh = self.getControl(111 + row).getHeight()
basew = self.getControl(111 + row).getWidth()
for i in range(len(self.channelButtons[row])):
left, top = self.channelButtons[row][i].getPosition()
width = self.channelButtons[row][i].getWidth()
left = left - basex
starttime = self.shownTime + (left / (basew / 5400.0))
endtime = starttime + (width / (basew / 5400.0))
if selectedtime >= starttime and selectedtime <= endtime:
return i
return -1
# based on the current focus row and index, find the appropriate button in
# the new row to set focus to
def setProperButton(self, newrow, resetfocustime = False):
self.log('setProperButton ' + str(newrow))
self.focusRow = newrow
basex, basey = self.getControl(111 + newrow).getPosition()
baseh = self.getControl(111 + newrow).getHeight()
basew = self.getControl(111 + newrow).getWidth()
for i in range(len(self.channelButtons[newrow])):
left, top = self.channelButtons[newrow][i].getPosition()
width = self.channelButtons[newrow][i].getWidth()
left = left - basex
starttime = self.shownTime + (left / (basew / 5400.0))
endtime = starttime + (width / (basew / 5400.0))
if self.focusTime >= starttime and self.focusTime <= endtime:
self.focusIndex = i
self.setFocus(self.channelButtons[newrow][i])
self.setShowInfo()
self.focusEndTime = endtime
if resetfocustime:
self.focusTime = starttime + 30
self.log('setProperButton found button return')
return
self.focusIndex = 0
self.setFocus(self.channelButtons[newrow][0])
left, top = self.channelButtons[newrow][0].getPosition()
width = self.channelButtons[newrow][0].getWidth()
left = left - basex
starttime = self.shownTime + (left / (basew / 5400.0))
endtime = starttime + (width / (basew / 5400.0))
self.focusEndTime = endtime
if resetfocustime:
self.focusTime = starttime + 30
self.setShowInfo()
self.log('setProperButton return')
def setShowInfo(self):
self.log('setShowInfo')
self.showingInfo = True
basex, basey = self.getControl(111 + self.focusRow).getPosition()
baseh = self.getControl(111 + self.focusRow).getHeight()
basew = self.getControl(111 + self.focusRow).getWidth()
# use the selected time to set the video
left, top = self.channelButtons[self.focusRow][self.focusIndex].getPosition()
width = self.channelButtons[self.focusRow][self.focusIndex].getWidth()
left = left - basex + (width / 2)
starttime = self.shownTime + (left / (basew / 5400.0))
chnoffset = self.focusRow - 2
newchan = self.centerChannel
while chnoffset != 0:
if chnoffset > 0:
newchan = self.MyOverlayWindow.fixChannel(newchan + 1, True)
chnoffset -= 1
else:
newchan = self.MyOverlayWindow.fixChannel(newchan - 1, False)
chnoffset += 1
plpos = self.determinePlaylistPosAtTime(starttime, newchan)
if plpos == -1:
self.log('Unable to find the proper playlist to set from EPG')
return
if REAL_SETTINGS.getSetting("tvdb.enabled") == "true" and REAL_SETTINGS.getSetting("tmdb.enabled") == "true" and REAL_SETTINGS.getSetting("fandb.enabled") == "true":
self.apis = True
else:
self.apis = False
if REAL_SETTINGS.getSetting("art.enable") == "true" or REAL_SETTINGS.getSetting("Live.art.enable") == "true":
if self.infoOffset > 0:
self.getControl(522).setLabel('COMING UP:')
elif self.infoOffset < 0:
self.getControl(522).setLabel('ALREADY SEEN:')
elif self.infoOffset == 0 and self.infoOffsetV == 0:
self.getControl(522).setLabel('NOW WATCHING:')
elif self.infoOffsetV < 0 and self.infoOffset == 0:
self.getControl(522).setLabel('ON NOW:')
elif self.infoOffsetV > 0 and self.infoOffset == 0:
self.getControl(522).setLabel('ON NOW:')
elif self.infoOffset == 0 and self.infoOffsetV == 0:
self.getControl(522).setLabel('NOW WATCHING:')
else:
self.getControl(522).setLabel('NOW WATCHING:')
tvdbid = 0
imdbid = 0
dbid = 0
Artpath = xbmc.translatePath(os.path.join(ART_LOC))
self.logDebug('setShowInfo.Artpath.1 = ' + uni(Artpath))
mediapath = uni(self.MyOverlayWindow.channels[newchan - 1].getItemFilename(plpos))
self.logDebug('setShowInfo.mediapath.1 = ' + uni(mediapath))
chtype = int(ADDON_SETTINGS.getSetting('Channel_' + str(newchan) + '_type'))
genre = uni(self.MyOverlayWindow.channels[newchan - 1].getItemgenre(plpos))
title = uni(self.MyOverlayWindow.channels[newchan - 1].getItemTitle(plpos))
LiveID = uni(self.MyOverlayWindow.channels[newchan - 1].getItemLiveID(plpos))
self.logDebug('setShowInfo.LiveID.1 = ' + uni(LiveID))
try:
type1 = str(self.getControl(507).getLabel())
self.logDebug('setShowInfo.type1 = ' + str(type1))
except:
pass
try:
type2 = str(self.getControl(509).getLabel())
self.logDebug('setShowInfo.type2 = ' + str(type2))
except:
pass
jpg = ['banner', 'fanart', 'folder', 'landscape', 'poster']
png = ['character', 'clearart', 'logo']
if type1 in jpg:
type1EXT = (type1 + '.jpg')
else:
type1EXT = (type1 + '.png')
self.logDebug('setShowInfo.type1.ext = ' + str(type1EXT))
if type2 in jpg:
type2EXT = (type2 + '.jpg')
else:
type2EXT = (type2 + '.png')
self.logDebug('setShowInfo.type2.ext = ' + str(type2EXT))
#rename art types for script.artwork.downloader
arttype1 = type1.replace("folder", "poster").replace("landscape", "thumb").replace("character", "characterart").replace("logo", "clearlogo")
arttype2 = type2.replace("folder", "poster").replace("landscape", "thumb").replace("character", "characterart").replace("logo", "clearlogo")
if not 'LiveID' in LiveID:
try:
LiveLST = LiveID.split("|", 4)
self.logDebug('setShowInfo.LiveLST = ' + str(LiveLST))
imdbid = LiveLST[0]
self.logDebug('setShowInfo.LiveLST.imdbid.1 = ' + str(imdbid))
imdbid = imdbid.split('imdb_', 1)[-1]
self.logDebug('setShowInfo.LiveLST.imdbid.2 = ' + str(imdbid))
tvdbid = LiveLST[1]
self.logDebug('setShowInfo.LiveLST.tvdbid.1 = ' + str(tvdbid))
tvdbid = tvdbid.split('tvdb_', 1)[-1]
self.logDebug('setShowInfo.LiveLST.tvdbid.2 = ' + str(tvdbid))
SBCP = LiveLST[2]
self.logDebug('setShowInfo.LiveLST.SBCP = ' + str(SBCP))
if 'dbid_' in LiveLST[3]:
dbidTYPE = LiveLST[3]
self.logDebug('setShowInfo.LiveLST.dbidTYPE.1 = ' + str(dbidTYPE))
dbidTYPE = dbidTYPE.split('dbid_', 1)[-1]
self.logDebug('setShowInfo.LiveLST.dbidTYPE.2 = ' + str(dbidTYPE))
dbid = dbidTYPE.split(',')[0]
self.logDebug('setShowInfo.LiveLST.dbid = ' + str(dbid))
type = dbidTYPE.split(',', 1)[-1]
self.logDebug('setShowInfo.LiveLST.type = ' + str(type))
if arttype1 == 'thumb' and type == 'tvshow':
arttype1 = ('tv' + arttype1)
if arttype2 == 'thumb' and type == 'tvshow':
arttype2 = ('tv' + arttype2)
if type == 'tvshow':
id = tvdbid
elif type == 'movie':
id = imdbid
else:
Unaired = LiveLST[3]
self.logDebug('setShowInfo.LiveLST.Unaired = ' + str(Unaired))
except:
self.log('setShowInfo.LiveLST Failed')
pass
try:
#Try, and pass if label isn't found (Backward compatibility with PTV Skins)
#Sickbeard/Couchpotato
if SBCP == 'SB':
self.getControl(511).setImage(self.mediaPath + 'SB.png')
elif SBCP == 'CP':
self.getControl(511).setImage(self.mediaPath + 'CP.png')
else:
self.getControl(511).setImage(self.mediaPath + 'NA.png')
except:
self.getControl(511).setImage(self.mediaPath + 'NA.png')
pass
try:
#Try, and pass if label isn't found (Backward compatibility with PTV Skins)
#Unaired/aired
if Unaired == 'NEW':
self.getControl(512).setImage(self.mediaPath + 'NEW.png')
elif Unaired == 'OLD':
self.getControl(512).setImage(self.mediaPath + 'OLD.png')
else:
self.getControl(512).setImage(self.mediaPath + 'NA.png')
except:
self.getControl(512).setImage(self.mediaPath + 'NA.png')
pass
if REAL_SETTINGS.getSetting("art.enable") == "true":
self.log('setShowInfo.Dynamic artwork enabled')
if chtype <= 7:
mediapathSeason, filename = os.path.split(mediapath)
self.logDebug('setShowInfo.mediapathSeason = ' + uni(mediapathSeason))
mediapathSeries = os.path.dirname(mediapathSeason)
self.logDebug('setShowInfo.mediapathSeries = ' + uni(mediapathSeries))
mediapathSeries1 = ascii(mediapathSeries + '/' + type1EXT)
mediapathSeason1 = ascii(mediapathSeason + '/' + type1EXT)
if FileAccess.exists(mediapathSeries1):
self.getControl(508).setImage(mediapathSeries1)
elif FileAccess.exists(mediapathSeason1):
self.getControl(508).setImage(mediapathSeason1)
else:
self.getControl(508).setImage(self.mediaPath + type1 + '.png')
# if REAL_SETTINGS.getSetting("EnableDown") == "1" and (REAL_SETTINGS.getSetting("TVFileSys") == "0" or REAL_SETTINGS.getSetting("MovieFileSys") == "0") and self.apis == True:
# self.Downloader.ArtDownloader(type, id, type1, type1EXT, Mpath1, Ipath1)
# elif REAL_SETTINGS.getSetting("EnableDown") == "1" and (REAL_SETTINGS.getSetting("TVFileSys") == "1" or REAL_SETTINGS.getSetting("MovieFileSys") == "1") and self.apis == True:
# self.Downloader.ArtDownloader(type, id, type2, type2EXT, Mpath1, Ipath1)
if REAL_SETTINGS.getSetting("EnableDown") == "2" and REAL_SETTINGS.getSetting("EnableDownSilent") == "false" and chtype != 7:
xbmc.executebuiltin('XBMC.runscript(script.artwork.downloader, mode=gui, mediatype='+type+', dbid='+dbid+', '+arttype1+')')
elif REAL_SETTINGS.getSetting("EnableDown") == "2" and REAL_SETTINGS.getSetting("EnableDownSilent") == "true" and chtype != 7:
xbmc.executebuiltin('XBMC.runscript(script.artwork.downloader, silent=true, mediatype='+type+', dbid='+dbid+', '+arttype1+')')
mediapathSeries2 = ascii(mediapathSeries + '/' + type2EXT)
mediapathSeason2 = ascii(mediapathSeason + '/' + type2EXT)
if FileAccess.exists(mediapathSeries2):
self.getControl(510).setImage(mediapathSeries2)
elif FileAccess.exists(mediapathSeason2):
self.getControl(510).setImage(mediapathSeason2)
else:
self.getControl(510).setImage(self.mediaPath + type2 + '.png')
# if REAL_SETTINGS.getSetting("EnableDown") == "1" and (REAL_SETTINGS.getSetting("TVFileSys") == "0" or REAL_SETTINGS.getSetting("MovieFileSys") == "0") and self.apis == True:
# self.Downloader.ArtDownloader(type, id, type2, type2EXT, Mpath2, Ipath2)
# elif REAL_SETTINGS.getSetting("EnableDown") == "1" and (REAL_SETTINGS.getSetting("TVFileSys") == "1" or REAL_SETTINGS.getSetting("MovieFileSys") == "1") and self.apis == True:
# self.Downloader.ArtDownloader(type, id, type2, type2EXT, Mpath2, Ipath2)
if REAL_SETTINGS.getSetting("EnableDown") == "2" and REAL_SETTINGS.getSetting("EnableDownSilent") == "false" and chtype != 7:
xbmc.executebuiltin('XBMC.runscript(script.artwork.downloader, mode=gui, mediatype='+type+', dbid='+dbid+', '+arttype2+')')
elif REAL_SETTINGS.getSetting("EnableDown") == "2" and REAL_SETTINGS.getSetting("EnableDownSilent") == "true" and chtype != 7:
xbmc.executebuiltin('XBMC.runscript(script.artwork.downloader, silent=true, mediatype='+type+', dbid='+dbid+', '+arttype2+')')
#LiveTV w/ TVDBID via Fanart.TV
elif chtype == 8:
if REAL_SETTINGS.getSetting('Live.art.enable') == 'true' and self.apis == True:
try:
print '1'
# if tvdbid != 0 and genre != 'Movie': #TV
# elif imdbid != 0 and genre == 'Movie':#Movie
except:
self.getControl(508).setImage(self.mediaPath + type1 + '.png')
self.getControl(510).setImage(self.mediaPath + type2 + '.png')
pass
else:#fallback all artwork because live art disabled
self.getControl(508).setImage(self.mediaPath + type1 + '.png')
self.getControl(510).setImage(self.mediaPath + type2 + '.png')
elif chtype == 9:
self.getControl(508).setImage(self.mediaPath + 'EPG.Internet.508.png')
self.getControl(510).setImage(self.mediaPath + 'EPG.Internet.510.png')
elif chtype == 10:
self.getControl(508).setImage(self.mediaPath + 'EPG.Youtube.508.png')
self.getControl(510).setImage(self.mediaPath + 'EPG.Youtube.510.png')
elif chtype == 11:
self.getControl(508).setImage(self.mediaPath + 'EPG.RSS.508.png')
self.getControl(510).setImage(self.mediaPath + 'EPG.RSS.510.png')
elif chtype == 13:
self.getControl(508).setImage(self.mediaPath + 'EPG.LastFM.508.png')
self.getControl(510).setImage(self.mediaPath + 'EPG.LastFM.510.png')
self.getControl(500).setLabel(self.MyOverlayWindow.channels[newchan - 1].getItemTitle(plpos))
#code to display "Live TV" instead of date (date does confirm sync)
#if chtype == 8:
# self.getControl(501).setLabel("LiveTV")
#else:
self.getControl(501).setLabel(self.MyOverlayWindow.channels[newchan - 1].getItemEpisodeTitle(plpos))
self.getControl(502).setLabel(self.MyOverlayWindow.channels[newchan - 1].getItemDescription(plpos))
if REAL_SETTINGS.getSetting("ColorEPG") == "true":
self.getControl(503).setImage(self.channelLogos + ascii(self.MyOverlayWindow.channels[newchan - 1].name) + '_c.png')
else:
self.getControl(503).setImage(self.channelLogos + ascii(self.MyOverlayWindow.channels[newchan - 1].name) + '.png')
self.log('setShowInfo return')
# using the currently selected button, play the proper shows
def selectShow(self):
self.log('selectShow')
basex, basey = self.getControl(111 + self.focusRow).getPosition()
baseh = self.getControl(111 + self.focusRow).getHeight()
basew = self.getControl(111 + self.focusRow).getWidth()
# use the selected time to set the video
left, top = self.channelButtons[self.focusRow][self.focusIndex].getPosition()
width = self.channelButtons[self.focusRow][self.focusIndex].getWidth()
left = left - basex + (width / 2)
starttime = self.shownTime + (left / (basew / 5400.0))
chnoffset = self.focusRow - 2
newchan = self.centerChannel
nowDate = datetime.datetime.now()
while chnoffset != 0:
if chnoffset > 0:
newchan = self.MyOverlayWindow.fixChannel(newchan + 1, True)
chnoffset -= 1
else:
newchan = self.MyOverlayWindow.fixChannel(newchan - 1, False)
chnoffset += 1
plpos = self.determinePlaylistPosAtTime(starttime, newchan)
chtype = int(ADDON_SETTINGS.getSetting('Channel_' + str(newchan) + '_type'))
if plpos == -1:
self.log('Unable to find the proper playlist to set from EPG', xbmc.LOGERROR)
return
timedif = (time.time() - self.MyOverlayWindow.channels[newchan - 1].lastAccessTime)
pos = self.MyOverlayWindow.channels[newchan - 1].playlistPosition
showoffset = self.MyOverlayWindow.channels[newchan - 1].showTimeOffset
#code added for "LiveTV" types
#Get the Start time of the show from "episodeitemtitle"
#we just passed this from channellist.py ; just a fill in to get value
#Start at the beginning of the playlist get the first epoch date
#position pos of the playlist convert the string add until we get to the current item in the playlist
if chtype == 8:
tmpDate = self.MyOverlayWindow.channels[newchan - 1].getItemtimestamp(pos)
self.log("selectshow tmpdate " + str(tmpDate))
t = time.strptime(tmpDate, '%Y-%m-%d %H:%M:%S')
epochBeginDate = time.mktime(t)
#beginDate = datetime.datetime(t.tm_year, t.tm_mon, t.tm_mday, t.tm_hour, t.tm_min, t.tm_sec)
#loop till we get to the current show
while epochBeginDate + self.MyOverlayWindow.channels[newchan - 1].getItemDuration(pos) < time.time():
epochBeginDate += self.MyOverlayWindow.channels[newchan - 1].getItemDuration(pos)
pos = self.MyOverlayWindow.channels[newchan - 1].fixPlaylistIndex(pos + 1)
self.log('live tv while loop')
# adjust the show and time offsets to properly position inside the playlist
else:
while showoffset + timedif > self.MyOverlayWindow.channels[newchan - 1].getItemDuration(pos):
self.log('duration ' + str(self.MyOverlayWindow.channels[newchan - 1].getItemDuration(pos)))
timedif -= self.MyOverlayWindow.channels[newchan - 1].getItemDuration(pos) - showoffset
pos = self.MyOverlayWindow.channels[newchan - 1].fixPlaylistIndex(pos + 1)
showoffset = 0
self.log('pos + plpos ' + str(pos) +', ' + str(plpos))
if self.MyOverlayWindow.currentChannel == newchan:
if plpos == xbmc.PlayList(xbmc.PLAYLIST_MUSIC).getposition():
self.log('selectShow return current show')
return
if chtype == 8:
self.log('selectShow return current LiveTV channel')
return
if pos != plpos:
if chtype == 8:
self.log('selectShow return different LiveTV channel')
return
else:
self.MyOverlayWindow.channels[newchan - 1].setShowPosition(plpos)
self.MyOverlayWindow.channels[newchan - 1].setShowTime(0)
self.MyOverlayWindow.channels[newchan - 1].setAccessTime(time.time())
self.MyOverlayWindow.newChannel = newchan
self.log('selectShow return')
def determinePlaylistPosAtTime(self, starttime, channel):
self.log('determinePlaylistPosAtTime ' + str(starttime) + ', ' + str(channel))
channel = self.MyOverlayWindow.fixChannel(channel)
chtype = int(ADDON_SETTINGS.getSetting('Channel_' + str(channel) + '_type'))
self.lastExitTime = ADDON_SETTINGS.getSetting("LastExitTime")
nowDate = datetime.datetime.now()
# if the channel is paused, then it's just the current item
if self.MyOverlayWindow.channels[channel - 1].isPaused:
self.log('determinePlaylistPosAtTime paused return')
return self.MyOverlayWindow.channels[channel - 1].playlistPosition
else:
# Find the show that was running at the given time
# Use the current time and show offset to calculate it
# At timedif time, channelShowPosition was playing at channelTimes
# The only way this isn't true is if the current channel is curchannel since
# it could have been fast forwarded or rewinded (rewound)?
if channel == self.MyOverlayWindow.currentChannel: #currentchannel epg
#Live TV pull date from the playlist entry
if chtype == 8:
playlistpos = int(xbmc.PlayList(xbmc.PLAYLIST_VIDEO).getposition())
#episodetitle is actually the start time of each show that the playlist gets from channellist.py
tmpDate = self.MyOverlayWindow.channels[channel - 1].getItemtimestamp(playlistpos)
self.log("setbuttonnowtime2 " + str(tmpDate))
t = time.strptime(tmpDate, '%Y-%m-%d %H:%M:%S')
epochBeginDate = time.mktime(t)
#beginDate = datetime.datetime(t.tm_year, t.tm_mon, t.tm_mday, t.tm_hour, t.tm_min, t.tm_sec)
#videotime = (nowDate - beginDate).seconds
videotime = time.time() - epochBeginDate
reftime = time.time()
else:
playlistpos = int(xbmc.PlayList(xbmc.PLAYLIST_VIDEO).getposition())
videotime = xbmc.Player().getTime()
reftime = time.time()
else:
#Live TV pull date from the playlist entry
if chtype == 8:
playlistpos = self.MyOverlayWindow.channels[channel - 1].playlistPosition
#playlistpos = int(xbmc.PlayList(xbmc.PLAYLIST_VIDEO).getposition())
#episodetitle is actually the start time of each show that the playlist gets from channellist.py
tmpDate = self.MyOverlayWindow.channels[channel - 1].getItemtimestamp(playlistpos)
self.log("setbuttonnowtime2 " + str(tmpDate))
t = time.strptime(tmpDate, '%Y-%m-%d %H:%M:%S')
epochBeginDate = time.mktime(t)
#beginDate = datetime.datetime(t.tm_year, t.tm_mon, t.tm_mday, t.tm_hour, t.tm_min, t.tm_sec)
#videotime = (nowDate - beginDate).seconds
while epochBeginDate + self.MyOverlayWindow.channels[channel - 1].getItemDuration(playlistpos) < time.time():
epochBeginDate += self.MyOverlayWindow.channels[channel - 1].getItemDuration(playlistpos)
playlistpos = self.MyOverlayWindow.channels[channel - 1].fixPlaylistIndex(playlistpos + 1)
videotime = time.time() - epochBeginDate
self.log('videotime ' + str(videotime))
reftime = time.time()
else:
playlistpos = self.MyOverlayWindow.channels[channel - 1].playlistPosition
videotime = self.MyOverlayWindow.channels[channel - 1].showTimeOffset
reftime = self.MyOverlayWindow.channels[channel - 1].lastAccessTime
# normalize reftime to the beginning of the video
reftime -= videotime
while reftime > starttime:
playlistpos -= 1
reftime -= self.MyOverlayWindow.channels[channel - 1].getItemDuration(playlistpos)
while reftime + self.MyOverlayWindow.channels[channel - 1].getItemDuration(playlistpos) < starttime:
reftime += self.MyOverlayWindow.channels[channel - 1].getItemDuration(playlistpos)
playlistpos += 1
self.log('determinePlaylistPosAtTime return' + str(self.MyOverlayWindow.channels[channel - 1].fixPlaylistIndex(playlistpos)))
return self.MyOverlayWindow.channels[channel - 1].fixPlaylistIndex(playlistpos)<|fim▁end|> | while reftime + self.MyOverlayWindow.channels[curchannel - 1].getItemDuration(playlistpos) < starttime:
reftime += self.MyOverlayWindow.channels[curchannel - 1].getItemDuration(playlistpos) |
<|file_name|>initMultiple.js<|end_file_name|><|fim▁begin|>$(function () {
$('.imageUploadMultiple').each(function (index, item) {
var $item = $(item);
var $group = $item.closest('.form-group');
var $innerGroup = $item.find('.form-group');
var $errors = $item.find('.errors');
var $input = $item.find('.imageValue');
var flow = new Flow({
target: $item.data('target'),
testChunks: false,
chunkSize: 1024 * 1024 * 1024,
query: {
_token: $item.data('token')
}
});
var updateValue = function () {
var values = [];
$item.find('img[data-value]').each(function () {
values.push($(this).data('value'));
});
$input.val(values.join(','));
};
flow.assignBrowse($item.find('.imageBrowse'));
flow.on('filesSubmitted', function (file) {
flow.upload();
});
flow.on('fileSuccess', function (file, message) {
flow.removeFile(file);
$errors.html('');
$group.removeClass('has-error');
var result = $.parseJSON(message);
$innerGroup.append('<div class="col-xs-6 col-md-3 imageThumbnail"><div class="thumbnail">' +
'<img data-value="' + result.value + '" src="' + result.url + '" />' +
'<a href="#" class="imageRemove">Remove</a></div></div>');
updateValue();
});
flow.on('fileError', function (file, message) {
flow.removeFile(file);
var response = $.parseJSON(message);
var errors = '';
$.each(response, function (index, error) {
errors += '<p class="help-block">' + error + '</p>'
});
$errors.html(errors);
$group.addClass('has-error');
});
$item.on('click', '.imageRemove', function (e) {
e.preventDefault();
$(this).closest('.imageThumbnail').remove();
updateValue();
});
$innerGroup.sortable({<|fim▁hole|> });
});<|fim▁end|> | onUpdate: function () {
updateValue();
}
}); |
<|file_name|>0003_auto__add_field_calendar_slug.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Adding field 'Calendar.slug'
db.add_column('schedule_calendar', 'slug',
self.gf('django.db.models.fields.SlugField')(max_length=255, null=True, blank=True),
keep_default=False)
def backwards(self, orm):
# Deleting field 'Calendar.slug'
db.delete_column('schedule_calendar', 'slug')
models = {
'auth.group': {
'Meta': {'object_name': 'Group'},
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '80'}),
'permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'})
},
'auth.permission': {
'Meta': {'ordering': "('content_type__app_label', 'content_type__model', 'codename')", 'unique_together': "(('content_type', 'codename'),)", 'object_name': 'Permission'},
'codename': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'content_type': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['contenttypes.ContentType']"}),<|fim▁hole|> 'name': ('django.db.models.fields.CharField', [], {'max_length': '50'})
},
'auth.user': {
'Meta': {'object_name': 'User'},
'date_joined': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'email': ('django.db.models.fields.EmailField', [], {'max_length': '75', 'blank': 'True'}),
'first_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'groups': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Group']", 'symmetrical': 'False', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'is_active': ('django.db.models.fields.BooleanField', [], {'default': 'True'}),
'is_staff': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'is_superuser': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'last_login': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now'}),
'last_name': ('django.db.models.fields.CharField', [], {'max_length': '30', 'blank': 'True'}),
'password': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'user_permissions': ('django.db.models.fields.related.ManyToManyField', [], {'to': "orm['auth.Permission']", 'symmetrical': 'False', 'blank': 'True'}),
'username': ('django.db.models.fields.CharField', [], {'unique': 'True', 'max_length': '30'})
},
'contenttypes.contenttype': {
'Meta': {'ordering': "('name',)", 'unique_together': "(('app_label', 'model'),)", 'object_name': 'ContentType', 'db_table': "'django_content_type'"},
'app_label': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'model': ('django.db.models.fields.CharField', [], {'max_length': '100'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '100'})
},
'schedule.calendar': {
'Meta': {'ordering': "('-modified', '-created')", 'object_name': 'Calendar'},
'created': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'data': ('jsonfield.fields.JSONField', [], {'null': 'True', 'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'modified': ('django.db.models.fields.DateTimeField', [], {'default': 'datetime.datetime.now', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '255'}),
'slug': ('django.db.models.fields.SlugField', [], {'max_length': '255', 'null': 'True', 'blank': 'True'}),
'user': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['auth.User']"})
}
}
complete_apps = ['schedule']<|fim▁end|> | 'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}), |
<|file_name|>delimited-token-groups.rs<|end_file_name|><|fim▁begin|>// pp-exact
#![feature(rustc_attrs)]
macro_rules! mac { ($($tt : tt) *) => () }
mac! {
struct S { field1 : u8, field2 : u16, } impl Clone for S
{
fn clone() -> S
{
panic! () ;
}
}
}
<|fim▁hole|>mac! {
a(aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa
aaaaaaaa aaaaaaaa) a
[aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa
aaaaaaaa aaaaaaaa] a
{
aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa
aaaaaaaa aaaaaaaa aaaaaaaa
} a
}
mac!(aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa
aaaaaaaa aaaaaaaa);
mac![aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa
aaaaaaaa aaaaaaaa];
mac! {
aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa
aaaaaaaa aaaaaaaa
}
#[rustc_dummy(aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa
aaaaaaaa aaaaaaaa aaaaaaaa)]
#[rustc_dummy[aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa
aaaaaaaa aaaaaaaa aaaaaaaa]]
#[rustc_dummy {
aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa
aaaaaaaa aaaaaaaa
}]
#[rustc_dummy =
"aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa aaaaaaaa"]
fn main() { }<|fim▁end|> | |
<|file_name|>dda.hpp<|end_file_name|><|fim▁begin|>/* Copyright (c) 2010 by CodeSourcery. All rights reserved. */
#ifndef vsip_core_dda_hpp_
#define vsip_core_dda_hpp_
#include <vsip/core/static_assert.hpp>
#include <vsip/core/block_traits.hpp>
#include <vsip/core/metaprogramming.hpp>
#include <vsip/core/storage.hpp>
#include <vsip/core/domain_utils.hpp>
#include <vsip/core/block_copy.hpp>
#include <vsip/core/us_block.hpp>
#include <vsip/core/view_traits.hpp>
#include <vsip/core/assign_local.hpp>
#include <vsip/core/adjust_layout.hpp>
#include <vsip/dda.hpp>
namespace vsip
{
namespace dda
{
namespace impl
{
using namespace vsip::impl;
/// @group Data Access Tags {
/// Direct_access_tag -- use direct access to block data
/// (data, stride member functions).
struct Direct_access_tag {};
/// Reorder_access_tag -- use direct access to block data, but reorder data
/// to match requested dimension-order.
struct Reorder_access_tag {};
/// Copy_access_tag -- copy block data (either using direct access if
/// available, or just get/put).
struct Copy_access_tag {};
/// Flexible_access_tag -- determine whether to use direct or copy access
/// at runtime.
struct Flexible_access_tag {};
/// }
#if VSIP_IMPL_REF_IMPL
template <typename Block, typename L>
struct Choose_access
{
typedef typename vsip::impl::remove_const<Block>::type block_type;
typedef typename get_block_layout<block_type>::type block_layout_type;
typedef typename
conditional<supports_dda<block_type>::value &&
is_same<block_layout_type, L>::value,
Direct_access_tag, Copy_access_tag>::type
type;
};
#endif
struct direct; // Use dda::Data directly on block.
struct local; // Use dda::Data on get_local_block of block
struct remap; // Use dda::Data on reorganized block.
template <typename Block, typename L>
struct Choose_impl_tag
{
static dimension_type const dim = L::dim;
typedef typename vsip::impl::remove_const<Block>::type block_type;
typedef typename block_type::value_type value_type;
typedef typename block_type::map_type map_type;
typedef typename get_block_layout<block_type>::type block_layout_type;
static bool const local_equiv =
is_layout_compatible<value_type, L, block_layout_type>::value &&
is_same<Replicated_map<dim>, map_type>::value;
static bool const equiv = local_equiv &&<|fim▁hole|>
typedef typename
conditional<is_local, direct,
typename conditional<local_equiv, local,
remap>::type>::type
type;
};
/// Low-level data access class.
///
/// Template parameters:
///
/// :Block: is a block that supports the data access interface indicated
/// by `AT`.
/// :LP: is a layout policy compatible with access tag `AT` and block
/// `Block`.
/// :AT: is a valid data access tag,
///
/// (Each specializtion may provide additional requirements).
///
/// Member Functions:
/// ...
///
/// Notes:
/// Accessor does not hold a block reference/pointer, it
/// is provided to each member function by the caller. This allows
/// the caller to make policy decisions, such as reference counting.
template <typename Block,
typename LP,
typename AT,
typename Impl = typename Choose_impl_tag<Block, LP>::type>
class Accessor;
/// Specialization for low-level direct data access.
///
/// Template parameters:
/// BLOCK to be a block that supports direct access via member
/// functions ptr() and stride().
/// LP is a layout policy describing the desired layout. It is should
/// match the inherent layout of the block. Specifying a layout
/// not directly supported by the block is an error and results in
/// undefined behavior.
template <typename Block,
typename LP>
class Accessor<Block, LP, Direct_access_tag, direct>
{
// Compile time typedefs.
public:
static dimension_type const dim = LP::dim;
typedef typename Block::value_type value_type;
typedef typename LP::order_type order_type;
static pack_type const packing = LP::packing;
static storage_format_type const storage_format = LP::storage_format;
typedef Storage<storage_format, value_type> storage_type;
typedef typename storage_type::type non_const_ptr_type;
typedef typename storage_type::const_type const_ptr_type;
typedef typename
vsip::impl::conditional<vsip::impl::is_modifiable_block<Block>::value,
non_const_ptr_type,
const_ptr_type>::type ptr_type;
static int const CT_Cost = 0;
static bool const CT_Mem_not_req = true;
static bool const CT_Xfer_not_req = true;
static int cost (Block const& /*block*/, LP const& /*layout*/)
{ return CT_Cost; }
static size_t mem_required (Block const& /*block*/, LP const& /*layout*/)
{ return 0; }
static size_t xfer_required(Block const& /*block*/, LP const& /*layout*/)
{ return !CT_Xfer_not_req; }
// Constructor and destructor.
public:
Accessor(Block&, non_const_ptr_type = non_const_ptr_type()) {}
~Accessor() {}
void begin(Block*, bool) {}
void end(Block*, bool) {}
int cost() const { return CT_Cost; }
// Direct data acessors.
public:
ptr_type ptr(Block * blk) const { return blk->ptr();}
stride_type stride(Block* blk, dimension_type d) const { return blk->stride(dim, d);}
length_type size (Block* blk, dimension_type d) const { return blk->size(dim, d);}
length_type size (Block* blk) const { return blk->size();}
};
/// Specialization for distributed blocks with matching layout.
/// Use get_local_block().
template <typename Block,
typename LP>
class Accessor<Block, LP, Direct_access_tag, local>
{
typedef typename remove_const<Block>::type non_const_block_type;
typedef typename add_const<Block>::type const_block_type;
typedef typename conditional<is_const<Block>::value,
typename Distributed_local_block<non_const_block_type>::type const,
typename Distributed_local_block<non_const_block_type>::type>::type
local_block_type;
public:
static dimension_type const dim = LP::dim;
typedef typename Block::value_type value_type;
typedef typename LP::order_type order_type;
static pack_type const packing = LP::packing;
static storage_format_type const storage_format = LP::storage_format;
typedef Storage<storage_format, value_type> storage_type;
typedef typename storage_type::type non_const_ptr_type;
typedef typename storage_type::const_type const_ptr_type;
typedef typename
vsip::impl::conditional<vsip::impl::is_modifiable_block<Block>::value,
non_const_ptr_type,
const_ptr_type>::type ptr_type;
static int const CT_Cost = 0;
static bool const CT_Mem_not_req = true;
static bool const CT_Xfer_not_req = true;
static int cost(Block const&, LP const&) { return CT_Cost;}
static size_t mem_required (Block const&, LP const&) { return 0;}
static size_t xfer_required(Block const&, LP const&) { return !CT_Xfer_not_req;}
Accessor(Block &b, non_const_ptr_type = non_const_ptr_type())
: block_(get_local_block(const_cast<non_const_block_type &>(b))) {}
~Accessor() {}
void begin(Block*, bool) {}
void end(Block*, bool) {}
int cost() const { return CT_Cost;}
ptr_type ptr(Block *) const { return block_.ptr();}
stride_type stride(Block *, dimension_type d) const { return block_.stride(dim, d);}
length_type size(Block *, dimension_type d) const { return block_.size(dim, d);}
length_type size(Block *) const { return block_.size();}
private:
local_block_type &block_;
};
/// Specialization for copied direct data access.
///
/// Template parameters:
/// :Block: to be a block.
/// :LP: is a layout policy describing the desired layout.
/// The desired layout can be different from the block's layout.
///
/// Notes:
/// When the desired layout packing format is either packing::unit_stride or
/// packing::unknown, the packing format used will be packing::dense.
template <typename Block,
typename LP>
class Accessor<Block, LP, Copy_access_tag, direct>
{
// Compile time typedefs.
public:
static dimension_type const dim = LP::dim;
typedef typename Block::value_type value_type;
typedef typename LP::order_type order_type;
static pack_type const packing =
LP::packing == unit_stride || LP::packing == any_packing
? dense : LP::packing;
static storage_format_type const storage_format = LP::storage_format;
typedef Layout<dim, order_type, packing, storage_format> actual_layout_type;
typedef Allocated_storage<storage_format, value_type> storage_type;
typedef typename storage_type::type non_const_ptr_type;
typedef typename storage_type::const_type const_ptr_type;
typedef typename
vsip::impl::conditional<vsip::impl::is_modifiable_block<Block>::value,
non_const_ptr_type,
const_ptr_type>::type ptr_type;
static int const CT_Cost = 2;
static bool const CT_Mem_not_req = false;
static bool const CT_Xfer_not_req = false;
static int cost(Block const&, LP const&)
{ return CT_Cost; }
static size_t mem_required (Block const& block, LP const&)
{ return sizeof(typename Block::value_type) * block.size(); }
static size_t xfer_required(Block const&, LP const&)
{ return !CT_Xfer_not_req; }
// Constructor and destructor.
public:
Accessor(Block & blk, non_const_ptr_type buffer = non_const_ptr_type())
: layout_ (extent<dim>(blk)),
storage_ (layout_.total_size(), buffer)
{}
~Accessor()
{ storage_.deallocate(layout_.total_size());}
void begin(Block* blk, bool sync)
{
if (sync)
Block_copy_to_ptr<LP::dim, Block, order_type, packing, storage_format>::
copy(blk, layout_, storage_.ptr());
}
void end(Block* blk, bool sync)
{
if (sync)
Block_copy_from_ptr<LP::dim, Block, order_type, packing, storage_format>::
copy(blk, layout_, storage_.ptr());
}
int cost() const { return CT_Cost; }
ptr_type ptr(Block*) { return storage_.ptr();}
const_ptr_type ptr(Block*) const { return storage_.ptr();}
stride_type stride(Block*, dimension_type d) const { return layout_.stride(d);}
length_type size(Block* blk, dimension_type d) const { return blk->size(Block::dim, d);}
length_type size(Block* blk) const { return blk->size();}
private:
Applied_layout<actual_layout_type> layout_;
storage_type storage_;
};
template <typename B, typename L, typename A>
class Accessor<B, L, A, remap>
{
public:
typedef typename B::value_type value_type;
typedef typename view_of<B>::type dist_view_type;
typedef Allocated_storage<L::storage_format, value_type> storage_type;
typedef typename storage_type::type non_const_ptr_type;
typedef typename storage_type::const_type const_ptr_type;
typedef typename
vsip::impl::conditional<vsip::impl::is_modifiable_block<B>::value,
non_const_ptr_type,
const_ptr_type>::type ptr_type;
typedef Us_block<B::dim, value_type, L, Local_map> block_type;
typedef typename view_of<block_type>::type local_view_type;
typedef Accessor<block_type, L, A> data_access_type;
public:
static int const CT_Cost = 2;
static bool const CT_Mem_not_req = false;
static bool const CT_Xfer_not_req = false;
static int cost(B const&, L const&)
{ return CT_Cost;}
static size_t mem_required (B const & block, L const&)
{ return sizeof(typename B::value_type) * block.size();}
static size_t xfer_required(B const &, L const &)
{ return !CT_Xfer_not_req;}
Accessor(B &b, non_const_ptr_type buffer = non_const_ptr_type())
: storage_(b.size(), buffer),
block_(block_domain<B::dim>(b), storage_.ptr()),
ext_(block_)
{}
~Accessor()
{ storage_.deallocate(block_.size());}
void begin(B *b, bool sync)
{
if (sync) assign_local(block_, *b);
ext_.begin(&block_, sync);
}
void end(B *b, bool sync)
{
ext_.end(&block_, sync);
if (sync) assign_local_if<is_modifiable_block<B>::value>(*b, block_);
}
int cost() const { return CT_Cost;}
ptr_type ptr(B*) { return ext_.ptr(&block_);}
const_ptr_type ptr(B*) const { return ext_.ptr(&block_);}
stride_type stride(B*, dimension_type d) const { return ext_.stride(&block_, d);}
length_type size(B *b, dimension_type d) const { return ext_.size(&block_, d);}
length_type size(B *b) const { return ext_.size(&block_);}
private:
storage_type storage_;
mutable block_type block_;
data_access_type ext_;
};
template <typename AT> struct Cost { static int const value = 10; };
template <> struct Cost<Direct_access_tag> { static int const value = 0; };
template <> struct Cost<Copy_access_tag> { static int const value = 2; };
} // namespace vsip::dda::impl
} // namespace vsip::dda
} // namespace vsip
#endif<|fim▁end|> | adjust_type<Local_map, map_type>::equiv;
static bool const is_local = is_same<Local_map, map_type>::value; |
<|file_name|>trimFilename.tsx<|end_file_name|><|fim▁begin|>function trimFilename(filename: string) {
const pieces = filename.split(/\//g);<|fim▁hole|><|fim▁end|> | return pieces[pieces.length - 1];
}
export default trimFilename; |
<|file_name|>error.go<|end_file_name|><|fim▁begin|>// Copyright 2015 The etcd Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package rpctypes
import (
"google.golang.org/grpc/codes"
"google.golang.org/grpc/status"
)
// server-side error
var (
ErrGRPCEmptyKey = status.New(codes.InvalidArgument, "etcdserver: key is not provided").Err()
ErrGRPCKeyNotFound = status.New(codes.InvalidArgument, "etcdserver: key not found").Err()
ErrGRPCValueProvided = status.New(codes.InvalidArgument, "etcdserver: value is provided").Err()
ErrGRPCLeaseProvided = status.New(codes.InvalidArgument, "etcdserver: lease is provided").Err()
ErrGRPCTooManyOps = status.New(codes.InvalidArgument, "etcdserver: too many operations in txn request").Err()
ErrGRPCDuplicateKey = status.New(codes.InvalidArgument, "etcdserver: duplicate key given in txn request").Err()
ErrGRPCInvalidClientAPIVersion = status.New(codes.InvalidArgument, "etcdserver: invalid client api version").Err()
ErrGRPCInvalidSortOption = status.New(codes.InvalidArgument, "etcdserver: invalid sort option").Err()
ErrGRPCCompacted = status.New(codes.OutOfRange, "etcdserver: mvcc: required revision has been compacted").Err()
ErrGRPCFutureRev = status.New(codes.OutOfRange, "etcdserver: mvcc: required revision is a future revision").Err()
ErrGRPCNoSpace = status.New(codes.ResourceExhausted, "etcdserver: mvcc: database space exceeded").Err()
ErrGRPCLeaseNotFound = status.New(codes.NotFound, "etcdserver: requested lease not found").Err()
ErrGRPCLeaseExist = status.New(codes.FailedPrecondition, "etcdserver: lease already exists").Err()
ErrGRPCLeaseTTLTooLarge = status.New(codes.OutOfRange, "etcdserver: too large lease TTL").Err()
ErrGRPCWatchCanceled = status.New(codes.Canceled, "etcdserver: watch canceled").Err()
ErrGRPCMemberExist = status.New(codes.FailedPrecondition, "etcdserver: member ID already exist").Err()
ErrGRPCPeerURLExist = status.New(codes.FailedPrecondition, "etcdserver: Peer URLs already exists").Err()
ErrGRPCMemberNotEnoughStarted = status.New(codes.FailedPrecondition, "etcdserver: re-configuration failed due to not enough started members").Err()
ErrGRPCMemberBadURLs = status.New(codes.InvalidArgument, "etcdserver: given member URLs are invalid").Err()
ErrGRPCMemberNotFound = status.New(codes.NotFound, "etcdserver: member not found").Err()
ErrGRPCMemberNotLearner = status.New(codes.FailedPrecondition, "etcdserver: can only promote a learner member").Err()
ErrGRPCLearnerNotReady = status.New(codes.FailedPrecondition, "etcdserver: can only promote a learner member which is in sync with leader").Err()
ErrGRPCTooManyLearners = status.New(codes.FailedPrecondition, "etcdserver: too many learner members in cluster").Err()
ErrGRPCRequestTooLarge = status.New(codes.InvalidArgument, "etcdserver: request is too large").Err()
ErrGRPCRequestTooManyRequests = status.New(codes.ResourceExhausted, "etcdserver: too many requests").Err()
ErrGRPCRootUserNotExist = status.New(codes.FailedPrecondition, "etcdserver: root user does not exist").Err()
ErrGRPCRootRoleNotExist = status.New(codes.FailedPrecondition, "etcdserver: root user does not have root role").Err()
ErrGRPCUserAlreadyExist = status.New(codes.FailedPrecondition, "etcdserver: user name already exists").Err()
ErrGRPCUserEmpty = status.New(codes.InvalidArgument, "etcdserver: user name is empty").Err()
ErrGRPCUserNotFound = status.New(codes.FailedPrecondition, "etcdserver: user name not found").Err()
ErrGRPCRoleAlreadyExist = status.New(codes.FailedPrecondition, "etcdserver: role name already exists").Err()
ErrGRPCRoleNotFound = status.New(codes.FailedPrecondition, "etcdserver: role name not found").Err()
ErrGRPCRoleEmpty = status.New(codes.InvalidArgument, "etcdserver: role name is empty").Err()
ErrGRPCAuthFailed = status.New(codes.InvalidArgument, "etcdserver: authentication failed, invalid user ID or password").Err()
ErrGRPCPermissionNotGiven = status.New(codes.InvalidArgument, "etcdserver: permission not given").Err()
ErrGRPCPermissionDenied = status.New(codes.PermissionDenied, "etcdserver: permission denied").Err()
ErrGRPCRoleNotGranted = status.New(codes.FailedPrecondition, "etcdserver: role is not granted to the user").Err()
ErrGRPCPermissionNotGranted = status.New(codes.FailedPrecondition, "etcdserver: permission is not granted to the role").Err()
ErrGRPCAuthNotEnabled = status.New(codes.FailedPrecondition, "etcdserver: authentication is not enabled").Err()
ErrGRPCInvalidAuthToken = status.New(codes.Unauthenticated, "etcdserver: invalid auth token").Err()
ErrGRPCInvalidAuthMgmt = status.New(codes.InvalidArgument, "etcdserver: invalid auth management").Err()
ErrGRPCAuthOldRevision = status.New(codes.InvalidArgument, "etcdserver: revision of auth store is old").Err()
ErrGRPCNoLeader = status.New(codes.Unavailable, "etcdserver: no leader").Err()
ErrGRPCNotLeader = status.New(codes.FailedPrecondition, "etcdserver: not leader").Err()
ErrGRPCLeaderChanged = status.New(codes.Unavailable, "etcdserver: leader changed").Err()
ErrGRPCNotCapable = status.New(codes.FailedPrecondition, "etcdserver: not capable").Err()
ErrGRPCStopped = status.New(codes.Unavailable, "etcdserver: server stopped").Err()
ErrGRPCTimeout = status.New(codes.Unavailable, "etcdserver: request timed out").Err()
ErrGRPCTimeoutDueToLeaderFail = status.New(codes.Unavailable, "etcdserver: request timed out, possibly due to previous leader failure").Err()
ErrGRPCTimeoutDueToConnectionLost = status.New(codes.Unavailable, "etcdserver: request timed out, possibly due to connection lost").Err()
ErrGRPCUnhealthy = status.New(codes.Unavailable, "etcdserver: unhealthy cluster").Err()
ErrGRPCCorrupt = status.New(codes.DataLoss, "etcdserver: corrupt cluster").Err()
ErrGRPCNotSupportedForLearner = status.New(codes.FailedPrecondition, "etcdserver: rpc not supported for learner").Err()
ErrGRPCBadLeaderTransferee = status.New(codes.FailedPrecondition, "etcdserver: bad leader transferee").Err()
ErrGRPCWrongDowngradeVersionFormat = status.New(codes.InvalidArgument, "etcdserver: wrong downgrade target version format").Err()
ErrGRPCInvalidDowngradeTargetVersion = status.New(codes.InvalidArgument, "etcdserver: invalid downgrade target version").Err()
ErrGRPCClusterVersionUnavailable = status.New(codes.FailedPrecondition, "etcdserver: cluster version not found during downgrade").Err()
ErrGRPCDowngradeInProcess = status.New(codes.FailedPrecondition, "etcdserver: cluster has a downgrade job in progress").Err()
ErrGRPCNoInflightDowngrade = status.New(codes.FailedPrecondition, "etcdserver: no inflight downgrade job").Err()
ErrGRPCCanceled = status.New(codes.Canceled, "etcdserver: request canceled").Err()
ErrGRPCDeadlineExceeded = status.New(codes.DeadlineExceeded, "etcdserver: context deadline exceeded").Err()
errStringToError = map[string]error{
ErrorDesc(ErrGRPCEmptyKey): ErrGRPCEmptyKey,
ErrorDesc(ErrGRPCKeyNotFound): ErrGRPCKeyNotFound,
ErrorDesc(ErrGRPCValueProvided): ErrGRPCValueProvided,
ErrorDesc(ErrGRPCLeaseProvided): ErrGRPCLeaseProvided,
ErrorDesc(ErrGRPCTooManyOps): ErrGRPCTooManyOps,
ErrorDesc(ErrGRPCDuplicateKey): ErrGRPCDuplicateKey,
ErrorDesc(ErrGRPCInvalidSortOption): ErrGRPCInvalidSortOption,
ErrorDesc(ErrGRPCCompacted): ErrGRPCCompacted,
ErrorDesc(ErrGRPCFutureRev): ErrGRPCFutureRev,
ErrorDesc(ErrGRPCNoSpace): ErrGRPCNoSpace,
ErrorDesc(ErrGRPCLeaseNotFound): ErrGRPCLeaseNotFound,
ErrorDesc(ErrGRPCLeaseExist): ErrGRPCLeaseExist,
ErrorDesc(ErrGRPCLeaseTTLTooLarge): ErrGRPCLeaseTTLTooLarge,
ErrorDesc(ErrGRPCMemberExist): ErrGRPCMemberExist,
ErrorDesc(ErrGRPCPeerURLExist): ErrGRPCPeerURLExist,
ErrorDesc(ErrGRPCMemberNotEnoughStarted): ErrGRPCMemberNotEnoughStarted,
ErrorDesc(ErrGRPCMemberBadURLs): ErrGRPCMemberBadURLs,
ErrorDesc(ErrGRPCMemberNotFound): ErrGRPCMemberNotFound,
ErrorDesc(ErrGRPCMemberNotLearner): ErrGRPCMemberNotLearner,
ErrorDesc(ErrGRPCLearnerNotReady): ErrGRPCLearnerNotReady,
ErrorDesc(ErrGRPCTooManyLearners): ErrGRPCTooManyLearners,
ErrorDesc(ErrGRPCRequestTooLarge): ErrGRPCRequestTooLarge,
ErrorDesc(ErrGRPCRequestTooManyRequests): ErrGRPCRequestTooManyRequests,
ErrorDesc(ErrGRPCRootUserNotExist): ErrGRPCRootUserNotExist,
ErrorDesc(ErrGRPCRootRoleNotExist): ErrGRPCRootRoleNotExist,
ErrorDesc(ErrGRPCUserAlreadyExist): ErrGRPCUserAlreadyExist,
ErrorDesc(ErrGRPCUserEmpty): ErrGRPCUserEmpty,
ErrorDesc(ErrGRPCUserNotFound): ErrGRPCUserNotFound,
ErrorDesc(ErrGRPCRoleAlreadyExist): ErrGRPCRoleAlreadyExist,
ErrorDesc(ErrGRPCRoleNotFound): ErrGRPCRoleNotFound,
ErrorDesc(ErrGRPCRoleEmpty): ErrGRPCRoleEmpty,
ErrorDesc(ErrGRPCAuthFailed): ErrGRPCAuthFailed,
ErrorDesc(ErrGRPCPermissionDenied): ErrGRPCPermissionDenied,
ErrorDesc(ErrGRPCRoleNotGranted): ErrGRPCRoleNotGranted,
ErrorDesc(ErrGRPCPermissionNotGranted): ErrGRPCPermissionNotGranted,
ErrorDesc(ErrGRPCAuthNotEnabled): ErrGRPCAuthNotEnabled,
ErrorDesc(ErrGRPCInvalidAuthToken): ErrGRPCInvalidAuthToken,
ErrorDesc(ErrGRPCInvalidAuthMgmt): ErrGRPCInvalidAuthMgmt,
ErrorDesc(ErrGRPCAuthOldRevision): ErrGRPCAuthOldRevision,
ErrorDesc(ErrGRPCNoLeader): ErrGRPCNoLeader,
ErrorDesc(ErrGRPCNotLeader): ErrGRPCNotLeader,
ErrorDesc(ErrGRPCLeaderChanged): ErrGRPCLeaderChanged,
ErrorDesc(ErrGRPCNotCapable): ErrGRPCNotCapable,
ErrorDesc(ErrGRPCStopped): ErrGRPCStopped,
ErrorDesc(ErrGRPCTimeout): ErrGRPCTimeout,
ErrorDesc(ErrGRPCTimeoutDueToLeaderFail): ErrGRPCTimeoutDueToLeaderFail,
ErrorDesc(ErrGRPCTimeoutDueToConnectionLost): ErrGRPCTimeoutDueToConnectionLost,
ErrorDesc(ErrGRPCUnhealthy): ErrGRPCUnhealthy,
ErrorDesc(ErrGRPCCorrupt): ErrGRPCCorrupt,
ErrorDesc(ErrGRPCNotSupportedForLearner): ErrGRPCNotSupportedForLearner,
ErrorDesc(ErrGRPCBadLeaderTransferee): ErrGRPCBadLeaderTransferee,
ErrorDesc(ErrGRPCClusterVersionUnavailable): ErrGRPCClusterVersionUnavailable,
ErrorDesc(ErrGRPCWrongDowngradeVersionFormat): ErrGRPCWrongDowngradeVersionFormat,
ErrorDesc(ErrGRPCInvalidDowngradeTargetVersion): ErrGRPCInvalidDowngradeTargetVersion,
ErrorDesc(ErrGRPCDowngradeInProcess): ErrGRPCDowngradeInProcess,
ErrorDesc(ErrGRPCNoInflightDowngrade): ErrGRPCNoInflightDowngrade,
}
)
// client-side error
var (
ErrEmptyKey = Error(ErrGRPCEmptyKey)
ErrKeyNotFound = Error(ErrGRPCKeyNotFound)<|fim▁hole|> ErrInvalidSortOption = Error(ErrGRPCInvalidSortOption)
ErrCompacted = Error(ErrGRPCCompacted)
ErrFutureRev = Error(ErrGRPCFutureRev)
ErrNoSpace = Error(ErrGRPCNoSpace)
ErrLeaseNotFound = Error(ErrGRPCLeaseNotFound)
ErrLeaseExist = Error(ErrGRPCLeaseExist)
ErrLeaseTTLTooLarge = Error(ErrGRPCLeaseTTLTooLarge)
ErrMemberExist = Error(ErrGRPCMemberExist)
ErrPeerURLExist = Error(ErrGRPCPeerURLExist)
ErrMemberNotEnoughStarted = Error(ErrGRPCMemberNotEnoughStarted)
ErrMemberBadURLs = Error(ErrGRPCMemberBadURLs)
ErrMemberNotFound = Error(ErrGRPCMemberNotFound)
ErrMemberNotLearner = Error(ErrGRPCMemberNotLearner)
ErrMemberLearnerNotReady = Error(ErrGRPCLearnerNotReady)
ErrTooManyLearners = Error(ErrGRPCTooManyLearners)
ErrRequestTooLarge = Error(ErrGRPCRequestTooLarge)
ErrTooManyRequests = Error(ErrGRPCRequestTooManyRequests)
ErrRootUserNotExist = Error(ErrGRPCRootUserNotExist)
ErrRootRoleNotExist = Error(ErrGRPCRootRoleNotExist)
ErrUserAlreadyExist = Error(ErrGRPCUserAlreadyExist)
ErrUserEmpty = Error(ErrGRPCUserEmpty)
ErrUserNotFound = Error(ErrGRPCUserNotFound)
ErrRoleAlreadyExist = Error(ErrGRPCRoleAlreadyExist)
ErrRoleNotFound = Error(ErrGRPCRoleNotFound)
ErrRoleEmpty = Error(ErrGRPCRoleEmpty)
ErrAuthFailed = Error(ErrGRPCAuthFailed)
ErrPermissionDenied = Error(ErrGRPCPermissionDenied)
ErrRoleNotGranted = Error(ErrGRPCRoleNotGranted)
ErrPermissionNotGranted = Error(ErrGRPCPermissionNotGranted)
ErrAuthNotEnabled = Error(ErrGRPCAuthNotEnabled)
ErrInvalidAuthToken = Error(ErrGRPCInvalidAuthToken)
ErrAuthOldRevision = Error(ErrGRPCAuthOldRevision)
ErrInvalidAuthMgmt = Error(ErrGRPCInvalidAuthMgmt)
ErrNoLeader = Error(ErrGRPCNoLeader)
ErrNotLeader = Error(ErrGRPCNotLeader)
ErrLeaderChanged = Error(ErrGRPCLeaderChanged)
ErrNotCapable = Error(ErrGRPCNotCapable)
ErrStopped = Error(ErrGRPCStopped)
ErrTimeout = Error(ErrGRPCTimeout)
ErrTimeoutDueToLeaderFail = Error(ErrGRPCTimeoutDueToLeaderFail)
ErrTimeoutDueToConnectionLost = Error(ErrGRPCTimeoutDueToConnectionLost)
ErrUnhealthy = Error(ErrGRPCUnhealthy)
ErrCorrupt = Error(ErrGRPCCorrupt)
ErrBadLeaderTransferee = Error(ErrGRPCBadLeaderTransferee)
ErrClusterVersionUnavailable = Error(ErrGRPCClusterVersionUnavailable)
ErrWrongDowngradeVersionFormat = Error(ErrGRPCWrongDowngradeVersionFormat)
ErrInvalidDowngradeTargetVersion = Error(ErrGRPCInvalidDowngradeTargetVersion)
ErrDowngradeInProcess = Error(ErrGRPCDowngradeInProcess)
ErrNoInflightDowngrade = Error(ErrGRPCNoInflightDowngrade)
)
// EtcdError defines gRPC server errors.
// (https://github.com/grpc/grpc-go/blob/master/rpc_util.go#L319-L323)
type EtcdError struct {
code codes.Code
desc string
}
// Code returns grpc/codes.Code.
// TODO: define clientv3/codes.Code.
func (e EtcdError) Code() codes.Code {
return e.code
}
func (e EtcdError) Error() string {
return e.desc
}
func Error(err error) error {
if err == nil {
return nil
}
verr, ok := errStringToError[ErrorDesc(err)]
if !ok { // not gRPC error
return err
}
ev, ok := status.FromError(verr)
var desc string
if ok {
desc = ev.Message()
} else {
desc = verr.Error()
}
return EtcdError{code: ev.Code(), desc: desc}
}
func ErrorDesc(err error) string {
if s, ok := status.FromError(err); ok {
return s.Message()
}
return err.Error()
}<|fim▁end|> | ErrValueProvided = Error(ErrGRPCValueProvided)
ErrLeaseProvided = Error(ErrGRPCLeaseProvided)
ErrTooManyOps = Error(ErrGRPCTooManyOps)
ErrDuplicateKey = Error(ErrGRPCDuplicateKey) |
<|file_name|>ViewConfiguration.cpp<|end_file_name|><|fim▁begin|>/**
* Copyright (c) 2013-2014 Quentin Smetz <[email protected]>, Sebastien
* Jodogne <[email protected]>
*
* Permission is hereby granted, free of charge, to any person
* obtaining a copy of this software and associated documentation
* files (the "Software"), to deal in the Software without
* restriction, including without limitation the rights to use, copy,
* modify, merge, publish, distribute, sublicense, and/or sell copies<|fim▁hole|> * of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
* BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
* ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
* CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
**/
//!
//! \file ViewConfiguration.cpp
//! \brief The ViewConfiguration.cpp file contains the definition of non-inline
//! methods of the ViewConfiguration class.
//!
//! \author Quentin Smetz
//!
#include "ViewConfiguration.h"
using namespace std;
// Constructor
ViewConfiguration::ViewConfiguration()
{}
// Destructor
ViewConfiguration::~ViewConfiguration()
{}
// The 'setHounsfield' method
void ViewConfiguration::setHounsfield(Range const& hounsfield, Range const& hounsfieldMaxRange)
{
m_hounsfield = hounsfield;
m_hounsfieldMaxRange = hounsfieldMaxRange;
}
// The 'setColormap' method
void ViewConfiguration::setColormap(Colormap const& colormap)
{
m_colormap = colormap;
}
// The 'setTranslation' method
void ViewConfiguration::setTranslation(Vector3D const& translation)
{
m_translation = translation;
}
// The 'setRotation' method
void ViewConfiguration::setRotation(Vector3D const& rotation)
{
m_rotation = rotation;
}<|fim▁end|> | |
<|file_name|>discord.rs<|end_file_name|><|fim▁begin|>use inth_oauth2::provider::Provider;
use inth_oauth2::token::{Bearer, Refresh};
use inth_oauth2::Client;
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub struct Discord;
#[derive(RustcDecodable)]<|fim▁hole|>pub struct DiscordUser {
pub username: String,
pub verified: bool,
pub mfa_enabled: bool,
pub id: String,
pub avatar: String,
pub discriminator: String,
pub email: String
}
impl Provider for Discord {
type Lifetime = Refresh;
type Token = Bearer<Refresh>;
fn auth_uri() -> &'static str { "https://discordapp.com/api/oauth2/authorize" }
fn token_uri() -> &'static str { "https://discordapp.com/api/oauth2/token" }
}
pub const DISCORD_SCOPES: &'static str = "identify email guilds";
pub fn get_client() -> Client<Discord> {
Client::<Discord>::new(
// XXX don't commit these to git that would be very bad
// String::from(""),
// String::from(""),
// Some(String::from(""))
)
}<|fim▁end|> | |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|>__VERSION__="ete2-2.2rev1026"
# -*- coding: utf-8 -*-
# #START_LICENSE###########################################################
#
#
# This file is part of the Environment for Tree Exploration program
# (ETE). http://ete.cgenomics.org
#
# ETE is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ETE is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
# or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
# License for more details.
#
# You should have received a copy of the GNU General Public License
# along with ETE. If not, see <http://www.gnu.org/licenses/>.
#
#
# ABOUT THE ETE PACKAGE
# =====================
#
# ETE is distributed under the GPL copyleft license (2008-2011).
#
# If you make use of ETE in published work, please cite:
#
# Jaime Huerta-Cepas, Joaquin Dopazo and Toni Gabaldon.
# ETE: a python Environment for Tree Exploration. Jaime BMC<|fim▁hole|># Bioinformatics 2010,:24doi:10.1186/1471-2105-11-24
#
# Note that extra references to the specific methods implemented in
# the toolkit are available in the documentation.
#
# More info at http://ete.cgenomics.org
#
#
# #END_LICENSE#############################################################
from clustertree import *
__all__ = clustertree.__all__<|fim▁end|> | |
<|file_name|>build.rs<|end_file_name|><|fim▁begin|>// Copyright 2020 The Exonum Team
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//<|fim▁hole|>// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
use exonum_build::ProtobufGenerator;
fn main() {
ProtobufGenerator::with_mod_name("protobuf_mod.rs")
.with_input_dir("src/proto")
.with_crypto()
.generate();
}<|fim▁end|> | |
<|file_name|>geturi.js<|end_file_name|><|fim▁begin|><|fim▁hole|> var vars = [], hash;
var hashes = window.location.href.slice(window.location.href.indexOf('?') + 1).split('&');
for (var i = 0; i < hashes.length; i++) {
hash = hashes[i].split('=');
vars.push(hash[0]);
vars[hash[0]] = hash[1];
}
return vars;
}<|fim▁end|> | function getUrlVars() { |
<|file_name|>comment_crlf_newline.rs<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | // rustfmt-normalize_comments: true
/* Block comments followed by CRLF newlines should not an extra newline at the end */
/* Something else */ |
<|file_name|>application.js<|end_file_name|><|fim▁begin|>// This is a manifest file that'll be compiled into application.js, which will include all the files
// listed below.
//
// Any JavaScript/Coffee file within this directory, lib/assets/javascripts, vendor/assets/javascripts,
// or any plugin's vendor/assets/javascripts directory can be referenced here using a relative path.
//
// It's not advisable to add code directly here, but if you do, it'll appear at the bottom of the
// compiled file.
//
// Read Sprockets README (https://github.com/rails/sprockets#sprockets-directives) for details<|fim▁hole|>//= require jquery.serializejson
//= require jquery.transit
//= require underscore
//= require moment
//= require backbone
//= require_tree ./utils
//= require maildog
//= require_tree ../templates
//= require_tree ./mixins
//= require_tree ./models
//= require_tree ./collections
//= require_tree ./views
//= require_tree ./routers
//= require_tree .<|fim▁end|> | // about supported directives.
//
//= require jquery
//= require jquery_ujs |
<|file_name|>simple_fun_#3_late_ride.py<|end_file_name|><|fim▁begin|>#Kunal Gautam
#Codewars : @Kunalpod
#Problem name: Simple Fun #3: Late Ride
#Problem level: 7 kyu
def late_ride(n):<|fim▁hole|><|fim▁end|> | return sum([int(x) for x in list(str(n//60)+str(n%60))]) |
<|file_name|>miner.cpp<|end_file_name|><|fim▁begin|>// Copyright (c) 2009-2010 Satoshi Nakamoto
// Copyright (c) 2009-2012 The Bitcoin developers
// Copyright (c) 2013 The NovaCoin developers
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include "db.h"
#include "miner.h"
#include "kernel.h"
using namespace std;
//////////////////////////////////////////////////////////////////////////////
//
// BitcoinMiner
//
string strMintMessage = "Info: Mining suspended due to locked wallet.";
string strMintWarning;
extern unsigned int nMinerSleep;
int static FormatHashBlocks(void* pbuffer, unsigned int len)
{
unsigned char* pdata = (unsigned char*)pbuffer;
unsigned int blocks = 1 + ((len + 8) / 64);
unsigned char* pend = pdata + 64 * blocks;
memset(pdata + len, 0, 64 * blocks - len);
pdata[len] = 0x80;
unsigned int bits = len * 8;
pend[-1] = (bits >> 0) & 0xff;
pend[-2] = (bits >> 8) & 0xff;
pend[-3] = (bits >> 16) & 0xff;
pend[-4] = (bits >> 24) & 0xff;
return blocks;
}
static const unsigned int pSHA256InitState[8] =
{0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19};
void SHA256Transform(void* pstate, void* pinput, const void* pinit)
{
SHA256_CTX ctx;
unsigned char data[64];
SHA256_Init(&ctx);
for (int i = 0; i < 16; i++)
((uint32_t*)data)[i] = ByteReverse(((uint32_t*)pinput)[i]);
for (int i = 0; i < 8; i++)
ctx.h[i] = ((uint32_t*)pinit)[i];
SHA256_Update(&ctx, data, sizeof(data));
for (int i = 0; i < 8; i++)
((uint32_t*)pstate)[i] = ctx.h[i];
}
// Some explaining would be appreciated
class COrphan
{
public:
CTransaction* ptx;
set<uint256> setDependsOn;
double dPriority;
double dFeePerKb;
COrphan(CTransaction* ptxIn)
{
ptx = ptxIn;
dPriority = dFeePerKb = 0;
}
void print() const
{<|fim▁hole|> }
};
uint64 nLastBlockTx = 0;
uint64 nLastBlockSize = 0;
int64 nLastCoinStakeSearchInterval = 0;
// We want to sort transactions by priority and fee, so:
typedef boost::tuple<double, double, CTransaction*> TxPriority;
class TxPriorityCompare
{
bool byFee;
public:
TxPriorityCompare(bool _byFee) : byFee(_byFee) { }
bool operator()(const TxPriority& a, const TxPriority& b)
{
if (byFee)
{
if (a.get<1>() == b.get<1>())
return a.get<0>() < b.get<0>();
return a.get<1>() < b.get<1>();
}
else
{
if (a.get<0>() == b.get<0>())
return a.get<1>() < b.get<1>();
return a.get<0>() < b.get<0>();
}
}
};
// CreateNewBlock: create new block (without proof-of-work/proof-of-stake)
CBlock* CreateNewBlock(CWallet* pwallet, bool fProofOfStake)
{
// Create new block
auto_ptr<CBlock> pblock(new CBlock());
if (!pblock.get())
return NULL;
// Create coinbase tx
CTransaction txNew;
txNew.vin.resize(1);
txNew.vin[0].prevout.SetNull();
txNew.vout.resize(1);
if (!fProofOfStake)
{
CReserveKey reservekey(pwallet);
txNew.vout[0].scriptPubKey << reservekey.GetReservedKey() << OP_CHECKSIG;
}
else
txNew.vout[0].SetEmpty();
// Add our coinbase tx as first transaction
pblock->vtx.push_back(txNew);
// Largest block you're willing to create:
unsigned int nBlockMaxSize = GetArg("-blockmaxsize", MAX_BLOCK_SIZE_GEN/2);
// Limit to betweeen 1K and MAX_BLOCK_SIZE-1K for sanity:
nBlockMaxSize = std::max((unsigned int)1000, std::min((unsigned int)(MAX_BLOCK_SIZE-1000), nBlockMaxSize));
// How much of the block should be dedicated to high-priority transactions,
// included regardless of the fees they pay
unsigned int nBlockPrioritySize = GetArg("-blockprioritysize", 11000);
nBlockPrioritySize = std::min(nBlockMaxSize, nBlockPrioritySize);
// Minimum block size you want to create; block will be filled with free transactions
// until there are no more or the block reaches this size:
unsigned int nBlockMinSize = GetArg("-blockminsize", 0);
nBlockMinSize = std::min(nBlockMaxSize, nBlockMinSize);
// Fee-per-kilobyte amount considered the same as "free"
// Be careful setting this: if you set it to zero then
// a transaction spammer can cheaply fill blocks using
// 1-satoshi-fee transactions. It should be set above the real
// cost to you of processing a transaction.
int64 nMinTxFee = MIN_TX_FEE;
if (mapArgs.count("-mintxfee"))
ParseMoney(mapArgs["-mintxfee"], nMinTxFee);
CBlockIndex* pindexPrev = pindexBest;
pblock->nBits = GetNextTargetRequired(pindexPrev, fProofOfStake);
// Collect memory pool transactions into the block
int64 nFees = 0;
{
LOCK2(cs_main, mempool.cs);
CCoinsViewCache view(*pcoinsTip, true);
// Priority order to process transactions
list<COrphan> vOrphan; // list memory doesn't move
map<uint256, vector<COrphan*> > mapDependers;
// This vector will be sorted into a priority queue:
vector<TxPriority> vecPriority;
vecPriority.reserve(mempool.mapTx.size());
for (map<uint256, CTransaction>::iterator mi = mempool.mapTx.begin(); mi != mempool.mapTx.end(); ++mi)
{
CTransaction& tx = (*mi).second;
if (tx.IsCoinBase() || tx.IsCoinStake() || !tx.IsFinal())
continue;
COrphan* porphan = NULL;
double dPriority = 0;
int64 nTotalIn = 0;
bool fMissingInputs = false;
BOOST_FOREACH(const CTxIn& txin, tx.vin)
{
// Read prev transaction
CCoins coins;
if (!view.GetCoins(txin.prevout.hash, coins))
{
// This should never happen; all transactions in the memory
// pool should connect to either transactions in the chain
// or other transactions in the memory pool.
if (!mempool.mapTx.count(txin.prevout.hash))
{
printf("ERROR: mempool transaction missing input\n");
if (fDebug) assert("mempool transaction missing input" == 0);
fMissingInputs = true;
if (porphan)
vOrphan.pop_back();
break;
}
// Has to wait for dependencies
if (!porphan)
{
// Use list for automatic deletion
vOrphan.push_back(COrphan(&tx));
porphan = &vOrphan.back();
}
mapDependers[txin.prevout.hash].push_back(porphan);
porphan->setDependsOn.insert(txin.prevout.hash);
nTotalIn += mempool.mapTx[txin.prevout.hash].vout[txin.prevout.n].nValue;
continue;
}
int64 nValueIn = coins.vout[txin.prevout.n].nValue;
nTotalIn += nValueIn;
int nConf = pindexPrev->nHeight - coins.nHeight;
dPriority += (double)nValueIn * nConf;
}
if (fMissingInputs) continue;
// Priority is sum(valuein * age) / txsize
unsigned int nTxSize = ::GetSerializeSize(tx, SER_NETWORK, PROTOCOL_VERSION);
dPriority /= nTxSize;
// This is a more accurate fee-per-kilobyte than is used by the client code, because the
// client code rounds up the size to the nearest 1K. That's good, because it gives an
// incentive to create smaller transactions.
double dFeePerKb = double(nTotalIn-tx.GetValueOut()) / (double(nTxSize)/1000.0);
if (porphan)
{
porphan->dPriority = dPriority;
porphan->dFeePerKb = dFeePerKb;
}
else
vecPriority.push_back(TxPriority(dPriority, dFeePerKb, &(*mi).second));
}
// Collect transactions into block
uint64 nBlockSize = 1000;
uint64 nBlockTx = 0;
int nBlockSigOps = 100;
bool fSortedByFee = (nBlockPrioritySize <= 0);
TxPriorityCompare comparer(fSortedByFee);
std::make_heap(vecPriority.begin(), vecPriority.end(), comparer);
while (!vecPriority.empty()) {
unsigned int nAdjTime = GetAdjustedTime();
// Take highest priority transaction off the priority queue:
double dPriority = vecPriority.front().get<0>();
double dFeePerKb = vecPriority.front().get<1>();
CTransaction& tx = *(vecPriority.front().get<2>());
std::pop_heap(vecPriority.begin(), vecPriority.end(), comparer);
vecPriority.pop_back();
// second layer cached modifications just for this transaction
CCoinsViewCache viewTemp(view, true);
// Size limits
unsigned int nTxSize = ::GetSerializeSize(tx, SER_NETWORK, PROTOCOL_VERSION);
if (nBlockSize + nTxSize >= nBlockMaxSize)
continue;
// Legacy limits on sigOps:
unsigned int nTxSigOps = tx.GetLegacySigOpCount();
if (nBlockSigOps + nTxSigOps >= MAX_BLOCK_SIGOPS)
continue;
// Timestamp limit
if ((tx.nTime > nAdjTime) || (fProofOfStake && tx.nTime > pblock->vtx[0].nTime))
continue;
// Simplify transaction fee - allow free = false
int64 nMinFee = tx.GetMinFee(nBlockSize, false, GMF_BLOCK);
// Skip free transactions if we're past the minimum block size:
if (fSortedByFee && (dFeePerKb < nMinTxFee) && (nBlockSize + nTxSize >= nBlockMinSize))
continue;
// Prioritize by fee once past the priority size or we run out of high-priority
// transactions:
if (!fSortedByFee &&
((nBlockSize + nTxSize >= nBlockPrioritySize) || (dPriority < COIN * 144 / 250)))
{
fSortedByFee = true;
comparer = TxPriorityCompare(fSortedByFee);
std::make_heap(vecPriority.begin(), vecPriority.end(), comparer);
}
if (!tx.CheckInputs(viewTemp, CS_ALWAYS, true, false))
continue;
int64 nTxFees = tx.GetValueIn(viewTemp)-tx.GetValueOut();
if (nTxFees < nMinFee)
continue;
nTxSigOps += tx.GetP2SHSigOpCount(viewTemp);
if (nBlockSigOps + nTxSigOps >= MAX_BLOCK_SIGOPS)
continue;
/*
* We need to call UpdateCoins using actual block timestamp, so don't perform this here.
*
CTxUndo txundo;
if (!tx.UpdateCoins(viewTemp, txundo, pindexPrev->nHeight+1, pblock->nTime))
continue;
*/
// push changes from the second layer cache to the first one
viewTemp.Flush();
uint256 hash = tx.GetHash();
// Added
pblock->vtx.push_back(tx);
nBlockSize += nTxSize;
++nBlockTx;
nBlockSigOps += nTxSigOps;
nFees += nTxFees;
if (fDebug && GetBoolArg("-printpriority"))
{
printf("priority %.1f feeperkb %.1f txid %s\n",
dPriority, dFeePerKb, tx.GetHash().ToString().c_str());
}
// Add transactions that depend on this one to the priority queue
if (mapDependers.count(hash))
{
BOOST_FOREACH(COrphan* porphan, mapDependers[hash])
{
if (!porphan->setDependsOn.empty())
{
porphan->setDependsOn.erase(hash);
if (porphan->setDependsOn.empty())
{
vecPriority.push_back(TxPriority(porphan->dPriority, porphan->dFeePerKb, porphan->ptx));
std::push_heap(vecPriority.begin(), vecPriority.end(), comparer);
}
}
}
}
}
nLastBlockTx = nBlockTx;
nLastBlockSize = nBlockSize;
if (fDebug && GetBoolArg("-printpriority"))
printf("CreateNewBlock(): total size %"PRI64u"\n", nBlockSize);
if (!fProofOfStake)
pblock->vtx[0].vout[0].nValue = GetProofOfWorkReward(pindexPrev->nHeight, nFees);
// Fill in header
pblock->hashPrevBlock = pindexPrev->GetBlockHash();
pblock->nTime = max(pindexPrev->GetMedianTimePast()+1, pblock->GetMaxTransactionTime());
pblock->nTime = max(pblock->GetBlockTime(), PastDrift(pindexPrev->GetBlockTime()));
if (!fProofOfStake)
pblock->UpdateTime(pindexPrev);
pblock->nNonce = 0;
}
return pblock.release();
}
void IncrementExtraNonce(CBlock* pblock, CBlockIndex* pindexPrev, unsigned int& nExtraNonce)
{
// Update nExtraNonce
static uint256 hashPrevBlock;
if (hashPrevBlock != pblock->hashPrevBlock)
{
nExtraNonce = 0;
hashPrevBlock = pblock->hashPrevBlock;
}
++nExtraNonce;
unsigned int nHeight = pindexPrev->nHeight+1; // Height first in coinbase required
pblock->vtx[0].vin[0].scriptSig = (CScript() << nHeight << CBigNum(nExtraNonce)) + COINBASE_FLAGS;
assert(pblock->vtx[0].vin[0].scriptSig.size() <= 100);
pblock->hashMerkleRoot = pblock->BuildMerkleTree();
}
void FormatHashBuffers(CBlock* pblock, char* pmidstate, char* pdata, char* phash1)
{
//
// Pre-build hash buffers
//
struct
{
struct unnamed2
{
int nVersion;
uint256 hashPrevBlock;
uint256 hashMerkleRoot;
unsigned int nTime;
unsigned int nBits;
unsigned int nNonce;
}
block;
unsigned char pchPadding0[64];
uint256 hash1;
unsigned char pchPadding1[64];
}
tmp;
memset(&tmp, 0, sizeof(tmp));
tmp.block.nVersion = pblock->nVersion;
tmp.block.hashPrevBlock = pblock->hashPrevBlock;
tmp.block.hashMerkleRoot = pblock->hashMerkleRoot;
tmp.block.nTime = pblock->nTime;
tmp.block.nBits = pblock->nBits;
tmp.block.nNonce = pblock->nNonce;
FormatHashBlocks(&tmp.block, sizeof(tmp.block));
FormatHashBlocks(&tmp.hash1, sizeof(tmp.hash1));
// Byte swap all the input buffer
for (unsigned int i = 0; i < sizeof(tmp)/4; i++)
((unsigned int*)&tmp)[i] = ByteReverse(((unsigned int*)&tmp)[i]);
// Precalc the first half of the first hash, which stays constant
SHA256Transform(pmidstate, &tmp.block, pSHA256InitState);
memcpy(pdata, &tmp.block, 128);
memcpy(phash1, &tmp.hash1, 64);
}
bool CheckWork(CBlock* pblock, CWallet& wallet, CReserveKey& reservekey)
{
uint256 hashBlock = pblock->GetHash();
uint256 hashTarget = CBigNum().SetCompact(pblock->nBits).getuint256();
if(!pblock->IsProofOfWork())
return error("CheckWork() : %s is not a proof-of-work block", hashBlock.GetHex().c_str());
if (hashBlock > hashTarget)
return error("CheckWork() : proof-of-work not meeting target");
//// debug print
printf("CheckWork() : new proof-of-work block found \n hash: %s \ntarget: %s\n", hashBlock.GetHex().c_str(), hashTarget.GetHex().c_str());
pblock->print();
printf("generated %s\n", FormatMoney(pblock->vtx[0].vout[0].nValue).c_str());
// Found a solution
{
LOCK(cs_main);
if (pblock->hashPrevBlock != hashBestChain)
return error("CheckWork() : generated block is stale");
// Remove key from key pool
reservekey.KeepKey();
// Track how many getdata requests this block gets
{
LOCK(wallet.cs_wallet);
wallet.mapRequestCount[hashBlock] = 0;
}
// Process this block the same as if we had received it from another node
if (!ProcessBlock(NULL, pblock))
return error("CheckWork() : ProcessBlock, block not accepted");
}
return true;
}
bool CheckStake(CBlock* pblock, CWallet& wallet)
{
uint256 proofHash = 0, hashTarget = 0;
uint256 hashBlock = pblock->GetHash();
bool fFatal = false;
if(!pblock->IsProofOfStake())
return error("CheckStake() : %s is not a proof-of-stake block", hashBlock.GetHex().c_str());
// verify hash target and signature of coinstake tx
if (!CheckProofOfStake(pblock->vtx[1], pblock->nBits, proofHash, hashTarget, fFatal, true))
return error("CheckStake() : proof-of-stake checking failed");
//// debug print
printf("CheckStake() : new proof-of-stake block found \n hash: %s \nproofhash: %s \ntarget: %s\n", hashBlock.GetHex().c_str(), proofHash.GetHex().c_str(), hashTarget.GetHex().c_str());
pblock->print();
printf("out %s\n", FormatMoney(pblock->vtx[1].GetValueOut()).c_str());
// Found a solution
{
LOCK(cs_main);
if (pblock->hashPrevBlock != hashBestChain)
return error("CheckStake() : generated block is stale");
// Track how many getdata requests this block gets
{
LOCK(wallet.cs_wallet);
wallet.mapRequestCount[hashBlock] = 0;
}
// Process this block the same as if we had received it from another node
if (!ProcessBlock(NULL, pblock))
return error("CheckStake() : ProcessBlock, block not accepted");
}
return true;
}
void StakeMiner(CWallet *pwallet)
{
SetThreadPriority(THREAD_PRIORITY_LOWEST);
// Make this thread recognisable as the mining thread
RenameThread("novacoin-miner");
// Each thread has its own counter
unsigned int nExtraNonce = 0;
while (true)
{
if (fShutdown)
return;
while (pwallet->IsLocked())
{
strMintWarning = strMintMessage;
Sleep(1000);
if (fShutdown)
return;
}
while (vNodes.empty() || IsInitialBlockDownload())
{
Sleep(1000);
if (fShutdown)
return;
}
strMintWarning = "";
//
// Create new block
//
CBlockIndex* pindexPrev = pindexBest;
auto_ptr<CBlock> pblock(CreateNewBlock(pwallet, true));
if (!pblock.get())
return;
IncrementExtraNonce(pblock.get(), pindexPrev, nExtraNonce);
// Trying to sign a block
if (pblock->SignBlock(*pwallet))
{
strMintWarning = _("Stake generation: new block found!");
SetThreadPriority(THREAD_PRIORITY_NORMAL);
CheckStake(pblock.get(), *pwallet);
SetThreadPriority(THREAD_PRIORITY_LOWEST);
Sleep(1000);
}
else
Sleep(nMinerSleep);
}
}<|fim▁end|> | printf("COrphan(hash=%s, dPriority=%.1f, dFeePerKb=%.1f)\n",
ptx->GetHash().ToString().substr(0,10).c_str(), dPriority, dFeePerKb);
BOOST_FOREACH(uint256 hash, setDependsOn)
printf(" setDependsOn %s\n", hash.ToString().substr(0,10).c_str()); |
<|file_name|>cliffWalk_QL.py<|end_file_name|><|fim▁begin|># solve cliff-walking task with Q-Learning, very similar to SARSA
# original example problem from the book, introduction for reinforcement learning
# Author: Wenbin Li
# numeric backend
import pygame
from pygame.locals import *
import numpy as np
grid_size = 100
n_row = 4
n_col = 12
state = np.zeros((n_row * grid_size, n_col * grid_size))
step_size = 0.5
epsilon = 0.1 # parameter for epislon-greedy
N_actions = 4 # number of actions {left,up,right,down}
N_episodes = 600 # number of episodes
# as suggested by the book, reach optimality by 8000 time steps
# rewards of -1 until the goal state is reached
# -100 for entering cliff region and instantly return to starting position
# specify goal location
goal_r = 3
goal_c = 11
# specify start location
start_r = 3
start_c = 0
# initialize state-action value function
q = np.zeros((n_row,n_col,N_actions)) # num_row by num_col by num_states
# Note: Q(terminal-state,.) = 0
# undiscounted and episodic task
n_steps = 0
n_episodes = 0
# epsilon-greedy strategy
def ep_greedy(epsilon,num_actions,q,i,j):
roll = np.random.uniform(0,1)
# epsilon-greedy strategy
if roll < epsilon: # exploration
a = np.random.randint(0,num_actions)
else: # exploitation
a = np.argmax(q[i,j,:])
return a
# translate action into state-change
def action2state(i,j,a):
# Note: coordintate system start from the upper-left corner and
# right/downwards are the positive direction
if a == 0: # to left
i_next = i
j_next = j - 1
elif a == 1: # upwards
i_next = i - 1
j_next = j
elif a == 2: # to right
i_next = i
j_next = j + 1
else: # downwards
i_next = i + 1
j_next = j
return i_next,j_next
# Sarsa method
while n_episodes < N_episodes:
# begin of an episode
i = start_r
j = start_c
# end of an episode
n_episodes += 1
print "episode ",str(n_episodes),"..."
while True:
n_steps += 1
# print " step ",str(n_steps),"..."
# choose A from S using policy derived from Q (epsilon-greedy)
a = ep_greedy(epsilon,N_actions,q,i,j)
# translate action into state-change with windy effect
i_next,j_next = action2state(i,j,a)
# update the state-action value function with Sarsa/Q-Learning of choice
# state transitions end in the goal state
# state should be in the range of the gridworld
if i_next == goal_r and j_next == goal_c: # reach the goal position
# q[i,j] = q[i,j] + step_size * (-1 + 0 - q[i,j]) #the Q(terminal,.) = 0
q[i,j,a] = q[i,j,a] + step_size * (-1 + 0 - q[i,j,a]) #the Q(terminal,.) = 0
# Note, transition from noterminal to terminal also gets reward of -1 in this case
break
# different reward/consequence when entering the cliff region
elif i_next == 3 and j_next > 1 and j_next < n_col - 1:
i_next = start_r
j_next = start_c
r = -100
elif i_next < 0 or i_next > n_row -1:
i_next = i
r = -1
elif j_next < 0 or j_next > n_col - 1:
j_next = j
r = -1
else:
r = -1
# a_next = ep_greedy(epsilon,N_actions,q,i_next,j_next)
q[i,j,a] = q[i,j,a] + step_size * (r + max(q[i_next,j_next,:]) - q[i,j,a])
i = i_next
j = j_next
# visualize the solution/GUI-backend
# plot the gridworld as background
# (optional) mark wind direction
pygame.init()<|fim▁hole|>pygame.display.set_mode((n_col * grid_size,n_row * grid_size))
pygame.display.set_caption('Cliff Walking')
screen = pygame.display.get_surface()
surface = pygame.Surface(screen.get_size())
bg = pygame.Surface(screen.get_size())
# draw background, with mark on start/end states & cliff region
def draw_bg(surface,n_row,n_col,grid_size,start_r,start_c,goal_r,goal_c):
for i in range(n_col):
for j in range(n_row):
x = i * grid_size
y = j * grid_size
coords = pygame.Rect(x,y,grid_size,grid_size)
pygame.draw.rect(surface,(255,255,255),coords,1)
# draw start state
pygame.draw.circle(surface,(192,192,192),(start_c * grid_size + grid_size/2,
start_r * grid_size + grid_size/2),grid_size/4)
# draw goal state
pygame.draw.circle(surface,(102,204,0),(goal_c * grid_size + grid_size/2,
goal_r * grid_size + grid_size/2),grid_size/4)
# draw cliff region
x = 1 * grid_size
y = 3 * grid_size
coords = pygame.Rect(x,y,grid_size*10,grid_size)
pygame.draw.rect(surface,(192,192,192),coords)
# use state-action function to find one-step optimal policy
def step_q(q,s_r,s_c,n_row,n_col):
print "state-action value:"
print q[s_r,s_c,:]
a = np.argmax(q[s_r,s_c,:]) # greedy only
# display debug
if a == 0:
print "move left"
elif a == 1:
print "move upward"
elif a == 2:
print "move right"
else:
print "move downwards"
s_r_next,s_c_next = action2state(s_r,s_c,a)
# define rules especially when the agent enter the cliff region
if s_r_next == 3 and s_c_next > 1 and s_c_next < n_col - 1:
s_r_next = start_r
s_c_next = start_c
# in theory, the produced optimal policy should not enter this branch
elif s_r_next < 0 or s_r_next > n_row -1:
s_r_next = s_r
elif s_c_next < 0 or s_c_next > n_col - 1:
s_c_next = s_c
return s_r_next,s_c_next
s_r = start_r
s_c = start_c
while True:
for event in pygame.event.get():
if event.type == pygame.QUIT:
pygame.quit()
# draw gridworld background
draw_bg(bg,n_row,n_col,grid_size,start_r,start_c,goal_r,goal_c)
screen.blit(bg,(0,0))
# draw the state of the agent, i.e. the path (start --> end) as the foreground
surface.fill((0,0,0))
# use state-action function to find a optimal policy
# in the loop, should provide a step function
#print (s_r,s_c)
s_r_next,s_c_next = step_q(q,s_r,s_c,n_row,n_col)
#print (s_r_next,s_c_next)
if s_r_next != goal_r or s_c_next != goal_c:
pygame.draw.circle(surface,(255,255,255),(s_c_next * grid_size + grid_size/2,
s_r_next * grid_size + grid_size/2),grid_size/4)
bg.blit(surface,(0,0))
pygame.display.flip() # update
pygame.time.delay(1000)
s_r,s_c = s_r_next,s_c_next # update coordinate<|fim▁end|> | |
<|file_name|>spsc_queue.rs<|end_file_name|><|fim▁begin|>// Copyright 2017 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! A single-producer single-consumer concurrent queue
//!
//! This module contains the implementation of an SPSC queue which can be used
//! concurrently between two threads. This data structure is safe to use and
//! enforces the semantics that there is one pusher and one popper.
// http://www.1024cores.net/home/lock-free-algorithms/queues/unbounded-spsc-queue
use alloc::boxed::Box;
use core::ptr;
use core::cell::UnsafeCell;
use sync::atomic::{AtomicPtr, AtomicUsize, Ordering};
use super::cache_aligned::CacheAligned;
// Node within the linked list queue of messages to send
struct Node<T> {
// FIXME: this could be an uninitialized T if we're careful enough, and
// that would reduce memory usage (and be a bit faster).
// is it worth it?
value: Option<T>, // nullable for re-use of nodes
cached: bool, // This node goes into the node cache
next: AtomicPtr<Node<T>>, // next node in the queue
}
/// The single-producer single-consumer queue. This structure is not cloneable,
/// but it can be safely shared in an Arc if it is guaranteed that there
/// is only one popper and one pusher touching the queue at any one point in
/// time.
pub struct Queue<T, ProducerAddition=(), ConsumerAddition=()> {
// consumer fields
consumer: CacheAligned<Consumer<T, ConsumerAddition>>,
// producer fields
producer: CacheAligned<Producer<T, ProducerAddition>>,
}
struct Consumer<T, Addition> {
tail: UnsafeCell<*mut Node<T>>, // where to pop from
tail_prev: AtomicPtr<Node<T>>, // where to pop from
cache_bound: usize, // maximum cache size
cached_nodes: AtomicUsize, // number of nodes marked as cachable
addition: Addition,
}
struct Producer<T, Addition> {
head: UnsafeCell<*mut Node<T>>, // where to push to
first: UnsafeCell<*mut Node<T>>, // where to get new nodes from
tail_copy: UnsafeCell<*mut Node<T>>, // between first/tail
addition: Addition,
}
unsafe impl<T: Send, P: Send + Sync, C: Send + Sync> Send for Queue<T, P, C> { }
unsafe impl<T: Send, P: Send + Sync, C: Send + Sync> Sync for Queue<T, P, C> { }
impl<T> Node<T> {
fn new() -> *mut Node<T> {
Box::into_raw(box Node {
value: None,
cached: false,
next: AtomicPtr::new(ptr::null_mut::<Node<T>>()),
})
}
}
impl<T, ProducerAddition, ConsumerAddition> Queue<T, ProducerAddition, ConsumerAddition> {
/// Creates a new queue. With given additional elements in the producer and
/// consumer portions of the queue.
///
/// Due to the performance implications of cache-contention,
/// we wish to keep fields used mainly by the producer on a separate cache
/// line than those used by the consumer.
/// Since cache lines are usually 64 bytes, it is unreasonably expensive to
/// allocate one for small fields, so we allow users to insert additional
/// fields into the cache lines already allocated by this for the producer
/// and consumer.
///
/// This is unsafe as the type system doesn't enforce a single
/// consumer-producer relationship. It also allows the consumer to `pop`
/// items while there is a `peek` active due to all methods having a
/// non-mutable receiver.
///
/// # Arguments
///
/// * `bound` - This queue implementation is implemented with a linked
/// list, and this means that a push is always a malloc. In
/// order to amortize this cost, an internal cache of nodes is
/// maintained to prevent a malloc from always being
/// necessary. This bound is the limit on the size of the
/// cache (if desired). If the value is 0, then the cache has
/// no bound. Otherwise, the cache will never grow larger than
/// `bound` (although the queue itself could be much larger.
pub unsafe fn with_additions(
bound: usize,
producer_addition: ProducerAddition,
consumer_addition: ConsumerAddition,
) -> Self {
let n1 = Node::new();
let n2 = Node::new();
(*n1).next.store(n2, Ordering::Relaxed);
Queue {
consumer: CacheAligned::new(Consumer {
tail: UnsafeCell::new(n2),
tail_prev: AtomicPtr::new(n1),
cache_bound: bound,
cached_nodes: AtomicUsize::new(0),
addition: consumer_addition
}),
producer: CacheAligned::new(Producer {
head: UnsafeCell::new(n2),
first: UnsafeCell::new(n1),
tail_copy: UnsafeCell::new(n1),
addition: producer_addition
}),
}
}
/// Pushes a new value onto this queue. Note that to use this function
/// safely, it must be externally guaranteed that there is only one pusher.
pub fn push(&self, t: T) {
unsafe {
// Acquire a node (which either uses a cached one or allocates a new
// one), and then append this to the 'head' node.
let n = self.alloc();
assert!((*n).value.is_none());
(*n).value = Some(t);
(*n).next.store(ptr::null_mut(), Ordering::Relaxed);
(**self.producer.head.get()).next.store(n, Ordering::Release);
*(&self.producer.head).get() = n;
}
}
unsafe fn alloc(&self) -> *mut Node<T> {
// First try to see if we can consume the 'first' node for our uses.
if *self.producer.first.get() != *self.producer.tail_copy.get() {
let ret = *self.producer.first.get();
*self.producer.0.first.get() = (*ret).next.load(Ordering::Relaxed);
return ret;
}
// If the above fails, then update our copy of the tail and try
// again.
*self.producer.0.tail_copy.get() =
self.consumer.tail_prev.load(Ordering::Acquire);
if *self.producer.first.get() != *self.producer.tail_copy.get() {
let ret = *self.producer.first.get();
*self.producer.0.first.get() = (*ret).next.load(Ordering::Relaxed);
return ret;
}
// If all of that fails, then we have to allocate a new node
// (there's nothing in the node cache).
Node::new()
}
/// Attempts to pop a value from this queue. Remember that to use this type
/// safely you must ensure that there is only one popper at a time.
pub fn pop(&self) -> Option<T> {
unsafe {
// The `tail` node is not actually a used node, but rather a
// sentinel from where we should start popping from. Hence, look at
// tail's next field and see if we can use it. If we do a pop, then
// the current tail node is a candidate for going into the cache.
let tail = *self.consumer.tail.get();
let next = (*tail).next.load(Ordering::Acquire);
if next.is_null() { return None }
assert!((*next).value.is_some());
let ret = (*next).value.take();
*self.consumer.0.tail.get() = next;
if self.consumer.cache_bound == 0 {
self.consumer.tail_prev.store(tail, Ordering::Release);
} else {
let cached_nodes = self.consumer.cached_nodes.load(Ordering::Relaxed);
if cached_nodes < self.consumer.cache_bound && !(*tail).cached {
self.consumer.cached_nodes.store(cached_nodes, Ordering::Relaxed);
(*tail).cached = true;
}<|fim▁hole|> } else {
(*self.consumer.tail_prev.load(Ordering::Relaxed))
.next.store(next, Ordering::Relaxed);
// We have successfully erased all references to 'tail', so
// now we can safely drop it.
let _: Box<Node<T>> = Box::from_raw(tail);
}
}
ret
}
}
/// Attempts to peek at the head of the queue, returning `None` if the queue
/// has no data currently
///
/// # Warning
/// The reference returned is invalid if it is not used before the consumer
/// pops the value off the queue. If the producer then pushes another value
/// onto the queue, it will overwrite the value pointed to by the reference.
pub fn peek(&self) -> Option<&mut T> {
// This is essentially the same as above with all the popping bits
// stripped out.
unsafe {
let tail = *self.consumer.tail.get();
let next = (*tail).next.load(Ordering::Acquire);
if next.is_null() { None } else { (*next).value.as_mut() }
}
}
pub fn producer_addition(&self) -> &ProducerAddition {
&self.producer.addition
}
pub fn consumer_addition(&self) -> &ConsumerAddition {
&self.consumer.addition
}
}
impl<T, ProducerAddition, ConsumerAddition> Drop for Queue<T, ProducerAddition, ConsumerAddition> {
fn drop(&mut self) {
unsafe {
let mut cur = *self.producer.first.get();
while !cur.is_null() {
let next = (*cur).next.load(Ordering::Relaxed);
let _n: Box<Node<T>> = Box::from_raw(cur);
cur = next;
}
}
}
}
#[cfg(all(test, not(target_os = "emscripten")))]
mod tests {
use sync::Arc;
use super::Queue;
use thread;
use sync::mpsc::channel;
#[test]
fn smoke() {
unsafe {
let queue = Queue::with_additions(0, (), ());
queue.push(1);
queue.push(2);
assert_eq!(queue.pop(), Some(1));
assert_eq!(queue.pop(), Some(2));
assert_eq!(queue.pop(), None);
queue.push(3);
queue.push(4);
assert_eq!(queue.pop(), Some(3));
assert_eq!(queue.pop(), Some(4));
assert_eq!(queue.pop(), None);
}
}
#[test]
fn peek() {
unsafe {
let queue = Queue::with_additions(0, (), ());
queue.push(vec![1]);
// Ensure the borrowchecker works
match queue.peek() {
Some(vec) => {
assert_eq!(&*vec, &[1]);
},
None => unreachable!()
}
match queue.pop() {
Some(vec) => {
assert_eq!(&*vec, &[1]);
},
None => unreachable!()
}
}
}
#[test]
fn drop_full() {
unsafe {
let q: Queue<Box<_>> = Queue::with_additions(0, (), ());
q.push(box 1);
q.push(box 2);
}
}
#[test]
fn smoke_bound() {
unsafe {
let q = Queue::with_additions(0, (), ());
q.push(1);
q.push(2);
assert_eq!(q.pop(), Some(1));
assert_eq!(q.pop(), Some(2));
assert_eq!(q.pop(), None);
q.push(3);
q.push(4);
assert_eq!(q.pop(), Some(3));
assert_eq!(q.pop(), Some(4));
assert_eq!(q.pop(), None);
}
}
#[test]
fn stress() {
unsafe {
stress_bound(0);
stress_bound(1);
}
unsafe fn stress_bound(bound: usize) {
let q = Arc::new(Queue::with_additions(bound, (), ()));
let (tx, rx) = channel();
let q2 = q.clone();
let _t = thread::spawn(move|| {
for _ in 0..100000 {
loop {
match q2.pop() {
Some(1) => break,
Some(_) => panic!(),
None => {}
}
}
}
tx.send(()).unwrap();
});
for _ in 0..100000 {
q.push(1);
}
rx.recv().unwrap();
}
}
}<|fim▁end|> |
if (*tail).cached {
self.consumer.tail_prev.store(tail, Ordering::Release); |
<|file_name|>config_flow.py<|end_file_name|><|fim▁begin|>"""Config flow for Google Maps Travel Time integration."""
from __future__ import annotations
import logging
import voluptuous as vol
from homeassistant import config_entries
from homeassistant.const import CONF_API_KEY, CONF_MODE, CONF_NAME
from homeassistant.core import callback
import homeassistant.helpers.config_validation as cv
from .const import (
ALL_LANGUAGES,
ARRIVAL_TIME,
AVOID,
CONF_ARRIVAL_TIME,
CONF_AVOID,
CONF_DEPARTURE_TIME,
CONF_DESTINATION,
CONF_LANGUAGE,
CONF_ORIGIN,
CONF_TIME,
CONF_TIME_TYPE,
CONF_TRAFFIC_MODEL,
CONF_TRANSIT_MODE,
CONF_TRANSIT_ROUTING_PREFERENCE,
CONF_UNITS,
DEFAULT_NAME,
DEPARTURE_TIME,
DOMAIN,
TIME_TYPES,
TRANSIT_PREFS,
TRANSPORT_TYPE,
TRAVEL_MODE,
TRAVEL_MODEL,
UNITS,
)
from .helpers import is_valid_config_entry
_LOGGER = logging.getLogger(__name__)
class GoogleOptionsFlow(config_entries.OptionsFlow):
"""Handle an options flow for Google Travel Time."""
def __init__(self, config_entry: config_entries.ConfigEntry) -> None:
"""Initialize google options flow."""
self.config_entry = config_entry
async def async_step_init(self, user_input=None):
"""Handle the initial step."""
if user_input is not None:
time_type = user_input.pop(CONF_TIME_TYPE)
if time := user_input.pop(CONF_TIME, None):
if time_type == ARRIVAL_TIME:
user_input[CONF_ARRIVAL_TIME] = time
else:
user_input[CONF_DEPARTURE_TIME] = time
return self.async_create_entry(
title="",
data={k: v for k, v in user_input.items() if v not in (None, "")},
)
if CONF_ARRIVAL_TIME in self.config_entry.options:
default_time_type = ARRIVAL_TIME
default_time = self.config_entry.options[CONF_ARRIVAL_TIME]
else:
default_time_type = DEPARTURE_TIME
default_time = self.config_entry.options.get(CONF_ARRIVAL_TIME, "")
return self.async_show_form(
step_id="init",
data_schema=vol.Schema(
{
vol.Optional(
CONF_MODE, default=self.config_entry.options[CONF_MODE]
): vol.In(TRAVEL_MODE),
vol.Optional(
CONF_LANGUAGE,
default=self.config_entry.options.get(CONF_LANGUAGE),
): vol.In([None, *ALL_LANGUAGES]),
vol.Optional(
CONF_AVOID, default=self.config_entry.options.get(CONF_AVOID)
): vol.In([None, *AVOID]),
vol.Optional(
CONF_UNITS, default=self.config_entry.options[CONF_UNITS]
): vol.In(UNITS),
vol.Optional(CONF_TIME_TYPE, default=default_time_type): vol.In(
TIME_TYPES
),
vol.Optional(CONF_TIME, default=default_time): cv.string,
vol.Optional(
CONF_TRAFFIC_MODEL,
default=self.config_entry.options.get(CONF_TRAFFIC_MODEL),
): vol.In([None, *TRAVEL_MODEL]),
vol.Optional(
CONF_TRANSIT_MODE,
default=self.config_entry.options.get(CONF_TRANSIT_MODE),
): vol.In([None, *TRANSPORT_TYPE]),
vol.Optional(
CONF_TRANSIT_ROUTING_PREFERENCE,
default=self.config_entry.options.get(
CONF_TRANSIT_ROUTING_PREFERENCE
),
): vol.In([None, *TRANSIT_PREFS]),
}
),
)
class ConfigFlow(config_entries.ConfigFlow, domain=DOMAIN):
"""Handle a config flow for Google Maps Travel Time."""
VERSION = 1
@staticmethod
@callback
def async_get_options_flow(
config_entry: config_entries.ConfigEntry,
) -> GoogleOptionsFlow:
"""Get the options flow for this handler."""
return GoogleOptionsFlow(config_entry)
async def async_step_user(self, user_input=None):
"""Handle the initial step."""
errors = {}
user_input = user_input or {}
if user_input:
if await self.hass.async_add_executor_job(
is_valid_config_entry,
self.hass,
_LOGGER,
user_input[CONF_API_KEY],
user_input[CONF_ORIGIN],
user_input[CONF_DESTINATION],
):<|fim▁hole|> return self.async_create_entry(
title=user_input.get(CONF_NAME, DEFAULT_NAME),
data=user_input,
)
# If we get here, it's because we couldn't connect
errors["base"] = "cannot_connect"
return self.async_show_form(
step_id="user",
data_schema=vol.Schema(
{
vol.Required(
CONF_NAME, default=user_input.get(CONF_NAME, DEFAULT_NAME)
): cv.string,
vol.Required(CONF_API_KEY): cv.string,
vol.Required(CONF_DESTINATION): cv.string,
vol.Required(CONF_ORIGIN): cv.string,
}
),
errors=errors,
)<|fim▁end|> | |
<|file_name|>005reset.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import atexit
import argparse
import getpass
import sys
import textwrap
import time
from pyVim import connect
from pyVmomi import vim
import requests
requests.packages.urllib3.disable_warnings()
import ssl
try:
_create_unverified_https_context = ssl._create_unverified_context
except AttributeError:
# Legacy Python that doesn't verify HTTPS certificates by default
pass
else:
# Handle target environment that doesn't support HTTPS verification
ssl._create_default_https_context = _create_unverified_https_context
def get_args():
parser = argparse.ArgumentParser()
# because -h is reserved for 'help' we use -s for service
parser.add_argument('-s', '--host',
required=True,
action='store',
help='vSphere service to connect to')
# because we want -p for password, we use -o for port
parser.add_argument('-o', '--port',
type=int,
default=443,
action='store',
help='Port to connect on')
parser.add_argument('-u', '--user',
required=True,
action='store',
help='User name to use when connecting to host')
parser.add_argument('-p', '--password',
required=False,
action='store',
help='Password to use when connecting to host')
parser.add_argument('-n', '--name',
required=True,
action='store',
help='Name of the virtual_machine to look for.')
args = parser.parse_args()
if not args.password:
args.password = getpass.getpass(
prompt='Enter password for host %s and user %s: ' %<|fim▁hole|>
def _create_char_spinner():
"""Creates a generator yielding a char based spinner.
"""
while True:
for c in '|/-\\':
yield c
_spinner = _create_char_spinner()
def spinner(label=''):
"""Prints label with a spinner.
When called repeatedly from inside a loop this prints
a one line CLI spinner.
"""
sys.stdout.write("\r\t%s %s" % (label, _spinner.next()))
sys.stdout.flush()
def answer_vm_question(virtual_machine):
print "\n"
choices = virtual_machine.runtime.question.choice.choiceInfo
default_option = None
if virtual_machine.runtime.question.choice.defaultIndex is not None:
ii = virtual_machine.runtime.question.choice.defaultIndex
default_option = choices[ii]
choice = None
while choice not in [o.key for o in choices]:
print "VM power on is paused by this question:\n\n"
print "\n".join(textwrap.wrap(
virtual_machine.runtime.question.text, 60))
for option in choices:
print "\t %s: %s " % (option.key, option.label)
if default_option is not None:
print "default (%s): %s\n" % (default_option.label,
default_option.key)
choice = raw_input("\nchoice number: ").strip()
print "..."
return choice
# form a connection...
args = get_args()
si = connect.SmartConnect(host=args.host, user=args.user, pwd=args.password,
port=args.port)
# doing this means you don't need to remember to disconnect your script/objects
atexit.register(connect.Disconnect, si)
# search the whole inventory tree recursively... a brutish but effective tactic
vm = None
entity_stack = si.content.rootFolder.childEntity
while entity_stack:
entity = entity_stack.pop()
if entity.name == args.name:
vm = entity
del entity_stack[0:len(entity_stack)]
elif hasattr(entity, 'childEntity'):
entity_stack.extend(entity.childEntity)
elif isinstance(entity, vim.Datacenter):
entity_stack.append(entity.vmFolder)
if not isinstance(vm, vim.VirtualMachine):
print "could not find a virtual machine with the name %s" % args.name
sys.exit(-1)
print "Found VirtualMachine: %s Name: %s" % (vm, vm.name)
if vm.runtime.powerState == vim.VirtualMachinePowerState.poweredOn:
# using time.sleep we just wait until the power off action
# is complete. Nothing fancy here.
print "reset the vm"
task = vm.ResetVM_Task()
while task.info.state not in [vim.TaskInfo.State.success,
vim.TaskInfo.State.error]:
time.sleep(1)
print "resetting vm ..."
sys.exit(0)<|fim▁end|> | (args.host, args.user))
return args |
<|file_name|>main.go<|end_file_name|><|fim▁begin|>package main
import (
"fmt"
"os"
"os/signal"
"syscall"
"github.com/codegangsta/cli"
"github.com/yudai/gotty/app"
)
func main() {
cmd := cli.NewApp()
cmd.Version = "0.0.9"
cmd.Name = "gotty"
cmd.Usage = "Share your terminal as a web application"
cmd.HideHelp = true
flags := []flag{
flag{"address", "a", "IP address to listen"},
flag{"port", "p", "Port number to listen"},
flag{"permit-write", "w", "Permit clients to write to the TTY (BE CAREFUL)"},
flag{"credential", "c", "Credential for Basic Authentication (ex: user:pass, default disabled)"},
flag{"random-url", "r", "Add a random string to the URL"},
flag{"random-url-length", "", "Random URL length"},
flag{"tls", "t", "Enable TLS/SSL"},
flag{"tls-crt", "", "TLS/SSL crt file path"},
flag{"tls-key", "", "TLS/SSL key file path"},
flag{"index", "", "Custom index.html file"},
flag{"title-format", "", "Title format of browser window"},
flag{"reconnect", "", "Enable reconnection"},
flag{"reconnect-time", "", "Time to reconnect"},
flag{"once", "", "Accept only one client and exit on disconnection"},
}
mappingHint := map[string]string{
"index": "IndexFile",
"tls": "EnableTLS",
"tls-crt": "TLSCrtFile",
"tls-key": "TLSKeyFile",
"random-url": "EnableRandomUrl",
"reconnect": "EnableReconnect",
}
cliFlags, err := generateFlags(flags, mappingHint)
if err != nil {
exit(err, 3)<|fim▁hole|>
cmd.Flags = append(
cliFlags,
cli.StringFlag{
Name: "config",
Value: "~/.gotty",
Usage: "Config file path",
EnvVar: "GOTTY_CONFIG",
},
)
cmd.Action = func(c *cli.Context) {
if len(c.Args()) == 0 {
fmt.Println("Error: No command given.\n")
cli.ShowAppHelp(c)
exit(err, 1)
}
options := app.DefaultOptions
configFile := c.String("config")
_, err := os.Stat(app.ExpandHomeDir(configFile))
if configFile != "~/.gotty" || !os.IsNotExist(err) {
if err := app.ApplyConfigFile(&options, configFile); err != nil {
exit(err, 2)
}
}
applyFlags(&options, flags, mappingHint, c)
if c.IsSet("credential") {
options.EnableBasicAuth = true
}
app, err := app.New(c.Args(), &options)
if err != nil {
exit(err, 3)
}
registerSignals(app)
err = app.Run()
if err != nil {
exit(err, 4)
}
}
cli.AppHelpTemplate = helpTemplate
cmd.Run(os.Args)
}
func exit(err error, code int) {
if err != nil {
fmt.Println(err)
}
os.Exit(code)
}
func registerSignals(app *app.App) {
sigChan := make(chan os.Signal, 1)
signal.Notify(
sigChan,
syscall.SIGINT,
syscall.SIGTERM,
)
go func() {
for {
s := <-sigChan
switch s {
case syscall.SIGINT, syscall.SIGTERM:
if app.Exit() {
fmt.Println("Send ^C to force exit.")
} else {
os.Exit(5)
}
}
}
}()
}<|fim▁end|> | } |
<|file_name|>middleware.js<|end_file_name|><|fim▁begin|>/**<|fim▁hole|> * Express middleware
*/<|fim▁end|> | * boot/middleware.js |
<|file_name|>urls.py<|end_file_name|><|fim▁begin|>from django.conf.urls import url
<|fim▁hole|>
urlpatterns = [
url(r'^$', views.map, name='map'),
url(r'^mapSim', views.mapSim, name='mapSim'),
url(r'^api/getPos', views.getPos, name='getPos'),
url(r'^api/getProjAndPos', views.getProjAndPos, name='getProjAndPos'),
]<|fim▁end|> | from . import views |
<|file_name|>extra.d.ts<|end_file_name|><|fim▁begin|><|fim▁hole|><|fim▁end|> | export declare class UtilExtraClass {} |
<|file_name|>features.rs<|end_file_name|><|fim▁begin|>//! Feature tests for OS functionality
pub use self::os::*;
#[cfg(any(target_os = "linux", target_os = "android"))]
mod os {
use crate::sys::utsname::uname;
// Features:
// * atomic cloexec on socket: 2.6.27
// * pipe2: 2.6.27
// * accept4: 2.6.28
static VERS_UNKNOWN: usize = 1;
static VERS_2_6_18: usize = 2;
static VERS_2_6_27: usize = 3;
static VERS_2_6_28: usize = 4;
static VERS_3: usize = 5;
#[inline]
fn digit(dst: &mut usize, b: u8) {
*dst *= 10;
*dst += (b - b'0') as usize;
}
fn parse_kernel_version() -> usize {
let u = uname();
let mut curr: usize = 0;
let mut major: usize = 0;
let mut minor: usize = 0;
let mut patch: usize = 0;
for b in u.release().bytes() {
if curr >= 3 {
break;
}
match b {
b'.' | b'-' => {
curr += 1;<|fim▁hole|> 1 => digit(&mut minor, b),
_ => digit(&mut patch, b),
}
}
_ => break,
}
}
if major >= 3 {
VERS_3
} else if major >= 2 {
if minor >= 7 {
VERS_UNKNOWN
} else if minor >= 6 {
if patch >= 28 {
VERS_2_6_28
} else if patch >= 27 {
VERS_2_6_27
} else {
VERS_2_6_18
}
} else {
VERS_UNKNOWN
}
} else {
VERS_UNKNOWN
}
}
fn kernel_version() -> usize {
static mut KERNEL_VERS: usize = 0;
unsafe {
if KERNEL_VERS == 0 {
KERNEL_VERS = parse_kernel_version();
}
KERNEL_VERS
}
}
/// Check if the OS supports atomic close-on-exec for sockets
pub fn socket_atomic_cloexec() -> bool {
kernel_version() >= VERS_2_6_27
}
#[test]
pub fn test_parsing_kernel_version() {
assert!(kernel_version() > 0);
}
}
#[cfg(any(target_os = "illumos"))]
mod os {
/// Check if the OS supports atomic close-on-exec for sockets
pub fn socket_atomic_cloexec() -> bool {
true
}
}
#[cfg(any(target_os = "macos", target_os = "freebsd",
target_os = "dragonfly", target_os = "ios",
target_os = "openbsd", target_os = "netbsd",
target_os = "redox", target_os = "fuchsia",
target_os = "solaris"))]
mod os {
/// Check if the OS supports atomic close-on-exec for sockets
pub fn socket_atomic_cloexec() -> bool {
false
}
}<|fim▁end|> | }
b'0'..=b'9' => {
match curr {
0 => digit(&mut major, b), |
<|file_name|>run_gitserver.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from django.conf import settings
from django.core.management.base import BaseCommand
from twisted.conch.interfaces import ISession
from twisted.internet import reactor
from twisted.python import components
from brigitte.gitserver.server import GitSession, GitConchUser, GitServer<|fim▁hole|>
class Command(BaseCommand):
help = 'Starts the GitServer for brigitte.'
def handle(self, *args, **options):
components.registerAdapter(GitSession, GitConchUser, ISession)
reactor.listenTCP(settings.BRIGITTE_SSH_PORT,
GitServer(settings.BRIGITTE_SSH_KEY_PATH))
reactor.run()<|fim▁end|> | |
<|file_name|>OPD_panel.py<|end_file_name|><|fim▁begin|>from Plugins.Plugin import PluginDescriptor
from Screens.PluginBrowser import *
from Screens.Ipkg import Ipkg
from Screens.HarddiskSetup import HarddiskSetup
from Components.ProgressBar import ProgressBar
from Components.SelectionList import SelectionList
from Screens.NetworkSetup import *
from enigma import *
from Screens.Standby import *
from Screens.LogManager import *
from Screens.MessageBox import MessageBox
from Plugins.SystemPlugins.SoftwareManager.Flash_online import FlashOnline
from Components.ActionMap import ActionMap, NumberActionMap, HelpableActionMap
from Screens.Screen import Screen
from Screens.TaskView import JobView
from Components.Task import Task, Job, job_manager, Condition
from GlobalActions import globalActionMap
from Screens.ChoiceBox import ChoiceBox
from Tools.BoundFunction import boundFunction
from Tools.LoadPixmap import LoadPixmap
from Tools.Directories import resolveFilename, SCOPE_CURRENT_SKIN, SCOPE_PLUGINS
from Components.MenuList import MenuList
from Components.FileList import FileList
from Components.Label import Label
from Components.ScrollLabel import ScrollLabel
from Components.Pixmap import Pixmap
from Components.config import ConfigSubsection, ConfigInteger, ConfigText, getConfigListEntry, ConfigSelection, ConfigIP, ConfigYesNo, ConfigSequence, ConfigNumber, NoSave, ConfigEnableDisable, configfile
from Components.ConfigList import ConfigListScreen, ConfigList
from Components.Sources.StaticText import StaticText
from Components.Sources.Progress import Progress
from Components.Button import Button
from Components.ActionMap import ActionMap
from Components.SystemInfo import SystemInfo
from Screens.VirtualKeyBoard import VirtualKeyBoard
from Components.MultiContent import MultiContentEntryText, MultiContentEntryPixmapAlphaTest
from OPENDROID.OscamSmartcard import *
from enigma import eConsoleAppContainer
from Tools.Directories import fileExists
from Tools.Downloader import downloadWithProgress
from boxbranding import getBoxType, getMachineName, getMachineBrand, getBrandOEM
from enigma import getDesktop
from Screens.InputBox import PinInput
import string
from random import Random
import os
import sys
import re, string
font = 'Regular;16'
import ServiceReference
import time
import datetime
inOPD_panel = None
config.softcam = ConfigSubsection()
config.softcam.actCam = ConfigText(visible_width=200)
config.softcam.actCam2 = ConfigText(visible_width=200)
config.softcam.waittime = ConfigSelection([('0',_("dont wait")),('1',_("1 second")), ('5',_("5 seconds")),('10',_("10 seconds")),('15',_("15 seconds")),('20',_("20 seconds")),('30',_("30 seconds"))], default='15')
if os.path.isfile('/usr/lib/enigma2/python/Plugins/Extensions/MultiQuickButton/plugin.pyo') is True:
try:
from Plugins.Extensions.MultiQuickButton.plugin import *
except:
pass
from OPENDROID.BluePanel import *
from OPENDROID.CronManager import *
from OPENDROID.ScriptRunner import *
from OPENDROID.MountManager import *
from OPENDROID.SwapManager import Swap, SwapAutostart
from OPENDROID.SoftwarePanel import SoftwarePanel
from Plugins.SystemPlugins.SoftwareManager.BackupRestore import BackupScreen, RestoreScreen, BackupSelection, getBackupPath, getBackupFilename
import gettext
def _(txt):
t = gettext.dgettext("OPD_panel", txt)
if t == txt:
print "[OPD_panel] fallback to default translation for", txt
t = gettext.gettext(txt)
return t
def command(comandline, strip=1):
comandline = comandline + " >/tmp/command.txt"
os.system(comandline)
text = ""
if os.path.exists("/tmp/command.txt") is True:
file = open("/tmp/command.txt", "r")
if strip == 1:
for line in file:
text = text + line.strip() + '\n'
else:
for line in file:
text = text + line
if text[-1:] != '\n': text = text + "\n"
file.close()
# if one or last line then remove linefeed
if text[-1:] == '\n': text = text[:-1]
comandline = text
os.system("rm /tmp/command.txt")
return comandline
boxversion = getBoxType()
machinename = getMachineName()
machinebrand = getMachineBrand()
OEMname = getBrandOEM()
OPD_panel_Version = 'OPD PANEL V1.4 (By OPD-Team)'
print "[OPD_panel] machinebrand: %s" % (machinebrand)
print "[OPD_panel] machinename: %s" % (machinename)
print "[OPD_panel] oem name: %s" % (OEMname)
print "[OPD_panel] boxtype: %s" % (boxversion)
panel = open("/tmp/OPD_panel.ver", "w")
panel.write(OPD_panel_Version + '\n')
panel.write("Machinebrand: %s " % (machinebrand)+ '\n')
panel.write("Machinename: %s " % (machinename)+ '\n')
panel.write("oem name: %s " % (OEMname)+ '\n')
panel.write("Boxtype: %s " % (boxversion)+ '\n')
panel.close()
ExitSave = "[Exit] = " +_("Cancel") +" [Ok] =" +_("Save")
class ConfigPORT(ConfigSequence):
def __init__(self, default):
ConfigSequence.__init__(self, seperator = ".", limits = [(1,65535)], default = default)
def main(session, **kwargs):
session.open(OPD_panel)
def Apanel(menuid, **kwargs):
if menuid == "mainmenu":
return [(_("OPD_panel"), main, "OPD_panel", 3)]
else:
return []
def Plugins(**kwargs):
return [
PluginDescriptor(name='OPD_panel', description='OPD_panel GUI 16/5/2016', where=PluginDescriptor.WHERE_MENU, fnc=Apanel),
PluginDescriptor(where=[PluginDescriptor.WHERE_SESSIONSTART, PluginDescriptor.WHERE_AUTOSTART], fnc=camstart),
PluginDescriptor(where=[PluginDescriptor.WHERE_SESSIONSTART, PluginDescriptor.WHERE_AUTOSTART], fnc=SwapAutostart),
PluginDescriptor(name='OPD_panel', description='OPD_panel GUI 16/5/2016', where=PluginDescriptor.WHERE_EXTENSIONSMENU, fnc=main)]
MENU_SKIN = '<screen position="center,center" size="950,470" title="OPD Panel - Main Menu" >\n\t<ePixmap pixmap="/usr/lib/enigma2/python/OPENDROID/icons/redlogo.png" position="0,380" size="950,84" alphatest="on" zPosition="1"/>\n\t<ePixmap pixmap="/usr/lib/enigma2/python/OPENDROID/icons/opendroid_info.png" position="510,11" size="550,354" alphatest="on" zPosition="1"/>\n\t\t<widget source="global.CurrentTime" render="Label" position="450, 340" size="500,24" font="Regular;20" foregroundColor="#FFFFFF" halign="right" transparent="1" zPosition="5">\n\t\t<convert type="ClockToText">>Format%H:%M:%S</convert>\n\t</widget>\n\t<eLabel backgroundColor="#56C856" position="0,330" size="950,1" zPosition="0" />\n <widget name="Mlist" position="70,110" size="705,260" itemHeight="50" scrollbarMode="showOnDemand" transparent="1" zPosition="0" />\n\t<widget name="label1" position="10,340" size="490,25" font="Regular;20" transparent="1" foregroundColor="#f2e000" halign="left" />\n</screen>'
CONFIG_SKIN = '<screen position="center,center" size="600,440" title="PANEL Config" >\n\t<widget name="config" position="10,10" size="580,377" enableWrapAround="1" scrollbarMode="showOnDemand" />\n\t<widget name="labelExitsave" position="90,410" size="420,25" halign="center" font="Regular;20" transparent="1" foregroundColor="#f2e000" />\n</screen>'
INFO_SKIN = '<screen name="OPD_panel" position="center,center" size="730,400" title="OPD_panel" >\n\t<widget name="label2" position="0,10" size="730,25" font="Regular;20" transparent="1" halign="center" foregroundColor="#f2e000" />\n\t<widget name="label1" position="10,45" size="710,350" font="Console;20" zPosition="1" backgroundColor="#251e1f20" transparent="1" />\n</screen>'
INFO_SKIN2 = '<screen name="OPD_panel" position="center,center" size="530,400" title="OPD_panel" backgroundColor="#251e1f20">\n\t<widget name="label1" position="10,50" size="510,340" font="Regular;15" zPosition="1" backgroundColor="#251e1f20" transparent="1" />\n</screen>'
class PanelList(MenuList):
if (getDesktop(0).size().width() == 1920):
def __init__(self, list, font0 = 38, font1 = 28, itemHeight = 60, enableWrapAround = True):
MenuList.__init__(self, list, enableWrapAround, eListboxPythonMultiContent)
self.l.setFont(0, gFont("Regular", font0))
self.l.setFont(1, gFont("Regular", font1))
self.l.setItemHeight(itemHeight)
else:
def __init__(self, list, font0 = 24, font1 = 16, itemHeight = 50, enableWrapAround = True):
MenuList.__init__(self, list, enableWrapAround, eListboxPythonMultiContent)
self.l.setFont(0, gFont("Regular", font0))
self.l.setFont(1, gFont("Regular", font1))
self.l.setItemHeight(itemHeight)
def MenuEntryItem(entry):
if (getDesktop(0).size().width() == 1920):
res = [entry]
res.append(MultiContentEntryPixmapAlphaTest(pos=(0, 10), size=(60, 60), png=entry[0]))
res.append(MultiContentEntryText(pos=(110, 5), size=(690, 50), font=0, text=entry[1]))
return res
else:
res = [entry]
res.append(MultiContentEntryPixmapAlphaTest(pos=(0, 5), size=(100, 40), png=entry[0]))
res.append(MultiContentEntryText(pos=(110, 10), size=(440, 40), font=0, text=entry[1]))
return res
from Screens.PiPSetup import PiPSetup
from Screens.InfoBarGenerics import InfoBarPiP
def InfoEntryComponent(file):
png = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, 'icons/' + file + '.png'))
if png == None:
png = LoadPixmap('/usr/lib/enigma2/python/OPENDROID/icons/' + file + '.png')
if png == None:
png = LoadPixmap(cached=True, path=resolveFilename(SCOPE_CURRENT_SKIN, 'icons/default.png'))
if png == None:
png = LoadPixmap('/usr/lib/enigma2/python/OPENDROID/icons/default.png')
res = png
return res
class OPD_panel(Screen, InfoBarPiP):
servicelist = None
def __init__(self, session, services = None):
global menu
global inOPD_panel
global pluginlist
global INFOCONF
Screen.__init__(self, session)
self.session = session
self.skin = MENU_SKIN
self.onShown.append(self.setWindowTitle)
self.service = None
INFOCONF = 0
pluginlist = 'False'
try:
print '[OPD_panel] SHOW'
OPD_panel = self
except:
print '[OPD_Panel] Error Hide'
if services is not None:
self.servicelist = services
else:
self.servicelist = None
self.list = []
self['actions'] = ActionMap(['OkCancelActions', 'DirectionActions', 'ColorActions'], {'cancel': self.Exit,
'upUp': self.up,
'downUp': self.down,
'ok': self.ok}, 1)
self['label1'] = Label(OPD_panel_Version)
self.Mlist = []
self.Mlist.append(MenuEntryItem((InfoEntryComponent('ImageFlash'), _('Image-Flasher'), 'ImageFlash')))
self.Mlist.append(MenuEntryItem((InfoEntryComponent('LogManager'), _('Log-Manager'), 'LogManager')))
self.Mlist.append(MenuEntryItem((InfoEntryComponent('SoftwareManager'), _('Software-Manager'), 'software-manager')))
self.Mlist.append(MenuEntryItem((InfoEntryComponent('services'), _('services'), 'services')))
self.Mlist.append(MenuEntryItem((InfoEntryComponent('Infos'), _('Infos'), 'Infos')))
self.Mlist.append(MenuEntryItem((InfoEntryComponent('Infobar_Setup'), _('Infobar_Setup'), 'Infobar_Setup')))
self.onChangedEntry = []
self["Mlist"] = PanelList([])
self["Mlist"].l.setList(self.Mlist)
menu = 0
self['Mlist'].onSelectionChanged.append(self.selectionChanged)
def getCurrentEntry(self):
if self['Mlist'].l.getCurrentSelection():
selection = self['Mlist'].l.getCurrentSelection()[0]
if selection[0] is not None:
return selection[0]
return
def selectionChanged(self):
item = self.getCurrentEntry()
def setWindowTitle(self):
self.setTitle(_('OPD-Main Menu'))
def up(self):
pass
def down(self):
pass
def left(self):
pass
def right(self):
pass
def Red(self):
self.showExtensionSelection1(Parameter='run')
def Green(self):
pass
def yellow(self):
pass
def blue(self):
pass
def Exit(self):
global menu
global inOPD_panel
if menu == 0:
try:
self.service = self.session.nav.getCurrentlyPlayingServiceReference()
service = self.service.toCompareString()
servicename = ServiceReference.ServiceReference(service).getServiceName().replace('\xc2\x87', '').replace('\xc2\x86', '').ljust(16)
print '[OPD_panel] HIDE'
inOPD_panel = None
except:
print '[OPD_panel] Error Hide'
self.close()
elif menu == 1:
self['Mlist'].moveToIndex(0)
self['Mlist'].l.setList(self.oldmlist)
menu = 0
self['label1'].setText(OPD_panel_Version)
elif menu == 2:
self['Mlist'].moveToIndex(0)
self['Mlist'].l.setList(self.oldmlist1)
menu = 1
self['label1'].setText('Infos')
return
def ok(self):
menu = self['Mlist'].l.getCurrentSelection()[0][2]
print '[OPD_panel] MenuItem: ' + menu
if menu == 'services':
self.services()
elif menu == 'Pluginbrowser':
self.session.open(PluginBrowser)
elif menu == 'Infos':
self.Infos()
elif menu == 'Service_Team':
self.session.open(Info, 'Service_Team')
elif menu == 'Info':
self.session.open(Info, 'SystemInfo')
elif menu == 'ImageVersion':
self.session.open(Info, 'ImageVersion')
elif menu == 'FreeSpace':
self.session.open(Info, 'FreeSpace')
elif menu == 'Network':
self.session.open(Info, 'Network')
elif menu == 'Mounts':
self.session.open(Info, 'Mounts')
elif menu == 'Kernel':
self.session.open(Info, 'Kernel')
elif menu == 'Ram':
self.session.open(Info, 'Free')
elif menu == 'Cpu':
self.session.open(Info, 'Cpu')
elif menu == 'Top':
self.session.open(Info, 'Top')
elif menu == 'MemInfo':
self.session.open(Info, 'MemInfo')
elif menu == 'Module':
self.session.open(Info, 'Module')
elif menu == 'Mtd':
self.session.open(Info, 'Mtd')
elif menu == 'Partitions':
self.session.open(Info, 'Partitions')
elif menu == 'Swap':
self.session.open(Info, 'Swap')
elif menu == 'SystemInfo':
self.System()
elif menu == 'CronManager':
self.session.open(CronManager)
elif menu == 'Infobar_Setup':
from OPENDROID.GreenPanel import InfoBarSetup
self.session.open(InfoBarSetup)
elif menu == 'Decoding_Setup':
from OPENDROID.GreenPanel import DecodingSetup
self.session.open(DecodingSetup)
elif menu == 'JobManager':
self.session.open(ScriptRunner)
elif menu == 'software-manager':
self.Software_Manager()
elif menu == 'software-update':
self.session.open(SoftwarePanel)
elif menu == 'backup-settings':
self.session.openWithCallback(self.backupDone, BackupScreen, runBackup=True)
elif menu == 'restore-settings':
self.backuppath = getBackupPath()
self.backupfile = getBackupFilename()
self.fullbackupfilename = self.backuppath + '/' + self.backupfile
if os_path.exists(self.fullbackupfilename):
self.session.openWithCallback(self.startRestore, MessageBox, _('Are you sure you want to restore your STB backup?\nSTB will restart after the restore'))
else:
self.session.open(MessageBox, _('Sorry no backups found!'), MessageBox.TYPE_INFO, timeout=10)
elif menu == 'backup-files':
self.session.openWithCallback(self.backupfiles_choosen, BackupSelection)
elif menu == 'MultiQuickButton':
self.session.open(MultiQuickButton)
elif menu == 'MountManager':
self.session.open(DeviceManager)
elif menu == 'OscamSmartcard':
self.session.open(OscamSmartcard)
elif menu == 'SwapManager':
self.session.open(Swap)
elif menu == 'RedPanel':
self.session.open(RedPanel)
elif menu == 'Yellow-Key-Action':
self.session.open(YellowPanel)
elif menu == 'LogManager':
self.session.open(LogManager)
elif menu == 'ImageFlash':
self.session.open(FlashOnline)
elif menu == 'Samba':
self.session.open(NetworkSamba)
def services(self):
global menu
menu = 1
self['label1'].setText(_('services'))
self.tlist = []
self.oldmlist = []
self.oldmlist = self.Mlist
self.tlist.append(MenuEntryItem((InfoEntryComponent('MountManager'), _('MountManager'), 'MountManager')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('CronManager'), _('CronManager'), 'CronManager')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('JobManager'), _('JobManager'), 'JobManager')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('SwapManager'), _('SwapManager'), 'SwapManager')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('OscamSmartcard'), _('OscamSmartcard'), 'OscamSmartcard')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('Samba'), _('Samba'), 'Samba')))
if os.path.isfile('/usr/lib/enigma2/python/Plugins/Extensions/MultiQuickButton/plugin.pyo') is True:
self.tlist.append(MenuEntryItem((InfoEntryComponent('MultiQuickButton'), _('MultiQuickButton'), 'MultiQuickButton')))
self['Mlist'].moveToIndex(0)
self['Mlist'].l.setList(self.tlist)
def Infos(self):
global menu
menu = 1
self['label1'].setText(_('Infos'))
self.tlist = []
self.oldmlist = []
self.oldmlist1 = []
self.oldmlist = self.Mlist
self.tlist.append(MenuEntryItem((InfoEntryComponent('Service_Team'), _('Service_Team'), 'Service_Team')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('ImageVersion'), _('Image-Version'), 'ImageVersion')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('FreeSpace'), _('FreeSpace'), 'FreeSpace')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('Kernel'), _('Kernel'), 'Kernel')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('Mounts'), _('Mounts'), 'Mounts')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('Network'), _('Network'), 'Network')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('Ram'), _('Ram'), 'Ram')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('SystemInfo'), _('SystemInfo'), 'SystemInfo')))
self['Mlist'].moveToIndex(0)
self['Mlist'].l.setList(self.tlist)
self.oldmlist1 = self.tlist
def System(self):
global menu
menu = 2
self['label1'].setText(_('System Info'))
self.tlist = []
self.tlist.append(MenuEntryItem((InfoEntryComponent('Cpu'), _('Cpu'), 'Cpu')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('MemInfo'), _('MemInfo'), 'MemInfo')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('Mtd'), _('Mtd'), 'Mtd')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('Module'), _('Module'), 'Module')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('Partitions'), _('Partitions'), 'Partitions')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('Swap'), _('Swap'), 'Swap')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('Top'), _('Top'), 'Top')))
self['Mlist'].moveToIndex(0)
self['Mlist'].l.setList(self.tlist)
def System_main(self):
global menu
menu = 1
self["label1"].setText(_("Image/Remote Setup"))
self.tlist = []
self.oldmlist = []
self.oldmlist = self.Mlist
self.tlist.append(MenuEntryItem((InfoEntryComponent('Red-Key-Action'), _("Red Panel"), 'Red-Key-Action')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('Blue-Key-Action'), _("Blue Panel"), 'Blue-Key-Action')))
self["Mlist"].moveToIndex(0)
self["Mlist"].l.setList(self.tlist)
def System_main(self):
global menu
menu = 1
self['label1'].setText(_('System'))
self.tlist = []
self.oldmlist = []
self.oldmlist = self.Mlist
self.tlist.append(MenuEntryItem((InfoEntryComponent('Info'), _('Info'), 'Info')))
self['Mlist'].moveToIndex(0)
self['Mlist'].l.setList(self.tlist)
def Software_Manager(self):
global menu
menu = 1
self['label1'].setText(_('Software Manager'))
self.tlist = []
self.oldmlist = []
self.oldmlist = self.Mlist
self.tlist.append(MenuEntryItem((InfoEntryComponent('SoftwareManager'), _('Software update'), 'software-update')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('BackupSettings'), _('Backup Settings'), 'backup-settings')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('RestoreSettings'), _('Restore Settings'), 'restore-settings')))
self.tlist.append(MenuEntryItem((InfoEntryComponent('BackupFiles'), _('Choose backup files'), 'backup-files')))
self['Mlist'].moveToIndex(0)
self['Mlist'].l.setList(self.tlist)
def backupfiles_choosen(self, ret):
config.plugins.configurationbackup.backupdirs.save()
config.plugins.configurationbackup.save()
config.save()
def backupDone(self, retval = None):
if retval is True:
self.session.open(MessageBox, _('Backup done.'), MessageBox.TYPE_INFO, timeout=10)
else:
self.session.open(MessageBox, _('Backup failed.'), MessageBox.TYPE_INFO, timeout=10)
def startRestore(self, ret = False):
if ret == True:
self.exe = True
self.session.open(RestoreScreen, runRestore=True)
class RedPanel(ConfigListScreen, Screen):
def __init__(self, session):
Screen.__init__(self, session)
self.session = session
self.skinName = 'Setup'
Screen.setTitle(self, _('RedPanel') + '...')
self.setup_title = _('RedPanel') + '...'
self['HelpWindow'] = Pixmap()
self['HelpWindow'].hide()
self['status'] = StaticText()
self['footnote'] = Label('')
self['description'] = Label(_(''))
self['labelExitsave'] = Label('[Exit] = ' + _('Cancel') + ' [Ok] =' + _('Save'))
self.onChangedEntry = []
self.list = []
ConfigListScreen.__init__(self, self.list, session=self.session, on_change=self.changedEntry)
self.createSetup()
self['actions'] = ActionMap(['SetupActions', 'ColorActions'], {'ok': self.keySave,
'cancel': self.keyCancel,
'red': self.keyCancel,
'green': self.keySave,
'menu': self.keyCancel}, -2)
self['key_red'] = StaticText(_('Cancel'))
self['key_green'] = StaticText(_('OK'))
if self.selectionChanged not in self['config'].onSelectionChanged:
self['config'].onSelectionChanged.append(self.selectionChanged)
self.selectionChanged()
def createSetup(self):
self.editListEntry = None
self.list = []
self.list.append(getConfigListEntry(_('Show OPD_panel Red-key'), config.plugins.OPD_panel_redpanel.enabled))
self.list.append(getConfigListEntry(_('Show Softcam-Panel Red-key long'), config.plugins.OPD_panel_redpanel.enabledlong))
self['config'].list = self.list
self['config'].setList(self.list)
if config.usage.sort_settings.value:
self['config'].list.sort()
return
def selectionChanged(self):
self['status'].setText(self['config'].getCurrent()[0])
def changedEntry(self):
for x in self.onChangedEntry:
x()
self.selectionChanged()
def getCurrentEntry(self):
return self['config'].getCurrent()[0]
def getCurrentValue(self):
return str(self['config'].getCurrent()[1].getText())
def getCurrentDescription(self):
return self['config'].getCurrent() and len(self['config'].getCurrent()) > 2 and self['config'].getCurrent()[2] or ''
def createSummary(self):
from Screens.Setup import SetupSummary
return SetupSummary
def saveAll(self):
for x in self['config'].list:
x[1].save()
configfile.save()
def keySave(self):
self.saveAll()
self.close()
def cancelConfirm(self, result):
if not result:
return
for x in self['config'].list:
x[1].cancel()
self.close()
def keyCancel(self):
if self['config'].isChanged():
self.session.openWithCallback(self.cancelConfirm, MessageBox, _('Really close without saving settings?'))
else:
self.close()
class YellowPanel(ConfigListScreen, Screen):
def __init__(self, session):
Screen.__init__(self, session)
self.session = session
self.skinName = 'Setup'
Screen.setTitle(self, _('Yellow Key Action') + '...')
self.setup_title = _('Yellow Key Action') + '...'
self['HelpWindow'] = Pixmap()
self['HelpWindow'].hide()
self['status'] = StaticText()
self['footnote'] = Label('')
self['description'] = Label('')
self['labelExitsave'] = Label('[Exit] = ' + _('Cancel') + ' [Ok] =' + _('Save'))
self.onChangedEntry = []
self.list = []
ConfigListScreen.__init__(self, self.list, session=self.session, on_change=self.changedEntry)
self.createSetup()
self['actions'] = ActionMap(['SetupActions', 'ColorActions'], {'ok': self.keySave,
'cancel': self.keyCancel,
'red': self.keyCancel,
'green': self.keySave,
'menu': self.keyCancel}, -2)
self['key_red'] = StaticText(_('Cancel'))
self['key_green'] = StaticText(_('OK'))
if self.selectionChanged not in self['config'].onSelectionChanged:
self['config'].onSelectionChanged.append(self.selectionChanged)
self.selectionChanged()
def createSetup(self):
self.editListEntry = None
self.list = []
self.list.append(getConfigListEntry(_('Yellow Key Action'), config.plugins.OPD_panel_yellowkey.list))
self['config'].list = self.list
self['config'].setList(self.list)
if config.usage.sort_settings.value:
self['config'].list.sort()
return
def selectionChanged(self):
self['status'].setText(self['config'].getCurrent()[0])
def changedEntry(self):
for x in self.onChangedEntry:
x()
self.selectionChanged()
def getCurrentEntry(self):
return self['config'].getCurrent()[0]
def getCurrentValue(self):
return str(self['config'].getCurrent()[1].getText())
def getCurrentDescription(self):
return self['config'].getCurrent() and len(self['config'].getCurrent()) > 2 and self['config'].getCurrent()[2] or ''
def createSummary(self):
from Screens.Setup import SetupSummary
return SetupSummary
def saveAll(self):
for x in self['config'].list:
x[1].save()
configfile.save()
def keySave(self):
self.saveAll()
self.close()
def cancelConfirm(self, result):
if not result:
return
for x in self['config'].list:
x[1].cancel()
self.close()
def keyCancel(self):
if self['config'].isChanged():
self.session.openWithCallback(self.cancelConfirm, MessageBox, _('Really close without saving settings?'))
else:
self.close()
class Info(Screen):
def __init__(self, session, info):
self.service = None
Screen.__init__(self, session)
self.skin = INFO_SKIN
self['label2'] = Label('INFO')
self['label1'] = ScrollLabel()
if info == 'Service_Team':
self.Service_Team()
if info == 'SystemInfo':
self.SystemInfo()
elif info == 'ImageVersion':
self.ImageVersion()
elif info == 'FreeSpace':
self.FreeSpace()
elif info == 'Mounts':
self.Mounts()
elif info == 'Network':
self.Network()
elif info == 'Kernel':
self.Kernel()
elif info == 'Free':
self.Free()
elif info == 'Cpu':
self.Cpu()
elif info == 'Top':
self.Top()
elif info == 'MemInfo':
self.MemInfo()
elif info == 'Module':
self.Module()
elif info == 'Mtd':
self.Mtd()
elif info == 'Partitions':
self.Partitions()
elif info == 'Swap':
self.Swap()
self['actions'] = ActionMap(['OkCancelActions', 'DirectionActions'], {'cancel': self.Exit,
'ok': self.ok,
'up': self.Up,
'down': self.Down}, -1)
return
def Exit(self):
self.close()
def ok(self):
self.close()
def Down(self):
self['label1'].pageDown()
def Up(self):
self['label1'].pageUp()
def Service_Team(self):
try:
self['label2'].setText('INFO')
info1 = self.Do_cmd('cat', '/etc/motd', None)
if info1.find('wElc0me') > -1:
info1 = info1[info1.find('wElc0me'):len(info1)] + '\n'
info1 = info1.replace('|', '')
else:
info1 = info1[info1.find('INFO'):len(info1)] + '\n'
info2 = self.Do_cmd('cat', '/etc/image-version', None)
info3 = self.Do_cut(info1 + info2)
self['label1'].setText(info3)
except:
self['label1'].setText(_('an internal error has occur'))
return
def SystemInfo(self):
try:
self['label2'].setText(_('Image Info'))
info1 = self.Do_cmd('cat', '/etc/version', None)
info1 = self.Do_cut(info1)
self['label1'].setText(info1)
except:
self['label1'].setText(_('an internal error has occur'))
return
def ImageVersion(self):
try:
self['label2'].setText(_('Image Version'))
now = datetime.now()
info1 = 'Date = ' + now.strftime('%d-%B-%Y') + '\n'
info2 = 'Time = ' + now.strftime('%H:%M:%S') + '\n'
info3 = self.Do_cmd('uptime', None, None)
tmp = info3.split(',')
info3 = 'Uptime = ' + tmp[0].lstrip() + '\n'
info4 = self.Do_cmd('cat', '/etc/image-version', ' | head -n 1')
info4 = info4[9:]
info4 = 'Imagetype = ' + info4 + '\n'
info5 = 'Load = ' + self.Do_cmd('cat', '/proc/loadavg', None)
info6 = self.Do_cut(info1 + info2 + info3 + info4 + info5)
self['label1'].setText(info6)
except:
self['label1'].setText(_('an internal error has occur'))
return
def FreeSpace(self):
try:
self['label2'].setText(_('FreeSpace'))
info1 = self.Do_cmd('df', None, '-h')
info1 = self.Do_cut(info1)
self['label1'].setText(info1)
except:
self['label1'].setText(_('an internal error has occur'))
return
def Mounts(self):
try:
self['label2'].setText(_('Mounts'))
info1 = self.Do_cmd('mount', None, None)
info1 = self.Do_cut(info1)
self['label1'].setText(info1)
except:
self['label1'].setText(_('an internal error has occur'))
return
def Network(self):
try:
self['label2'].setText(_('Network'))
info1 = self.Do_cmd('ifconfig', None, None) + '\n'
info2 = self.Do_cmd('route', None, '-n')
info3 = self.Do_cut(info1 + info2)
self['label1'].setText(info3)
except:
self['label1'].setText(_('an internal error has occur'))
return
def Kernel(self):
try:
self['label2'].setText(_('Kernel'))
info0 = self.Do_cmd('cat', '/proc/version', None)
info = info0.split('(')
info1 = 'Name = ' + info[0] + '\n'
info2 = 'Owner = ' + info[1].replace(')', '') + '\n'
info3 = 'Mainimage = ' + info[2][0:info[2].find(')')] + '\n'
info4 = 'Date = ' + info[3][info[3].find('SMP') + 4:len(info[3])]
info5 = self.Do_cut(info1 + info2 + info3 + info4)
self['label1'].setText(info5)
except:
self['label1'].setText(_('an internal error has occur'))
return
def Free(self):
try:
self['label2'].setText(_('Ram'))
info1 = self.Do_cmd('free', None, None)
info1 = self.Do_cut(info1)
self['label1'].setText(info1)
except:
self['label1'].setText(_('an internal error has occur'))
return
def Cpu(self):
try:
self['label2'].setText(_('Cpu'))
info1 = self.Do_cmd('cat', '/proc/cpuinfo', None, " | sed 's/\t\t/\t/'")
info1 = self.Do_cut(info1)
self['label1'].setText(info1)
except:
self['label1'].setText(_('an internal error has occur'))
return
def Top(self):
try:
self['label2'].setText(_('Top'))
info1 = self.Do_cmd('top', None, '-b -n1')
info1 = self.Do_cut(info1)
self['label1'].setText(info1)
except:
self['label1'].setText(_('an internal error has occur'))
return
def MemInfo(self):
try:
self['label2'].setText(_('MemInfo'))
info1 = self.Do_cmd('cat', '/proc/meminfo', None)
info1 = self.Do_cut(info1)
self['label1'].setText(info1)
except:
self['label1'].setText(_('an internal error has occur'))
return
def Module(self):
try:
self['label2'].setText(_('Module'))
info1 = self.Do_cmd('cat', '/proc/modules', None)
info1 = self.Do_cut(info1)
self['label1'].setText(info1)
except:
self['label1'].setText(_('an internal error has occur'))
return
def Mtd(self):
try:
self['label2'].setText(_('Mtd'))
info1 = self.Do_cmd('cat', '/proc/mtd', None)
info1 = self.Do_cut(info1)
self['label1'].setText(info1)
except:
self['label1'].setText(_('an internal error has occur'))
return
def Partitions(self):
try:
self['label2'].setText(_('Partitions'))
info1 = self.Do_cmd('cat', '/proc/partitions', None)
info1 = self.Do_cut(info1)
self['label1'].setText(info1)
except:
self['label1'].setText(_('an internal error has occur'))
return
def Swap(self):
try:
self['label2'].setText(_('Swap'))
info0 = self.Do_cmd('cat', '/proc/swaps', None, " | sed 's/\t/ /g; s/[ ]* / /g'")
info0 = info0.split('\n')
info1 = ''
for l in info0[1:]:
l1 = l.split(' ')
info1 = info1 + 'Name: ' + l1[0] + '\n'
info1 = info1 + 'Type: ' + l1[1] + '\n'
info1 = info1 + 'Size: ' + l1[2] + '\n'
info1 = info1 + 'Used: ' + l1[3] + '\n'
info1 = info1 + 'Prio: ' + l1[4] + '\n\n'
if info1[-1:] == '\n':
info1 = info1[:-1]
if info1[-1:] == '\n':
info1 = info1[:-1]
info1 = self.Do_cut(info1)
self['label1'].setText(info1)
except:
self['label1'].setText(_('an internal error has occur'))
return
def Do_find(self, text, search):
text = text + ' '
ret = ''
pos = text.find(search)
pos1 = text.find(' ', pos)
if pos > -1:
ret = text[pos + len(search):pos1]
return ret
def Do_cut(self, text):
text1 = text.split('\n')
text = ''
for line in text1:
text = text + line[:95] + '\n'
if text[-1:] == '\n':
text = text[:-1]
return text
def Do_cmd(self, cmd, file, arg, pipe = ''):
try:
if file != None:
if os.path.exists(file) is True:
o = command(cmd + ' ' + file + pipe, 0)
else:
o = 'File not found: \n' + file
elif arg == None:
o = command(cmd, 0)
else:
o = command(cmd + ' ' + arg, 0)
return o<|fim▁hole|> return
####################################################################################################################################
class FileDownloadJob(Job):
def __init__(self, url, filename, file):
Job.__init__(self, _('Downloading %s' % file))
FileDownloadTask(self, url, filename)
class DownloaderPostcondition(Condition):
def check(self, task):
return task.returncode == 0
def getErrorMessage(self, task):
return self.error_message
class FileDownloadTask(Task):
def __init__(self, job, url, path):
Task.__init__(self, job, _('Downloading'))
self.postconditions.append(DownloaderPostcondition())
self.job = job
self.url = url
self.path = path
self.error_message = ''
self.last_recvbytes = 0
self.error_message = None
self.download = None
self.aborted = False
return
def run(self, callback):
self.callback = callback
self.download = downloadWithProgress(self.url, self.path)
self.download.addProgress(self.download_progress)
self.download.start().addCallback(self.download_finished).addErrback(self.download_failed)
print '[FileDownloadTask] downloading', self.url, 'to', self.path
def abort(self):
print '[FileDownloadTask] aborting', self.url
if self.download:
self.download.stop()
self.aborted = True
def download_progress(self, recvbytes, totalbytes):
if recvbytes - self.last_recvbytes > 10000:
self.progress = int(100 * (float(recvbytes) / float(totalbytes)))
self.name = _('Downloading') + ' ' + '%d of %d kBytes' % (recvbytes / 1024, totalbytes / 1024)
self.last_recvbytes = recvbytes
def download_failed(self, failure_instance = None, error_message = ''):
self.error_message = error_message
if error_message == '' and failure_instance is not None:
self.error_message = failure_instance.getErrorMessage()
Task.processFinished(self, 1)
return
def download_finished(self, string = ''):
if self.aborted:
self.finish(aborted=True)
else:
Task.processFinished(self, 0)<|fim▁end|> | except:
o = ''
return o
|
<|file_name|>parser.py<|end_file_name|><|fim▁begin|>import json
from datetime import datetime
from dojo.models import Finding
class MeterianParser(object):
def get_scan_types(self):
return ["Meterian Scan"]
def get_label_for_scan_types(self, scan_type):
return scan_type
def get_description_for_scan_types(self, scan_type):
return "Meterian JSON report output file can be imported."
<|fim▁hole|> findings = []
report_json = json.load(report)
security_reports = self.get_security_reports(report_json)
scan_date = str(datetime.fromisoformat(report_json["timestamp"]).date())
for single_security_report in security_reports:
findings += self.do_get_findings(single_security_report, scan_date, test)
return findings
def get_security_reports(self, report_json):
if "reports" in report_json:
if "security" in report_json["reports"]:
if "reports" in report_json["reports"]["security"]:
return report_json["reports"]["security"]["reports"]
raise ValueError("Malformed report: the security reports are missing.")
def do_get_findings(self, single_security_report, scan_date, test):
findings = []
language = single_security_report["language"]
for dependency_report in single_security_report["reports"]:
lib_name = dependency_report["dependency"]["name"]
lib_ver = dependency_report["dependency"]["version"]
finding_title = lib_name + ":" + lib_ver
for advisory in dependency_report["advices"]:
severity = self.get_severity(advisory)
finding = Finding(
title=finding_title,
date=scan_date,
test=test,
severity=severity,
severity_justification="Issue severity of: **" + severity + "** from a base " +
"CVSS score of: **" + str(advisory.get('cvss')) + "**",
description=advisory['description'],
component_name=lib_name,
component_version=lib_ver,
false_p=False,
duplicate=False,
out_of_scope=False,
impact=severity,
static_finding=True,
dynamic_finding=False,
file_path="Manifest file",
unique_id_from_tool=advisory['id'],
tags=[language]
)
if 'cve' in advisory:
if "N/A" != advisory["cve"]:
finding.cve = advisory["cve"]
if "cwe" in advisory:
finding.cwe = int(advisory["cwe"].replace("CWE-", ""))
mitigation_msg = "## Remediation\n"
safe_versions = dependency_report["safeVersions"]
if "latestPatch" in safe_versions:
mitigation_msg += "Upgrade " + lib_name + " to version " + safe_versions["latestPatch"] + " or higher."
elif "latestMinor" in safe_versions:
mitigation_msg += "Upgrade " + lib_name + " to version " + safe_versions["latestMinor"] + " or higher."
elif "latestMajor" in safe_versions:
mitigation_msg += "Upgrade " + lib_name + " to version " + safe_versions["latestMajor"] + "."
else:
mitigation_msg = "We were not able to provide a safe version for this library.\nYou should consider replacing this component as it could be an issue for the safety of your application."
finding.mitigation = mitigation_msg
references = ""
for link in advisory["links"]:
ref_link = self.get_reference_url(link)
if ref_link is not None:
references += "- " + ref_link + "\n"
if references != "":
finding.references = references
findings.append(finding)
return findings
def get_severity(self, advisory):
# Following the CVSS Scoring per https://nvd.nist.gov/vuln-metrics/cvss
if 'cvss' in advisory:
if advisory['cvss'] <= 3.9:
severity = "Low"
elif advisory['cvss'] >= 4.0 and advisory['cvss'] <= 6.9:
severity = "Medium"
elif advisory['cvss'] >= 7.0 and advisory['cvss'] <= 8.9:
severity = "High"
else:
severity = "Critical"
else:
if advisory["severity"] == "SUGGEST" or advisory["severity"] == "NA" or advisory["severity"] == "NONE":
severity = "Info"
else:
severity = advisory["severity"].title()
return severity
def get_reference_url(self, link_obj):
url = link_obj["url"]
if link_obj["type"] == "CVE":
url = "https://cve.mitre.org/cgi-bin/cvename.cgi?name=" + link_obj["url"]
elif link_obj["type"] == "NVD":
url = "https://nvd.nist.gov/vuln/detail/" + link_obj["url"]
return url<|fim▁end|> | def get_findings(self, report, test): |
<|file_name|>ripple.js<|end_file_name|><|fim▁begin|>import { strToEl } from '../utils';
export default class Ripple {
constructor() {
this.container = strToEl('<div class="ripple"></div>');
}
<|fim▁hole|> animate() {
this.container.classList.remove('animate');
this.container.offsetLeft;
this.container.classList.add('animate');
}
}<|fim▁end|> | |
<|file_name|>TerrainTexturePack.java<|end_file_name|><|fim▁begin|>package textures;
public class TerrainTexturePack {
private TerrainTexture backgroundTexture;
private TerrainTexture rTexture;
private TerrainTexture gTexture;
private TerrainTexture bTexture;
public TerrainTexture getBackgroundTexture() {
return backgroundTexture;
}
public void setBackgroundTexture(TerrainTexture backgroundTexture) {
this.backgroundTexture = backgroundTexture;
}
public TerrainTexturePack(TerrainTexture backgroundTexture,
TerrainTexture rTexture, TerrainTexture gTexture,
TerrainTexture bTexture) {
super();
this.backgroundTexture = backgroundTexture;
this.rTexture = rTexture;
this.gTexture = gTexture;
this.bTexture = bTexture;
}
public TerrainTexture getrTexture() {<|fim▁hole|> public void setrTexture(TerrainTexture rTexture) {
this.rTexture = rTexture;
}
public TerrainTexture getgTexture() {
return gTexture;
}
public void setgTexture(TerrainTexture gTexture) {
this.gTexture = gTexture;
}
public TerrainTexture getbTexture() {
return bTexture;
}
public void setbTexture(TerrainTexture bTexture) {
this.bTexture = bTexture;
}
}<|fim▁end|> | return rTexture;
} |
<|file_name|>create_new.py<|end_file_name|><|fim▁begin|># Copyright (c) 2013, Web Notes Technologies Pvt. Ltd.
# MIT License. See license.txt
from __future__ import unicode_literals
"""
Create a new document with defaults set
"""
import webnotes
from webnotes.utils import nowdate, nowtime, cint, flt
import webnotes.defaults
def get_new_doc(doctype, parent_doc = None, parentfield = None):
doc = webnotes.doc({
"doctype": doctype,
"__islocal": 1,
"owner": webnotes.session.user,
"docstatus": 0
})
meta = webnotes.get_doctype(doctype)
if parent_doc:
doc.parent = parent_doc.name
doc.parenttype = parent_doc.doctype
if parentfield:
doc.parentfield = parentfield
for d in meta.get({"doctype":"DocField", "parent": doctype}):
default = webnotes.defaults.get_user_default(d.fieldname)
if default:
doc.fields[d.fieldname] = default
elif d.fields.get("default"):
if d.default == "__user":
doc.fields[d.fieldname] = webnotes.session.user
elif d.default == "Today":
doc.fields[d.fieldname] = nowdate()
elif d.default.startswith(":"):
ref_fieldname = d.default[1:].lower().replace(" ", "_")
if parent_doc:
ref_docname = parent_doc.fields[ref_fieldname]<|fim▁hole|> ref_docname, d.fieldname)
else:
doc.fields[d.fieldname] = d.default
# convert type of default
if d.fieldtype in ("Int", "Check"):
doc.fields[d.fieldname] = cint(doc.fields[d.fieldname])
elif d.fieldtype in ("Float", "Currency"):
doc.fields[d.fieldname] = flt(doc.fields[d.fieldname])
elif d.fieldtype == "Time":
doc.fields[d.fieldname] = nowtime()
return doc<|fim▁end|> | else:
ref_docname = webnotes.conn.get_default(ref_fieldname)
doc.fields[d.fieldname] = webnotes.conn.get_value(d.default[1:], |
<|file_name|>AbstractScalaFormatterTestBase.java<|end_file_name|><|fim▁begin|>package org.jetbrains.plugins.scala.lang.formatter;
import com.intellij.openapi.application.ApplicationManager;
import com.intellij.openapi.command.CommandProcessor;
import com.intellij.openapi.editor.Document;
import com.intellij.openapi.editor.EditorFactory;
import com.intellij.openapi.editor.impl.DocumentImpl;
import com.intellij.openapi.util.TextRange;
import com.intellij.openapi.util.io.FileUtil;
import com.intellij.openapi.util.text.StringUtil;
import com.intellij.psi.PsiDocumentManager;
import com.intellij.psi.PsiFile;
import com.intellij.psi.codeStyle.CodeStyleManager;
import com.intellij.psi.codeStyle.CodeStyleSettings;
import com.intellij.psi.codeStyle.CodeStyleSettingsManager;
import com.intellij.psi.codeStyle.CommonCodeStyleSettings;
import com.intellij.testFramework.LightIdeaTestCase;
import com.intellij.util.IncorrectOperationException;
import org.jetbrains.plugins.scala.ScalaLanguage;
import org.jetbrains.plugins.scala.lang.formatting.settings.ScalaCodeStyleSettings;
import org.jetbrains.plugins.scala.util.TestUtils;
import java.io.File;
import java.util.EnumMap;
import java.util.Map;
/**
* Base class for java formatter tests that holds utility methods.
*
* @author Denis Zhdanov
* @since Apr 27, 2010 6:26:29 PM
*/
//todo: almost duplicate from Java
public abstract class AbstractScalaFormatterTestBase extends LightIdeaTestCase {
protected enum Action {REFORMAT, INDENT}
private interface TestFormatAction {
void run(PsiFile psiFile, int startOffset, int endOffset);
}
private static final Map<Action, TestFormatAction> ACTIONS = new EnumMap<Action, TestFormatAction>(Action.class);
static {
ACTIONS.put(Action.REFORMAT, new TestFormatAction() {
public void run(PsiFile psiFile, int startOffset, int endOffset) {
CodeStyleManager.getInstance(getProject()).reformatText(psiFile, startOffset, endOffset);
}
});
ACTIONS.put(Action.INDENT, new TestFormatAction() {
public void run(PsiFile psiFile, int startOffset, int endOffset) {
CodeStyleManager.getInstance(getProject()).adjustLineIndent(psiFile, startOffset);
}
});
}
private static final String BASE_PATH = TestUtils.getTestDataPath() + "/psi/formatter";
public TextRange myTextRange;
public TextRange myLineRange;
public CommonCodeStyleSettings getCommonSettings() {
return getSettings().getCommonSettings(ScalaLanguage.INSTANCE);
}
public ScalaCodeStyleSettings getScalaSettings() {
return getSettings().getCustomSettings(ScalaCodeStyleSettings.class);
}
public CodeStyleSettings getSettings() {
return CodeStyleSettingsManager.getSettings(getProject());
}
public CommonCodeStyleSettings.IndentOptions getIndentOptions() {
return getCommonSettings().getIndentOptions();
}
public void doTest() throws Exception {
doTest(getTestName(false) + ".scala", getTestName(false) + "_after.scala");
}
public void doTest(String fileNameBefore, String fileNameAfter) throws Exception {
doTextTest(Action.REFORMAT, loadFile(fileNameBefore), loadFile(fileNameAfter));
}
public void doTextTest(final String text, String textAfter) throws IncorrectOperationException {
doTextTest(Action.REFORMAT, StringUtil.convertLineSeparators(text), StringUtil.convertLineSeparators(textAfter));
}
public void doTextTest(final Action action, final String text, String textAfter) throws IncorrectOperationException {
final PsiFile file = createFile("A.scala", text);
if (myLineRange != null) {
final DocumentImpl document = new DocumentImpl(text);
myTextRange =
new TextRange(document.getLineStartOffset(myLineRange.getStartOffset()), document.getLineEndOffset(myLineRange.getEndOffset()));
}
/*
CommandProcessor.getInstance().executeCommand(getProject(), new Runnable() {
public void run() {
ApplicationManager.getApplication().runWriteAction(new Runnable() {
public void run() {
performFormatting(file);
}
});
}
}, null, null);
assertEquals(prepareText(textAfter), prepareText(file.getText()));
*/
final PsiDocumentManager manager = PsiDocumentManager.getInstance(getProject());
final Document document = manager.getDocument(file);
CommandProcessor.getInstance().executeCommand(getProject(), new Runnable() {
public void run() {
ApplicationManager.getApplication().runWriteAction(new Runnable() {
public void run() {
document.replaceString(0, document.getTextLength(), text);
manager.commitDocument(document);
try {<|fim▁hole|> if (rangeToUse == null) {
rangeToUse = file.getTextRange();
}
ACTIONS.get(action).run(file, rangeToUse.getStartOffset(), rangeToUse.getEndOffset());
}
catch (IncorrectOperationException e) {
assertTrue(e.getLocalizedMessage(), false);
}
}
});
}
}, "", "");
if (document == null) {
fail("Don't expect the document to be null");
return;
}
assertEquals(prepareText(textAfter), prepareText(document.getText()));
manager.commitDocument(document);
assertEquals(prepareText(textAfter), prepareText(file.getText()));
}
//todo: was unused, should be deleted (??)
/* public void doMethodTest(final String before, final String after) throws Exception {
doTextTest(
Action.REFORMAT,
"class Foo{\n" + " void foo() {\n" + before + '\n' + " }\n" + "}",
"class Foo {\n" + " void foo() {\n" + shiftIndentInside(after, 8, false) + '\n' + " }\n" + "}"
);
}
public void doClassTest(final String before, final String after) throws Exception {
doTextTest(
Action.REFORMAT,
"class Foo{\n" + before + '\n' + "}",
"class Foo {\n" + shiftIndentInside(after, 4, false) + '\n' + "}"
);
}*/
private static String prepareText(String actual) {
if (actual.startsWith("\n")) {
actual = actual.substring(1);
}
if (actual.startsWith("\n")) {
actual = actual.substring(1);
}
// Strip trailing spaces
final Document doc = EditorFactory.getInstance().createDocument(actual);
CommandProcessor.getInstance().executeCommand(getProject(), new Runnable() {
public void run() {
ApplicationManager.getApplication().runWriteAction(new Runnable() {
public void run() {
((DocumentImpl)doc).stripTrailingSpaces(getProject());
}
});
}
}, "formatting", null);
return doc.getText().trim();
}
private static String loadFile(String name) throws Exception {
String fullName = BASE_PATH + File.separatorChar + name;
String text = new String(FileUtil.loadFileText(new File(fullName)));
text = StringUtil.convertLineSeparators(text);
return text;
}
@Override
protected void setUp() throws Exception {
super.setUp();
TestUtils.disableTimerThread();
}
}<|fim▁end|> | TextRange rangeToUse = myTextRange; |
<|file_name|>views.py<|end_file_name|><|fim▁begin|>import datetime
import logging
from django.contrib import messages
from django.contrib.auth.decorators import login_required, permission_required
from django.contrib.auth.models import Group
from django.shortcuts import render, redirect
from allianceauth.services.forms import ServicePasswordForm
from .forms import JabberBroadcastForm
from .manager import OpenfireManager, PingBotException
from .models import OpenfireUser
from .tasks import OpenfireTasks
logger = logging.getLogger(__name__)
ACCESS_PERM = 'openfire.access_openfire'
@login_required
@permission_required(ACCESS_PERM)
def activate_jabber(request):
logger.debug("activate_jabber called by user %s" % request.user)
character = request.user.profile.main_character
logger.debug("Adding jabber user for user %s with main character %s" % (request.user, character))
info = OpenfireManager.add_user(OpenfireTasks.get_username(request.user))
# If our username is blank means we already had a user
if info[0] is not "":
OpenfireUser.objects.update_or_create(user=request.user, defaults={'username': info[0]})
logger.debug("Updated authserviceinfo for user %s with jabber credentials. Updating groups." % request.user)
OpenfireTasks.update_groups.delay(request.user.pk)
logger.info("Successfully activated jabber for user %s" % request.user)
messages.success(request, 'Activated jabber account.')
credentials = {
'username': info[0],
'password': info[1],
}
return render(request, 'services/service_credentials.html',
context={'credentials': credentials, 'service': 'Jabber'})
else:
logger.error("Unsuccessful attempt to activate jabber for user %s" % request.user)
messages.error(request, 'An error occurred while processing your jabber account.')
return redirect("services:services")
@login_required
@permission_required(ACCESS_PERM)
def deactivate_jabber(request):
logger.debug("deactivate_jabber called by user %s" % request.user)
if OpenfireTasks.has_account(request.user) and OpenfireTasks.delete_user(request.user):
logger.info("Successfully deactivated jabber for user %s" % request.user)
messages.success(request, 'Deactivated jabber account.')
else:
logger.error("Unsuccessful attempt to deactivate jabber for user %s" % request.user)
messages.error(request, 'An error occurred while processing your jabber account.')
return redirect("services:services")
@login_required
@permission_required(ACCESS_PERM)
def reset_jabber_password(request):
logger.debug("reset_jabber_password called by user %s" % request.user)
if OpenfireTasks.has_account(request.user):
result = OpenfireManager.update_user_pass(request.user.openfire.username)
# If our username is blank means we failed
if result != "":
logger.info("Successfully reset jabber password for user %s" % request.user)
messages.success(request, 'Reset jabber password.')
credentials = {
'username': request.user.openfire.username,
'password': result,
}
return render(request, 'services/service_credentials.html',
context={'credentials': credentials, 'service': 'Jabber'})
logger.error("Unsuccessful attempt to reset jabber for user %s" % request.user)
messages.error(request, 'An error occurred while processing your jabber account.')
return redirect("services:services")
@login_required
@permission_required('auth.jabber_broadcast')
def jabber_broadcast_view(request):
logger.debug("jabber_broadcast_view called by user %s" % request.user)
allchoices = []
if request.user.has_perm('auth.jabber_broadcast_all'):
allchoices.append(('all', 'all'))
for g in Group.objects.all():
allchoices.append((str(g.name), str(g.name)))
else:
for g in request.user.groups.all():
allchoices.append((str(g.name), str(g.name)))
if request.method == 'POST':
form = JabberBroadcastForm(request.POST)
form.fields['group'].choices = allchoices
logger.debug("Received POST request containing form, valid: %s" % form.is_valid())
if form.is_valid():
main_char = request.user.profile.main_character
logger.debug("Processing jabber broadcast for user %s with main character %s" % (request.user, main_char))
try:
if main_char is not None:
message_to_send = form.cleaned_data[<|fim▁hole|> main_char.character_name + " TO: " + \
form.cleaned_data['group'] + " WHEN: " + datetime.datetime.utcnow().strftime(
"%Y-%m-%d %H:%M:%S") + " #####\n##### Replies are NOT monitored #####\n"
group_to_send = form.cleaned_data['group']
else:
message_to_send = form.cleaned_data[
'message'] + "\n##### SENT BY: " + "No character but can send pings?" + " TO: " + \
form.cleaned_data['group'] + " WHEN: " + datetime.datetime.utcnow().strftime(
"%Y-%m-%d %H:%M:%S") + " #####\n##### Replies are NOT monitored #####\n"
group_to_send = form.cleaned_data['group']
OpenfireManager.send_broadcast_message(group_to_send, message_to_send)
messages.success(request, 'Sent jabber broadcast to %s' % group_to_send)
logger.info("Sent jabber broadcast on behalf of user %s" % request.user)
except PingBotException as e:
messages.error(request, e)
else:
form = JabberBroadcastForm()
form.fields['group'].choices = allchoices
logger.debug("Generated broadcast form for user %s containing %s groups" % (
request.user, len(form.fields['group'].choices)))
context = {'form': form}
return render(request, 'services/openfire/broadcast.html', context=context)
@login_required
@permission_required(ACCESS_PERM)
def set_jabber_password(request):
logger.debug("set_jabber_password called by user %s" % request.user)
if request.method == 'POST':
logger.debug("Received POST request with form.")
form = ServicePasswordForm(request.POST)
logger.debug("Form is valid: %s" % form.is_valid())
if form.is_valid() and OpenfireTasks.has_account(request.user):
password = form.cleaned_data['password']
logger.debug("Form contains password of length %s" % len(password))
result = OpenfireManager.update_user_pass(request.user.openfire.username, password=password)
if result != "":
logger.info("Successfully set jabber password for user %s" % request.user)
messages.success(request, 'Set jabber password.')
else:
logger.error("Failed to install custom jabber password for user %s" % request.user)
messages.error(request, 'An error occurred while processing your jabber account.')
return redirect("services:services")
else:
logger.debug("Request is not type POST - providing empty form.")
form = ServicePasswordForm()
logger.debug("Rendering form for user %s" % request.user)
context = {'form': form, 'service': 'Jabber'}
return render(request, 'services/service_password.html', context=context)<|fim▁end|> | 'message'] + "\n##### SENT BY: " + "[" + main_char.corporation_ticker + "]" + \ |
<|file_name|>DailyQuestsManager.java<|end_file_name|><|fim▁begin|>package l2s.gameserver.instancemanager;
import java.util.ArrayList;
import java.util.List;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import l2s.commons.util.Rnd;
import l2s.gameserver.model.Player;
import l2s.gameserver.model.quest.Quest;
import l2s.gameserver.model.quest.QuestState;
public class DailyQuestsManager
{
private static final Logger _log = LoggerFactory.getLogger(DailyQuestsManager.class);
private static List<Integer> _disabledQuests = new ArrayList<Integer>();
<|fim▁hole|> switch(Rnd.get(1, 3))
//60-64 1 quest per day
{
case 1:
_disabledQuests.add(470);
break;
case 2:
_disabledQuests.add(474);
break;
}
switch(Rnd.get(1, 2))
//75-79 2 quest per day
{
case 1:
_disabledQuests.add(488);
break;
case 2:
_disabledQuests.add(489);
break;
}
_log.info("Daily Quests Disable Managed: Loaded " + _disabledQuests.size() + " quests in total (4).");
}
//dunno if this is retail but as I understand if quest gone from the list it will be removed from the player as well.
public static void checkAndRemoveDisabledQuests(Player player)
{
if(player == null)
return;
for(int qId : _disabledQuests)
{
Quest q = QuestManager.getQuest(qId);
QuestState qs = player.getQuestState(q.getName());
if(qs == null)
continue;
if(q.checkMaxLevelCondition(player))
continue;
qs.exitCurrentQuest(true);
}
}
public static boolean isQuestDisabled(int questId)
{
if(_disabledQuests.contains(questId))
return true;
return false;
}
}<|fim▁end|> | public static void EngageSystem()
{ |
<|file_name|>constEnumsEmitOutputInMultipleFiles.ts<|end_file_name|><|fim▁begin|>/// <reference path='fourslash.ts'/>
// @Filename: a.ts
////const enum TestEnum {
//// Foo, Bar
////}
////var testFirstFile = TestEnum.Bar;
// @Filename: b.ts
/////// <reference path="a.ts" />
/////*1*/
////var testInOtherFile = TestEnum.Bar;
goTo.marker("1");
verify.verifyGetEmitOutputForCurrentFile(
"/// <reference path=\"a.ts\" />\r\n\
<|fim▁hole|> )<|fim▁end|> | var testInOtherFile = 1 /* TestEnum.Bar */;\r\n"
|
<|file_name|>pb0027.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# *-* coding:utf-8 *-*
"""
Date :
Author : Vianney Gremmel [email protected]
"""
def memo(f):
class Memo(dict):
def __missing__(self, key):
r = self[key] = f(key)
return r
return Memo().__getitem__
@memo
def isprime(n):
for d in xrange(2, int(n**0.5) + 1):
if n % d == 0:
return False
return True
def maxi_primes():
for a in xrange(-1000, 1001):
for b in xrange(-999, 1001, 2):
n = 0
while True:
if not isprime(abs(n*n + a*n + b)) and n:
yield (n, a, b)<|fim▁hole|> n += 1
print 'please wait...'
max_score = max(score for score in maxi_primes())
print max_score
print max_score[1]*max_score[2]<|fim▁end|> | break |
<|file_name|>utils.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
Helper functions used in views.
"""
from json import dumps
from functools import wraps
from flask import Response
<|fim▁hole|> """
@wraps(function)
def inner(*args, **kwargs):
return Response(dumps(function(*args, **kwargs)),
mimetype='application/json')
return inner<|fim▁end|> |
def jsonify(function):
"""
Creates a response with the JSON representation of wrapped function result. |
<|file_name|>location.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::bindings::codegen::Bindings::LocationBinding;
use dom::bindings::js::{JSRef, Temporary};
use dom::bindings::utils::{Reflectable, Reflector, reflect_dom_object};
use dom::window::Window;
use page::Page;
use servo_util::str::DOMString;
use serialize::{Encoder, Encodable};
use std::rc::Rc;
use url::query_to_str;
#[deriving(Encodable)]
pub struct Location {
reflector_: Reflector, //XXXjdm cycle: window->Location->window
page: Rc<Page>,
}
impl Location {
pub fn new_inherited(page: Rc<Page>) -> Location {
Location {
reflector_: Reflector::new(),
page: page
}
}
pub fn new(window: &JSRef<Window>, page: Rc<Page>) -> Temporary<Location> {
reflect_dom_object(box Location::new_inherited(page),
window,
LocationBinding::Wrap)
}
}
pub trait LocationMethods {
fn Href(&self) -> DOMString;
fn Search(&self) -> DOMString;<|fim▁hole|>}
impl<'a> LocationMethods for JSRef<'a, Location> {
fn Href(&self) -> DOMString {
self.page.get_url().to_str()
}
fn Search(&self) -> DOMString {
let query = query_to_str(&self.page.get_url().query);
if query.as_slice() == "" {
query
} else {
"?".to_string().append(query.as_slice())
}
}
}
impl Reflectable for Location {
fn reflector<'a>(&'a self) -> &'a Reflector {
&self.reflector_
}
}<|fim▁end|> | |
<|file_name|>Arez_ObservableWithSpecificExceptionModel.java<|end_file_name|><|fim▁begin|>import arez.Arez;
import arez.ArezContext;
import arez.Component;
import arez.Disposable;
import arez.ObservableValue;
import arez.SafeProcedure;
import arez.component.DisposeNotifier;
import arez.component.Identifiable;
import arez.component.internal.ComponentKernel;
import java.text.ParseException;
import javax.annotation.Generated;
import javax.annotation.Nonnull;
import org.realityforge.braincheck.Guards;
@Generated("arez.processor.ArezProcessor")
final class Arez_ObservableWithSpecificExceptionModel extends ObservableWithSpecificExceptionModel implements Disposable, Identifiable<Integer>, DisposeNotifier {
private static volatile int $$arezi$$_nextId;
private final ComponentKernel $$arezi$$_kernel;
@Nonnull
private final ObservableValue<Long> $$arez$$_time;
Arez_ObservableWithSpecificExceptionModel() {
super();
final ArezContext $$arezv$$_context = Arez.context();
final int $$arezv$$_id = ++$$arezi$$_nextId;
final String $$arezv$$_name = Arez.areNamesEnabled() ? "ObservableWithSpecificExceptionModel." + $$arezv$$_id : null;
final Component $$arezv$$_component = Arez.areNativeComponentsEnabled() ? $$arezv$$_context.component( "ObservableWithSpecificExceptionModel", $$arezv$$_id, $$arezv$$_name, this::$$arezi$$_nativeComponentPreDispose ) : null;
this.$$arezi$$_kernel = new ComponentKernel( Arez.areZonesEnabled() ? $$arezv$$_context : null, Arez.areNamesEnabled() ? $$arezv$$_name : null, $$arezv$$_id, Arez.areNativeComponentsEnabled() ? $$arezv$$_component : null, null, Arez.areNativeComponentsEnabled() ? null : this::$$arezi$$_dispose, null, true, false, false );
this.$$arez$$_time = $$arezv$$_context.observable( Arez.areNativeComponentsEnabled() ? $$arezv$$_component : null, Arez.areNamesEnabled() ? $$arezv$$_name + ".time" : null, Arez.arePropertyIntrospectorsEnabled() ? () -> super.getTime() : null, Arez.arePropertyIntrospectorsEnabled() ? v -> super.setTime( v ) : null );<|fim▁hole|>
private int $$arezi$$_id() {
return this.$$arezi$$_kernel.getId();
}
@Override
@Nonnull
public Integer getArezId() {
if ( Arez.shouldCheckApiInvariants() ) {
Guards.apiInvariant( () -> null != this.$$arezi$$_kernel && this.$$arezi$$_kernel.hasBeenInitialized(), () -> "Method named 'getArezId' invoked on uninitialized component of type 'ObservableWithSpecificExceptionModel'" );
}
if ( Arez.shouldCheckApiInvariants() ) {
Guards.apiInvariant( () -> null != this.$$arezi$$_kernel && this.$$arezi$$_kernel.hasBeenConstructed(), () -> "Method named 'getArezId' invoked on un-constructed component named '" + ( null == this.$$arezi$$_kernel ? "?" : this.$$arezi$$_kernel.getName() ) + "'" );
}
return $$arezi$$_id();
}
private void $$arezi$$_nativeComponentPreDispose() {
this.$$arezi$$_kernel.notifyOnDisposeListeners();
}
@Override
public void addOnDisposeListener(@Nonnull final Object key, @Nonnull final SafeProcedure action) {
if ( Arez.shouldCheckApiInvariants() ) {
Guards.apiInvariant( () -> null != this.$$arezi$$_kernel && this.$$arezi$$_kernel.hasBeenInitialized(), () -> "Method named 'addOnDisposeListener' invoked on uninitialized component of type 'ObservableWithSpecificExceptionModel'" );
}
this.$$arezi$$_kernel.addOnDisposeListener( key, action );
}
@Override
public void removeOnDisposeListener(@Nonnull final Object key) {
if ( Arez.shouldCheckApiInvariants() ) {
Guards.apiInvariant( () -> null != this.$$arezi$$_kernel && this.$$arezi$$_kernel.hasBeenInitialized(), () -> "Method named 'removeOnDisposeListener' invoked on uninitialized component of type 'ObservableWithSpecificExceptionModel'" );
}
if ( Arez.shouldCheckApiInvariants() ) {
Guards.apiInvariant( () -> null != this.$$arezi$$_kernel && this.$$arezi$$_kernel.hasBeenConstructed(), () -> "Method named 'removeOnDisposeListener' invoked on un-constructed component named '" + ( null == this.$$arezi$$_kernel ? "?" : this.$$arezi$$_kernel.getName() ) + "'" );
}
this.$$arezi$$_kernel.removeOnDisposeListener( key );
}
@Override
public boolean isDisposed() {
if ( Arez.shouldCheckApiInvariants() ) {
Guards.apiInvariant( () -> null != this.$$arezi$$_kernel && this.$$arezi$$_kernel.hasBeenInitialized(), () -> "Method named 'isDisposed' invoked on uninitialized component of type 'ObservableWithSpecificExceptionModel'" );
}
if ( Arez.shouldCheckApiInvariants() ) {
Guards.apiInvariant( () -> null != this.$$arezi$$_kernel && this.$$arezi$$_kernel.hasBeenConstructed(), () -> "Method named 'isDisposed' invoked on un-constructed component named '" + ( null == this.$$arezi$$_kernel ? "?" : this.$$arezi$$_kernel.getName() ) + "'" );
}
return this.$$arezi$$_kernel.isDisposed();
}
@Override
public void dispose() {
if ( Arez.shouldCheckApiInvariants() ) {
Guards.apiInvariant( () -> null != this.$$arezi$$_kernel && this.$$arezi$$_kernel.hasBeenInitialized(), () -> "Method named 'dispose' invoked on uninitialized component of type 'ObservableWithSpecificExceptionModel'" );
}
if ( Arez.shouldCheckApiInvariants() ) {
Guards.apiInvariant( () -> null != this.$$arezi$$_kernel && this.$$arezi$$_kernel.hasBeenConstructed(), () -> "Method named 'dispose' invoked on un-constructed component named '" + ( null == this.$$arezi$$_kernel ? "?" : this.$$arezi$$_kernel.getName() ) + "'" );
}
this.$$arezi$$_kernel.dispose();
}
private void $$arezi$$_dispose() {
this.$$arez$$_time.dispose();
}
@Override
public long getTime() throws ParseException {
if ( Arez.shouldCheckApiInvariants() ) {
Guards.apiInvariant( () -> null != this.$$arezi$$_kernel && this.$$arezi$$_kernel.isActive(), () -> "Method named 'getTime' invoked on " + this.$$arezi$$_kernel.describeState() + " component named '" + this.$$arezi$$_kernel.getName() + "'" );
}
this.$$arez$$_time.reportObserved();
return super.getTime();
}
@Override
public void setTime(final long time) throws ParseException {
if ( Arez.shouldCheckApiInvariants() ) {
Guards.apiInvariant( () -> null != this.$$arezi$$_kernel && this.$$arezi$$_kernel.isActive(), () -> "Method named 'setTime' invoked on " + this.$$arezi$$_kernel.describeState() + " component named '" + this.$$arezi$$_kernel.getName() + "'" );
}
this.$$arez$$_time.preReportChanged();
final long $$arezv$$_currentValue = super.getTime();
if ( time != $$arezv$$_currentValue ) {
super.setTime( time );
this.$$arez$$_time.reportChanged();
}
}
@Override
public String toString() {
if ( Arez.areNamesEnabled() ) {
return "ArezComponent[" + this.$$arezi$$_kernel.getName() + "]";
} else {
return super.toString();
}
}
}<|fim▁end|> | this.$$arezi$$_kernel.componentConstructed();
this.$$arezi$$_kernel.componentReady();
} |
<|file_name|>Pinger.java<|end_file_name|><|fim▁begin|>/*
* Copyright 2011 Witoslaw Koczewsi <[email protected]>, Artjom Kochtchi
*
* This program is free software: you can redistribute it and/or modify it under the terms of the GNU Affero
* General Public License as published by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the
* implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU Affero General Public
* License for more details.
*
* You should have received a copy of the GNU General Public License along with this program. If not, see
* <http://www.gnu.org/licenses/>.
*/
package scrum.client.communication;
import ilarkesto.core.logging.Log;
import ilarkesto.core.time.Tm;
import java.util.LinkedList;
import scrum.client.DataTransferObject;
import scrum.client.core.ApplicationStartedEvent;
import scrum.client.core.ApplicationStartedHandler;
import scrum.client.project.Requirement;
import scrum.client.workspace.BlockCollapsedEvent;
import scrum.client.workspace.BlockCollapsedHandler;
import scrum.client.workspace.BlockExpandedEvent;
import scrum.client.workspace.BlockExpandedHandler;
import com.google.gwt.user.client.Timer;
public class Pinger extends GPinger implements ServerDataReceivedHandler, BlockExpandedHandler, BlockCollapsedHandler,
ApplicationStartedHandler {
private static Log log = Log.get(Pinger.class);
public static final int MIN_DELAY = 1000;
public static final int MAX_DELAY = 5000;
private Timer timer;
private int maxDelay = MAX_DELAY;
private long lastDataReceiveTime = Tm.getCurrentTimeMillis();
private LinkedList<Long> pingTimes = new LinkedList<Long>();
private boolean disabled;<|fim▁hole|> @Override
public void onApplicationStarted(ApplicationStartedEvent event) {
timer = new Timer() {
@Override
public void run() {
if (!disabled && !serviceCaller.containsServiceCall(PingServiceCall.class)) {
final long start = Tm.getCurrentTimeMillis();
new PingServiceCall().execute(new Runnable() {
@Override
public void run() {
long time = Tm.getCurrentTimeMillis() - start;
pingTimes.add(time);
if (pingTimes.size() > 10) pingTimes.removeFirst();
}
});
}
reschedule();
}
};
reschedule();
}
public void setDisabled(boolean disabled) {
this.disabled = disabled;
}
public boolean isDisabled() {
return disabled;
}
public void shutdown() {
log.info("Shutting down");
if (timer == null) return;
timer.cancel();
timer = null;
}
@Override
public void onServerDataReceived(ServerDataReceivedEvent event) {
DataTransferObject data = event.getData();
if (data.containsEntities()) {
lastDataReceiveTime = Tm.getCurrentTimeMillis();
reschedule();
}
}
@Override
public void onBlockCollapsed(BlockCollapsedEvent event) {
deactivatePowerPolling();
}
@Override
public void onBlockExpanded(BlockExpandedEvent event) {
Object object = event.getObject();
if (object instanceof Requirement) {
Requirement requirement = (Requirement) object;
if (requirement.isWorkEstimationVotingActive()) activatePowerPolling();
}
}
public void reschedule() {
if (timer == null) return;
long idle = Tm.getCurrentTimeMillis() - lastDataReceiveTime;
idle = (int) (idle * 0.15);
if (idle < MIN_DELAY) idle = MIN_DELAY;
if (idle > maxDelay) idle = maxDelay;
timer.scheduleRepeating((int) idle);
}
private void activatePowerPolling() {
maxDelay = MIN_DELAY;
log.debug("PowerPolling activated");
}
private void deactivatePowerPolling() {
if (maxDelay == MAX_DELAY) return;
maxDelay = MAX_DELAY;
lastDataReceiveTime = Tm.getCurrentTimeMillis();
log.debug("PowerPolling deactivated");
}
public Long getAvaragePingTime() {
if (pingTimes.isEmpty()) return null;
long sum = 0;
for (Long time : pingTimes) {
sum += time;
}
return sum / pingTimes.size();
}
public String getAvaragePingTimeMessage() {
Long time = getAvaragePingTime();
if (time == null) return null;
return "Current response time: " + time + " ms.";
}
}<|fim▁end|> | |
<|file_name|>NodalMutationTest.java<|end_file_name|><|fim▁begin|>/*
Grammatical Evolution in Java
Release: GEVA-v2.0.zip
Copyright (C) 2008 Michael O'Neill, Erik Hemberg, Anthony Brabazon, Conor Gilligan
Contributors Patrick Middleburgh, Eliott Bartley, Jonathan Hugosson, Jeff Wrigh
Separate licences for asm, bsf, antlr, groovy, jscheme, commons-logging, jsci is included in the lib folder.
Separate licence for rieps is included in src/com folder.
This licence refers to GEVA-v2.0.
This software is distributed under the terms of the GNU General Public License.
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
/>.
*/
/*
* To change this template, choose Tools | Templates
* and open the template in the editor.
*/
package Operator.Operations.ContextSensitiveOperations;
import Helpers.GrammarCreator;
import Helpers.IndividualMaker;
import Helpers.JUnitHelper;
import Individuals.GEChromosome;
import Individuals.GEIndividual;
import Individuals.GEIndividualTest;
import Mapper.ContextualDerivationTree;
import Util.Constants;
import Util.GenotypeHelper;
import Util.Random.MersenneTwisterFast;
import java.util.ArrayList;
import java.util.Properties;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
import static org.junit.Assert.*;
/**
*
* @author jbyrne
*/
public class NodalMutationTest {
Properties p;
Properties p2;
public NodalMutationTest() {
p = GrammarCreator.getProperties();
p2 = GrammarCreator.getProperties();
p.setProperty(Constants.MAX_WRAPS,"0");
p.setProperty(Constants.DERIVATION_TREE,"Mapper.ContextualDerivationTree");
p2.setProperty(Constants.MAX_WRAPS,"0");
p2.setProperty(Constants.DERIVATION_TREE,"Mapper.ContextualDerivationTree");
String grammar_file = GrammarCreator.getGrammarFile("test_gec.bnf");
p2.setProperty("grammar_file", grammar_file);
}
@BeforeClass
public static void setUpClass() throws Exception {
}
@AfterClass
public static void tearDownClass() throws Exception {
}
@Before
public void setUp() {
}
@After
public void tearDown() {
}
/**
* Test of doOperation method, of class NodalMutation.
* create an instance, mutate it, see if its okay
*/
@Test
public void testDoOperation_Individual() {
System.out.println("Nodal muation doOperation");
//Integer.MAX_VALUE
GEIndividual operand = IndividualMaker.makeIndividual(p);
int[] chromosome = {0,1,2};
int[] expected = {0,1,111352301};
GEChromosome geChromosome = (GEChromosome)operand.getGenotype().get(0);
geChromosome.setAll(chromosome);
NodalMutation instance = new NodalMutation(0.5, new MersenneTwisterFast(2));
instance.doOperation(operand);
JUnitHelper.checkArrays(expected, geChromosome.data);
GEIndividualTest.testInvalidated(operand);
//test to make sure its invalidated
geChromosome = null;
try {
instance.doOperation(operand);
} catch(NullPointerException e) {
assertTrue(true);
}
GEIndividualTest.testInvalidated(operand);
}
/**<|fim▁hole|> public void testDoOperation_codonList() {
System.out.println("Nodal mutation codonlist");
//Integer.MAX_VALUE
GEIndividual operand = IndividualMaker.makeIndividual(p);
int[] chromosome = {1,1,2,1,1,2,4,6,7,8,9,9,9,0,5,4,3};
GEChromosome geChromosome = (GEChromosome)operand.getGenotype().get(0);
geChromosome.setAll(chromosome);
ContextualDerivationTree tree = (ContextualDerivationTree) GenotypeHelper.buildDerivationTree(operand);
System.out.println(tree.toString());
ArrayList<Integer> expected = tree.getNodeCodonList();
NodalMutation instance = new NodalMutation(0.5, new MersenneTwisterFast(2));
instance.doOperation(operand);
tree = (ContextualDerivationTree) GenotypeHelper.buildDerivationTree(operand);
ArrayList<Integer> result = tree.getNodeCodonList();;
System.out.println("expected"+expected.toString());
System.out.println("result"+result.toString());
System.out.println(tree.toString());
JUnitHelper.checkArrays(expected, result);
GEIndividualTest.testInvalidated(operand);
}
//this tests that it will mutate gecodonvalues
@Test
public void testDoOperation_GECodonValue() {
GEIndividual operand = IndividualMaker.makeIndividual(p2);
int[] chromosome = {1,2,1,1,2,2,0,0};
int[] expected = {1,2,1937831252,1,2,1748719057,0,0,};
GEChromosome geChromosome = (GEChromosome)operand.getGenotype().get(0);
geChromosome.setAll(chromosome);
System.out.println("Operand:"+operand);
ContextualDerivationTree tree = (ContextualDerivationTree) GenotypeHelper.buildDerivationTree(operand);
System.out.println("BEFORE "+operand.getGenotype());
//FIXME Erik Commenting out string because it threw null pointer and I did not know why. And it did not seem to matter to the test what was printed??
//System.out.println(tree.toString());
NodalMutation instance = new NodalMutation(1.0, new MersenneTwisterFast(0));
instance.doOperation(operand);
tree = (ContextualDerivationTree) GenotypeHelper.buildDerivationTree(operand);
System.out.println("AFTER "+operand.getGenotype());
// System.out.println(tree.toString());
//JUnitHelper.checkArrays(expected, geChromosome.data);
//GEIndividualTest.testInvalidated(operand);
}
}<|fim▁end|> | * Test of doOperation method, of class NodalMutation.
* create an instance, mutate it, see if its okay
*/
@Test |
<|file_name|>testutil.py<|end_file_name|><|fim▁begin|>from __future__ import absolute_import
import os
import socket
import random
import string
import time
import uuid
import pytest
from . import unittest
from kafka import SimpleClient, create_message
from kafka.client_async import KafkaClient
from kafka.errors import (
LeaderNotAvailableError, KafkaTimeoutError, InvalidTopicError,
NotLeaderForPartitionError, UnknownTopicOrPartitionError,
FailedPayloadsError
)
from kafka.structs import OffsetRequestPayload, ProduceRequestPayload
#from test.fixtures import random_string, version_str_to_list, version as kafka_version #pylint: disable=wrong-import-order
def random_string(length):
return "".join(random.choice(string.ascii_letters) for i in range(length))
def env_kafka_version():
"""Return the Kafka version set in the OS environment as a tuple.
Example: '0.8.1.1' --> (0, 8, 1, 1)
"""
if 'KAFKA_VERSION' not in os.environ:
return ()
return tuple(map(int, os.environ['KAFKA_VERSION'].split('.')))
def get_open_port():
sock = socket.socket()
sock.bind(("", 0))
port = sock.getsockname()[1]
sock.close()
return port
_MESSAGES = {}
def msg(message):
"""Format, encode and deduplicate a message
"""
global _MESSAGES #pylint: disable=global-statement
if message not in _MESSAGES:
_MESSAGES[message] = '%s-%s' % (message, str(uuid.uuid4()))
return _MESSAGES[message].encode('utf-8')
def send_messages(client, topic, partition, messages):
"""Send messages to a topic's partition
"""
messages = [create_message(msg(str(m))) for m in messages]
produce = ProduceRequestPayload(topic, partition, messages=messages)
resp, = client.send_produce_request([produce])
assert resp.error == 0
return [x.value for x in messages]
def current_offset(client, topic, partition, kafka_broker=None):
"""Get the current offset of a topic's partition
"""
try:
offsets, = client.send_offset_request([OffsetRequestPayload(topic,
partition, -1, 1)])<|fim▁hole|> except Exception:
# XXX: We've seen some UnknownErrors here and can't debug w/o server logs
if kafka_broker:
kafka_broker.dump_logs()
raise
else:
return offsets.offsets[0]
def assert_message_count(messages, num_messages):
"""Check that we received the expected number of messages with no duplicates."""
# Make sure we got them all
assert len(messages) == num_messages
# Make sure there are no duplicates
# Note: Currently duplicates are identified only using key/value. Other attributes like topic, partition, headers,
# timestamp, etc are ignored... this could be changed if necessary, but will be more tolerant of dupes.
unique_messages = {(m.key, m.value) for m in messages}
assert len(unique_messages) == num_messages
class KafkaIntegrationTestCase(unittest.TestCase):
create_client = True
topic = None
zk = None
server = None
def setUp(self):
super(KafkaIntegrationTestCase, self).setUp()
if not os.environ.get('KAFKA_VERSION'):
self.skipTest('Integration test requires KAFKA_VERSION')
if not self.topic:
topic = "%s-%s" % (self.id()[self.id().rindex(".") + 1:], random_string(10))
self.topic = topic
if self.create_client:
self.client = SimpleClient('%s:%d' % (self.server.host, self.server.port))
self.client_async = KafkaClient(bootstrap_servers='%s:%d' % (self.server.host, self.server.port))
timeout = time.time() + 30
while time.time() < timeout:
try:
self.client.load_metadata_for_topics(self.topic, ignore_leadernotavailable=False)
if self.client.has_metadata_for_topic(topic):
break
except (LeaderNotAvailableError, InvalidTopicError):
time.sleep(1)
else:
raise KafkaTimeoutError('Timeout loading topic metadata!')
# Ensure topic partitions have been created on all brokers to avoid UnknownPartitionErrors
# TODO: It might be a good idea to move this to self.client.ensure_topic_exists
for partition in self.client.get_partition_ids_for_topic(self.topic):
while True:
try:
req = OffsetRequestPayload(self.topic, partition, -1, 100)
self.client.send_offset_request([req])
break
except (NotLeaderForPartitionError, UnknownTopicOrPartitionError, FailedPayloadsError) as e:
if time.time() > timeout:
raise KafkaTimeoutError('Timeout loading topic metadata!')
time.sleep(.1)
self._messages = {}
def tearDown(self):
super(KafkaIntegrationTestCase, self).tearDown()
if not os.environ.get('KAFKA_VERSION'):
return
if self.create_client:
self.client.close()
def current_offset(self, topic, partition):
try:
offsets, = self.client.send_offset_request([OffsetRequestPayload(topic,
partition, -1, 1)])
except Exception:
# XXX: We've seen some UnknownErrors here and can't debug w/o server logs
self.zk.child.dump_logs()
self.server.child.dump_logs()
raise
else:
return offsets.offsets[0]
def msgs(self, iterable):
return [self.msg(x) for x in iterable]
def msg(self, s):
if s not in self._messages:
self._messages[s] = '%s-%s-%s' % (s, self.id(), str(uuid.uuid4()))
return self._messages[s].encode('utf-8')
def key(self, k):
return k.encode('utf-8')
class Timer(object):
def __enter__(self):
self.start = time.time()
return self
def __exit__(self, *args):
self.end = time.time()
self.interval = self.end - self.start<|fim▁end|> | |
<|file_name|>validators.d.ts<|end_file_name|><|fim▁begin|>import { OpaqueToken } from 'angular2/src/core/di';
import * as modelModule from './model';
export declare const NG_VALIDATORS: OpaqueToken;
/**
* Provides a set of validators used by form controls.
*
* # Example
*
* ```<|fim▁hole|> * var loginControl = new Control("", Validators.required)
* ```
*/
export declare class Validators {
static required(control: modelModule.Control): {
[key: string]: boolean;
};
static minLength(minLength: number): Function;
static maxLength(maxLength: number): Function;
static nullValidator(c: any): {
[key: string]: boolean;
};
static compose(validators: Function[]): Function;
static group(group: modelModule.ControlGroup): {
[key: string]: any[];
};
static array(array: modelModule.ControlArray): {
[key: string]: any[];
};
static _mergeErrors(control: modelModule.AbstractControl, res: {
[key: string]: any[];
}): void;
}<|fim▁end|> | |
<|file_name|>client_simple.rs<|end_file_name|><|fim▁begin|>//! An example of usage of the `solicit::client::SimpleClient` API.
//!
//! This is a simple implementation of an HTTP/2 client, built on top of the API of `solicit::http`
//! that performs all IO in the main thread.
extern crate solicit;
use std::env;
use std::str;
use solicit::http::Response;
use solicit::http::client::CleartextConnector;
use solicit::client::SimpleClient;
fn fetch(host: &str, port: u16, paths: &[String]) -> Vec<Response<'static, 'static>> {
let mut client = SimpleClient::with_connector(CleartextConnector::with_port(host, port)).unwrap();
paths.iter().map(|path| client.get(path.as_bytes(), &[]).unwrap()).collect()
}
fn main() {
fn print_usage() {
println!("Usage: client_simple <host>[:<port>] <path> [<path>...]");
println!(
"NOTE: The example does not accept URLs, rather the host name and a list of paths");
}<|fim▁hole|> if host.is_none() || paths.is_empty() {
print_usage();
return;
}
let host = host.unwrap();
// Split off the port, if present
let parts: Vec<_> = host.split(":").collect();
if parts.len() > 2 {
println!("Invalid host!");
print_usage();
return;
}
let (host, port) = if parts.len() == 1 {
(parts[0], 80)
} else {
let port = match str::FromStr::from_str(parts[1]) {
Err(_) => {
println!("Invalid host (invalid port given)");
print_usage();
return;
},
Ok(port) => port,
};
(parts[0], port)
};
let responses = fetch(&host, port, &paths);
for (path, response) in paths.iter().zip(responses) {
println!("Request path: {}", path);
println!(" status == {}", response.status_code().unwrap());
// Dump the headers and the response body to stdout.
// They are returned as raw bytes for the user to do as they please.
// (Note: in general directly decoding assuming a utf8 encoding might not
// always work -- this is meant as a simple example that shows that the
// response is well formed.)
for header in response.headers.iter() {
println!(" {}: {}",
str::from_utf8(header.name()).unwrap(),
str::from_utf8(header.value()).unwrap());
}
println!("");
println!("{}", str::from_utf8(&response.body).unwrap());
}
}<|fim▁end|> |
let host = env::args().nth(1);
let paths: Vec<_> = env::args().skip(2).collect();
|
<|file_name|>test-logger.js<|end_file_name|><|fim▁begin|>#!/usr/bin/env node
var logger = require('../lib/logger')('test-logger');
var config = require('../lib/config');
var count = 1;
<|fim▁hole|>setInterval(function() {
logger.debug(count);
logger.info(count);
count += 1;
}, 1000);<|fim▁end|> | |
<|file_name|>id3v1.rs<|end_file_name|><|fim▁begin|>extern crate byteorder;
use std::io::{self, Read, Write, Seek, SeekFrom};
use num::Bounded;
use std::fmt;
use self::byteorder::{BigEndian, ReadBytesExt};
/// The fields in an ID3v1 tag, including the "1.1" track number field.
#[derive(Copy, Clone)]
#[allow(missing_docs)]
pub enum Fields {
Title,
Artist,
Album,
Year,
Comment,
Track,
Genre,
}
impl Fields {
fn length(&self) -> usize {
LENGTHS[*self as usize] as usize
}
}
const LENGTHS: &'static [i8]=&[30, 30, 30, 4, 30, -1, 1];
const TAG: &'static [u8] = b"TAG";
/// How far from the end of a file to probe for an ID3 tag signature.
pub const TAG_OFFSET: i64 = 128;
const TAGPLUS: &'static [u8] = b"TAG+";
/// How far from the end of a file to probe for an extended ID3 tag signature.
pub const TAGPLUS_OFFSET: i64 = 355;
const XLENGTHS: &'static [i8]=&[60, 60, 60, 30, 6, 6];
/// The fields in an extended ID3v1 tag.
#[derive(Copy, Clone)]
#[allow(missing_docs)]
pub enum XFields {
XTitle,
XArtist,
XAlbum,
Speed,
XGenre,
Start,
End,
}
impl XFields {
fn length(&self) -> usize {
XLENGTHS[*self as usize] as usize
}
}
/// ID3v1's notion of a four-digit year.
#[derive(Debug, Copy, Clone)]
pub struct Year
{
value: u16,
}
impl Year {
fn value(&self) -> u16 {
self.value
}
fn new(year: u16) -> Option<Year> {
#![allow(deprecated)]
let max: Year = Bounded::max_value();
if year > max.value() {
None
} else {
Some(Year {value: year})
}
}
}
impl Bounded for Year {
#![allow(deprecated)]
fn min_value() -> Year {
Year {value: 0}
}
fn max_value() -> Year {
Year {value: 9999}
}
}
/// ID3v1 extended time tags--encoded in the format "mmm:ss", a valid value can be a maximum of 999m99s = 999*60+99 = 60039 seconds.
#[derive(Copy, Clone, Debug)]
pub struct Time
{
value: u16,
}
impl Time {
fn seconds(&self) -> u16 {
self.value
}
fn new(seconds: u16) -> Option<Time> {
#![allow(deprecated)]
let max: Time = Bounded::max_value();
if seconds > max.seconds() {
None
} else {
Some(Time {value: seconds})
}
}
}
impl Bounded for Time {
#![allow(deprecated)]
fn min_value() -> Time {
Time {value: 0}
}
fn max_value() -> Time {
Time {value: 60039}
}
}
impl fmt::Display for Time {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "{:03}:{:02}", self.seconds()/60, self.seconds()%60)
}
}
/// Parsed ID3v1 tag metadata.
#[derive(Debug)]
pub struct Tag {
/// The full title (ID3v1 + extension if present).
pub title: Vec<u8>,
/// The full artist (ID3v1 + extension if present).
pub artist: Vec<u8>,
/// The full album (ID3v1 + extension if present).
pub album: Vec<u8>,
/// A 4-digit string, if we are lucky
pub year: Year,
/// A free-form comment.
pub comment: Vec<u8>,
/// Number of the track, 0 if not set. ID3v1.1 data.
pub track: u8,
/// The genre mapping is standardized up to 79, some extensions exist.
/// http://eyed3.nicfit.net/plugins/genres_plugin.html
pub genre: u8,
/// 1 (slow), 2, 3, 4 (fast) or 0 (not set). ID3v1 extended data.
pub speed: u8,
/// Free-form genre string. ID3v1 extended data.
pub genre_str: Vec<u8>,
/// The real start of the track, mmm:ss. ID3v1 extended data.
pub start_time: Time,
/// The real end of the track, mmm:ss. ID3v1 extended data.
pub end_time: Time,
}
fn write_zero_padded<W: Write>(writer: &mut W, data: &[u8], offset: usize, len: usize) -> Result<(), io::Error> {
let start = ::std::cmp::min(offset, data.len());
let actual_len = ::std::cmp::min(offset+len, data.len());
try!(writer.write(&data[start..actual_len]));
for _ in 0..(len-(actual_len-start)) {
try!(writer.write(&[0]));
}
Ok(())
}
impl Tag {
/// Create a new ID3v1 tag with no information.
pub fn new() -> Tag {
Tag {
title: vec![], artist: vec![], album: vec![], year: Year::new(0).unwrap(), comment: vec![], track: 0,
genre: 0, speed: 0, genre_str: vec![], start_time: Time::new(0).unwrap(), end_time: Time::new(0).unwrap()
}
}
/// Returns whether the tag contains information which would be lost if the extended tag were not written.
pub fn has_extended_data(&self) -> bool {
use self::Fields::*;
self.title.len() > Title.length() ||
self.artist.len() > Artist.length() ||
self.album.len() > Album.length() ||
self.speed > 0 ||
self.genre_str.len() > 0 ||
self.start_time.seconds() > 0 ||
self.end_time.seconds() > 0
}
/// Write the simple ID3 tag (128 bytes) into the given writer.
/// If write_track_number is true, the comment field will be truncated to 28 bytes and the removed two bytes will be used for a NUL and the track number.
pub fn write<W: Write>(&self, writer: &mut W, write_track_number: bool) -> Result<(), io::Error> {
use self::Fields::*;
try!(writer.write(TAG));
try!(write_zero_padded(writer, &*self.title, 0, Title.length()));
try!(write_zero_padded(writer, &*self.artist, 0, Artist.length()));
try!(write_zero_padded(writer, &*self.album, 0, Album.length()));
try!(write!(writer,"{:04}", self.year.value()));
if write_track_number {
try!(writer.write(&self.comment[..Comment.length()-2]));
try!(writer.write(&[0]));
try!(writer.write(&[self.track]));
} else {
try!(writer.write(&self.comment[..Comment.length()]));
}
try!(writer.write(&[self.genre]));
Ok(())
}
/// Write the extended portion of an ID3v1 tag (227 bytes) into the given writer.
pub fn write_extended<W: Write>(&self, writer: &mut W) -> Result<(), io::Error> {
use self::Fields::*;
use self::XFields::*;
try!(write_zero_padded(writer, &*self.title, Title.length(), XTitle.length()));
try!(write_zero_padded(writer, &*self.artist, Artist.length(), XArtist.length()));
try!(write_zero_padded(writer, &*self.album, Album.length(), XAlbum.length()));
try!(writer.write(&[self.speed]));
try!(write_zero_padded(writer, &*self.genre_str, 0, XGenre.length()));
try!(write!(writer,"{}", self.start_time));
try!(write!(writer,"{}", self.end_time));
Ok(())
}
}
/// Checks for presence of the signature indicating an ID3v1 tag at the reader's current offset.
/// Consumes 3 bytes from the reader.
#[inline]
pub fn probe_tag<R: Read>(reader: &mut R) -> Result<bool, io::Error> {
let mut x=&mut [0; 3/*TAG.len()*/];
reader.read(x).and(Ok(TAG == x))
}
/// Checks for presence of the signature indicating an ID3v1 extended metadata tag at the reader's current offset.
/// Consumes 4 bytes from the reader.
#[inline]
pub fn probe_xtag<R: Read>(reader: &mut R) -> Result<bool, io::Error> {
let mut x=&mut [0; 4/*TAGPLUS.len()*/];
reader.read(x).and(Ok(TAGPLUS == x))
}
fn parse_year(s: &[u8]) -> Year {
let zero = Year::new(0).unwrap();
match ::std::str::from_utf8(s) {
Ok(st) => {
let mn: Option<u16> = str::parse(st).ok();
let n = mn.unwrap_or(0);
Year::new(n).unwrap_or(zero)
},
Err(_) => zero
}
}
fn parse_time(s: &[u8]) -> Time {
enum State {
Seconds,
Minutes,
LeadingWhitespace,
}
let zero = Time::new(0).unwrap();
let mut mult: u64=1;
let mut seconds: u64=0;
let mut state=State::Seconds;
fn is_digit(s: u8) -> bool {
s >= b'0' && s <= b'9'
}
fn value(s: u8) -> u8 {
s-b'0'
}
for &i in s.iter().rev() {
match state {
State::Seconds =>
if is_digit(i) {
seconds+=mult*value(i) as u64;
mult*=10;
} else if i == b':' {
state=State::Minutes;
mult=60;
} else {
return zero;
},
State::Minutes =>
if is_digit(i) {
seconds+=mult*value(i) as u64;
mult*=60;
} else if (i as char).is_whitespace() {
state=State::LeadingWhitespace;
},
State::LeadingWhitespace =>
if (i as char).is_whitespace() {
continue
} else {
return zero;
},
}
}
if seconds > 65535 {
zero
} else {
Time::new(seconds as u16).unwrap_or(zero)
}
}
/// Read an ID3v1 tag from a reader.
pub fn read_tag<R: Read>(reader: &mut R) -> Result<Option<Tag>, io::Error> {
use self::Fields::*;
let mut tag = Tag::new();
// Try to read ID3v1 metadata.
let has_tag = try!(probe_tag(reader));
if has_tag {
read_all_vec!(reader, tag.title, Title.length());
read_all_vec!(reader, tag.artist, Artist.length());
read_all_vec!(reader, tag.album, Album.length());
let year_str=&mut [0u8; 4]; read_all!(reader, year_str);
tag.year=parse_year(year_str);
read_all_vec!(reader, tag.comment, Comment.length()-2);
let track_guard_byte=try!(reader.read_u8());
if track_guard_byte == 0 {
tag.track=try!(reader.read_u8());
} else {
tag.comment.push(track_guard_byte);
tag.comment.push(try!(reader.read_u8()));
}
tag.genre=try!(reader.read_u8());
Ok(Some(tag))
}
else
{
Ok(None)
}
}
/// Read the extended portion of an extended ID3v1 tag from a reader, combining
/// extended data with a previously-read ID3v1 tag.
///
/// Returns Ok(true) if valid extended data was parsed,
/// Ok(false) if no extended data was found (no header),
/// Err if read errors occurred
pub fn read_xtag<R: Read>(reader: &mut R, tag: &mut Tag) -> Result<bool, io::Error> {
use self::Fields::*;
use self::XFields::*;
// Try to read ID3v1 extended metadata.
let has_xtag = try!(probe_xtag(reader));
if has_xtag {
maybe_read!(reader, tag.title, XTitle.length());
maybe_read!(reader, tag.artist, XArtist.length());
maybe_read!(reader, tag.album, XAlbum.length());
tag.speed = try!(reader.read_u8());
maybe_read!(reader, tag.genre_str, Genre.length());
let mut start_str=vec![]; maybe_read!(reader, start_str, Start.length());
tag.start_time=parse_time(&*start_str);
let mut end_str=vec![]; maybe_read!(reader, end_str, End.length());
tag.end_time=parse_time(&*end_str);
Ok(true)
}
else
{
Ok(false)
}
}
/// Remove trailing zeros from an &[u8].
pub fn truncate_zeros(mut s: &[u8]) -> &[u8] {
while s.len() > 0 && s[s.len()-1] == 0 {
s=&s[..s.len()-1]
}
s
}
/// Read an ID3v1 and any extended tag data, if present, combining extended data
/// with a previously-read ID3v1 tag. If read_extended is false, does not attempt
/// to read or merge in extended data.
///
/// This function seeks to the expected offset (-TAG_OFFSET and -TAGPLUS_OFFSET)
/// relative to the end of the file) before attempting to read tag data.
pub fn read_seek<R: Read + Seek>(reader: &mut R, read_extended: bool) -> Result<Option<Tag>, io::Error> {
try!(reader.seek(SeekFrom::End(-TAG_OFFSET)));
let mut maybe_tag=try!(read_tag(reader));
if read_extended
{
if let Some(ref mut tag) = maybe_tag {
try!(reader.seek(SeekFrom::End(-TAGPLUS_OFFSET)));
try!(read_xtag(reader, tag));
}
}
Ok(maybe_tag)
}
/// Read an ID3v1 and any extended tag data, if present, from a reader,
/// combining extended data with a previously-read ID3v1 tag.
///
/// The reader should start TAGPLUS_OFFSET bytes from the end of the file.
pub fn read<R: Read>(reader: &mut R) -> Result<Option<Tag>, io::Error> {
let mut tagplus_buf = [0u8; TAGPLUS_OFFSET as usize];
read_all!(reader, &mut tagplus_buf);
let mut tag = try!(read_tag(reader));
if let Some(ref mut tag) = tag {
try!(read_xtag(reader, tag));
}
Ok(tag)
}
#[test]
fn smoke_test() {
use std::io::{Seek, SeekFrom};
use std::path::Path;
let mut f=::std::fs::File::open(&Path::new("id3v1.mp3")).ok().expect("could not open `id3v1.mp3`");
f.seek(SeekFrom::End(-TAG_OFFSET)).ok().unwrap();
let mut tag=read_tag(&mut f).ok().expect("error reading tag").expect("no tag in file");
println!("{:?}", tag);
f.seek(SeekFrom::End(-TAGPLUS_OFFSET)).ok().unwrap();
read_xtag(&mut f, &mut tag).ok().unwrap();
println!("{:?}", tag);
}
#[test]
fn test_read() {
let buf_notag = [b'x'; TAG_OFFSET as usize];
let buf_headeronly = [b'T', b'A', b'G'];
let buf_toosmall = [b'T', b'A', b'G', 0, 4, 36];
let tag_notag = read_tag(&mut &buf_notag[..]);
assert!(tag_notag.is_ok());
assert!(tag_notag.unwrap().is_none());
let tag_headeronly = read_tag(&mut &buf_headeronly[..]);
assert!(tag_headeronly.is_err());
let tag_toosmall = read_tag(&mut &buf_toosmall[..]);
assert!(tag_toosmall.is_err());
/* println!("{:?}", tag);
f.seek(SeekFrom::End(-TAGPLUS_OFFSET));
read_xtag(&mut f, &mut tag);<|fim▁hole|> println!("{:?}", tag);*/
}<|fim▁end|> | |
<|file_name|>microbatched_model.py<|end_file_name|><|fim▁begin|>import tensorflow as tf
import numpy as np
from baselines.ppo2.model import Model
class MicrobatchedModel(Model):
"""
Model that does training one microbatch at a time - when gradient computation
on the entire minibatch causes some overflow
"""
def __init__(self, *, policy, ob_space, ac_space, nbatch_act, nbatch_train,
nsteps, ent_coef, vf_coef, max_grad_norm, mpi_rank_weight, comm, microbatch_size):
self.nmicrobatches = nbatch_train // microbatch_size
self.microbatch_size = microbatch_size
assert nbatch_train % microbatch_size == 0, 'microbatch_size ({}) should divide nbatch_train ({}) evenly'.format(microbatch_size, nbatch_train)
super().__init__(<|fim▁hole|> ac_space=ac_space,
nbatch_act=nbatch_act,
nbatch_train=microbatch_size,
nsteps=nsteps,
ent_coef=ent_coef,
vf_coef=vf_coef,
max_grad_norm=max_grad_norm,
mpi_rank_weight=mpi_rank_weight,
comm=comm)
self.grads_ph = [tf.placeholder(dtype=g.dtype, shape=g.shape) for g in self.grads]
grads_ph_and_vars = list(zip(self.grads_ph, self.var))
self._apply_gradients_op = self.trainer.apply_gradients(grads_ph_and_vars)
def train(self, lr, cliprange, obs, returns, masks, actions, values, neglogpacs, states=None):
assert states is None, "microbatches with recurrent models are not supported yet"
# Here we calculate advantage A(s,a) = R + yV(s') - V(s)
# Returns = R + yV(s')
advs = returns - values
# Normalize the advantages
advs = (advs - advs.mean()) / (advs.std() + 1e-8)
# Initialize empty list for per-microbatch stats like pg_loss, vf_loss, entropy, approxkl (whatever is in self.stats_list)
stats_vs = []
for microbatch_idx in range(self.nmicrobatches):
_sli = range(microbatch_idx * self.microbatch_size, (microbatch_idx+1) * self.microbatch_size)
td_map = {
self.train_model.X: obs[_sli],
self.A:actions[_sli],
self.ADV:advs[_sli],
self.R:returns[_sli],
self.CLIPRANGE:cliprange,
self.OLDNEGLOGPAC:neglogpacs[_sli],
self.OLDVPRED:values[_sli]
}
# Compute gradient on a microbatch (note that variables do not change here) ...
grad_v, stats_v = self.sess.run([self.grads, self.stats_list], td_map)
if microbatch_idx == 0:
sum_grad_v = grad_v
else:
# .. and add to the total of the gradients
for i, g in enumerate(grad_v):
sum_grad_v[i] += g
stats_vs.append(stats_v)
feed_dict = {ph: sum_g / self.nmicrobatches for ph, sum_g in zip(self.grads_ph, sum_grad_v)}
feed_dict[self.LR] = lr
# Update variables using average of the gradients
self.sess.run(self._apply_gradients_op, feed_dict)
# Return average of the stats
return np.mean(np.array(stats_vs), axis=0).tolist()<|fim▁end|> | policy=policy,
ob_space=ob_space, |
<|file_name|>interaction.py<|end_file_name|><|fim▁begin|>__author__ = 'rohe0002'
import json
import logging
from urlparse import urlparse
from bs4 import BeautifulSoup
from mechanize import ParseResponseEx
from mechanize._form import ControlNotFoundError, AmbiguityError
from mechanize._form import ListControl
logger = logging.getLogger(__name__)
NO_CTRL = "No submit control with the name='%s' and value='%s' could be found"
class FlowException(Exception):
def __init__(self, function="", content="", url=""):
Exception.__init__(self)
self.function = function
self.content = content
self.url = url
def __str__(self):
return json.dumps(self.__dict__)
class InteractionNeeded(Exception):
pass
def NoneFunc():
return None
class RResponse():
"""
A Response class that behaves in the way that mechanize expects it.
Links to a requests.Response
"""
def __init__(self, resp):
self._resp = resp
self.index = 0
self.text = resp.text
if isinstance(self.text, unicode):
if resp.encoding == "UTF-8":
self.text = self.text.encode("utf-8")
else:
self.text = self.text.encode("latin-1")
self._len = len(self.text)
self.url = str(resp.url)
self.statuscode = resp.status_code
def geturl(self):
return self._resp.url
def __getitem__(self, item):
try:
return getattr(self._resp, item)
except AttributeError:
return getattr(self._resp.headers, item)
def __getattribute__(self, item):
try:
return getattr(self._resp, item)
except AttributeError:
return getattr(self._resp.headers, item)
def read(self, size=0):
"""
Read from the content of the response. The class remembers what has
been read so it's possible to read small consecutive parts of the
content.
:param size: The number of bytes to read
:return: Somewhere between zero and 'size' number of bytes depending
on how much it left in the content buffer to read.
"""
if size:
if self._len < size:
return self.text
else:
if self._len == self.index:
part = None
elif self._len - self.index < size:
part = self.text[self.index:]
self.index = self._len
else:
part = self.text[self.index:self.index + size]
self.index += size
return part
else:
return self.text
class Interaction(object):
def __init__(self, httpc, interactions=None):
self.httpc = httpc
self.interactions = interactions
self.who = "Form process"
def pick_interaction(self, _base="", content="", req=None):
logger.info("pick_interaction baseurl: %s" % _base)
unic = content
if content:
_bs = BeautifulSoup(content)
else:
_bs = None
for interaction in self.interactions:
_match = 0
for attr, val in interaction["matches"].items():
if attr == "url":
logger.info("matching baseurl against: %s" % val)
if val == _base:
_match += 1
elif attr == "title":
logger.info("matching '%s' against title" % val)
if _bs is None:
break
if _bs.title is None:
break
if val in _bs.title.contents:
_match += 1
else:
_c = _bs.title.contents
if isinstance(_c, list) and not isinstance(_c,
basestring):
for _line in _c:
if val in _line:
_match += 1
continue
elif attr == "content":
if unic and val in unic:
_match += 1
elif attr == "class":
if req and val == req:
_match += 1
if _match == len(interaction["matches"]):
logger.info("Matched: %s" % interaction["matches"])
return interaction
raise InteractionNeeded("No interaction matched")
def pick_form(self, response, url=None, **kwargs):
"""
Picks which form in a web-page that should be used
:param response: A HTTP request response. A DResponse instance
:param content: The HTTP response content
:param url: The url the request was sent to
:param kwargs: Extra key word arguments
:return: The picked form or None of no form matched the criteria.
"""
forms = ParseResponseEx(response)
if not forms:
raise FlowException(content=response.text, url=url)
#if len(forms) == 1:
# return forms[0]
#else:
_form = None
# ignore the first form, because I use ParseResponseEx which adds
# one form at the top of the list
forms = forms[1:]
if len(forms) == 1:
_form = forms[0]
else:
if "pick" in kwargs:
_dict = kwargs["pick"]
for form in forms:
if _form:
break
for key, _ava in _dict.items():
if key == "form":
_keys = form.attrs.keys()
for attr, val in _ava.items():
if attr in _keys and val == form.attrs[attr]:
_form = form
elif key == "control":
prop = _ava["id"]
_default = _ava["value"]
try:
orig_val = form[prop]
if isinstance(orig_val, basestring):
if orig_val == _default:
_form = form
elif _default in orig_val:
_form = form
except KeyError:
pass
except ControlNotFoundError:
pass
elif key == "method":
if form.method == _ava:
_form = form
else:
_form = None
if not _form:
break
elif "index" in kwargs:
_form = forms[int(kwargs["index"])]
return _form
def do_click(self, form, **kwargs):
"""
Emulates the user clicking submit on a form.
:param form: The form that should be submitted
:return: What do_request() returns
"""
if "click" in kwargs:
request = None
_name = kwargs["click"]
try:
_ = form.find_control(name=_name)
request = form.click(name=_name)
except AmbiguityError:
# more than one control with that name
_val = kwargs["set"][_name]
_nr = 0
while True:
try:
cntrl = form.find_control(name=_name, nr=_nr)
if cntrl.value == _val:
request = form.click(name=_name, nr=_nr)
break
else:
_nr += 1
except ControlNotFoundError:
raise Exception(NO_CTRL % (_name, _val))
else:
request = form.click()
headers = {}
for key, val in request.unredirected_hdrs.items():
headers[key] = val
url = request._Request__original
if form.method == "POST":
return self.httpc.send(url, "POST", data=request.data,
headers=headers)
else:
return self.httpc.send(url, "GET", headers=headers)
def select_form(self, orig_response, **kwargs):
"""
Pick a form on a web page, possibly enter some information and submit
the form.
<|fim▁hole|> :return: The response do_click() returns
"""
logger.info("select_form")
response = RResponse(orig_response)
try:
_url = response.url
except KeyError:
_url = kwargs["location"]
form = self.pick_form(response, _url, **kwargs)
#form.backwards_compatible = False
if not form:
raise Exception("Can't pick a form !!")
if "set" in kwargs:
for key, val in kwargs["set"].items():
if key.startswith("_"):
continue
if "click" in kwargs and kwargs["click"] == key:
continue
try:
form[key] = val
except ControlNotFoundError:
pass
except TypeError:
cntrl = form.find_control(key)
if isinstance(cntrl, ListControl):
form[key] = [val]
else:
raise
if form.action in kwargs["conv"].my_endpoints():
return {"SAMLResponse": form["SAMLResponse"],
"RelayState": form["RelayState"]}
return self.do_click(form, **kwargs)
#noinspection PyUnusedLocal
def chose(self, orig_response, path, **kwargs):
"""
Sends a HTTP GET to a url given by the present url and the given
relative path.
:param orig_response: The original response
:param content: The content of the response
:param path: The relative path to add to the base URL
:return: The response do_click() returns
"""
if not path.startswith("http"):
try:
_url = orig_response.url
except KeyError:
_url = kwargs["location"]
part = urlparse(_url)
url = "%s://%s%s" % (part[0], part[1], path)
else:
url = path
logger.info("GET %s" % url)
return self.httpc.send(url, "GET")
#return resp, ""
def post_form(self, orig_response, **kwargs):
"""
The same as select_form but with no possibility of change the content
of the form.
:param httpc: A HTTP Client instance
:param orig_response: The original response (as returned by requests)
:param content: The content of the response
:return: The response do_click() returns
"""
response = RResponse(orig_response)
form = self.pick_form(response, **kwargs)
return self.do_click(form, **kwargs)
#noinspection PyUnusedLocal
def parse(self, orig_response, **kwargs):
# content is a form from which I get the SAMLResponse
response = RResponse(orig_response)
form = self.pick_form(response, **kwargs)
#form.backwards_compatible = False
if not form:
raise InteractionNeeded("Can't pick a form !!")
return {"SAMLResponse": form["SAMLResponse"],
"RelayState": form["RelayState"]}
#noinspection PyUnusedLocal
def interaction(self, args):
_type = args["type"]
if _type == "form":
return self.select_form
elif _type == "link":
return self.chose
elif _type == "response":
return self.parse
else:
return NoneFunc
# ========================================================================
class Action(object):
def __init__(self, args):
self.args = args or {}
self.request = None
def update(self, dic):
self.args.update(dic)
#noinspection PyUnusedLocal
def post_op(self, result, conv, args):
pass
def __call__(self, httpc, conv, location, response, content, features):
intact = Interaction(httpc)
function = intact.interaction(self.args)
try:
_args = self.args.copy()
except (KeyError, AttributeError):
_args = {}
_args.update({"location": location, "features": features, "conv": conv})
logger.info("<-- FUNCTION: %s" % function.__name__)
logger.info("<-- ARGS: %s" % _args)
result = function(response, **_args)
self.post_op(result, conv, _args)
return result<|fim▁end|> | :param orig_response: The original response (as returned by requests) |
<|file_name|>handler_console.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
"""
This module implements a console output writer.
"""
import tensorflow as tf
from niftynet.engine.application_variables import CONSOLE
from niftynet.engine.signal import ITER_STARTED, ITER_FINISHED
class ConsoleLogger(object):
"""
This class handles iteration events to print output to the console.
"""
def __init__(self, **_unused):
ITER_STARTED.connect(self.read_console_vars)
ITER_FINISHED.connect(self.print_console_vars)
def read_console_vars(self, sender, **msg):
"""
Event handler to add all console output ops to the iteration message
:param sender: a niftynet.application instance
:param msg: an iteration message instance
:return:
"""
msg['iter_msg'].ops_to_run[CONSOLE] = \
sender.outputs_collector.variables(CONSOLE)
def print_console_vars(self, _sender, **msg):
"""
Printing iteration message with ``tf.logging`` interface.
:param _sender:<|fim▁hole|> :param msg: an iteration message instance
:return:
"""
tf.logging.info(msg['iter_msg'].to_console_string())<|fim▁end|> | |
<|file_name|>Namespace_Base.cpp<|end_file_name|><|fim▁begin|>#include "Namespace_Base.h"
#include <co/Coral.h>
#include <co/IComponent.h>
#include <co/IPort.h>
#include <co/IInterface.h>
namespace co {
//------ co.Namespace has a facet named 'namespace', of type co.INamespace ------//
co::IInterface* Namespace_co_INamespace::getInterface()
{
return co::typeOf<co::INamespace>::get();
}
co::IPort* Namespace_co_INamespace::getFacet()
{
co::IComponent* component = static_cast<co::IComponent*>( co::getType( "co.Namespace" ) );
assert( component );
co::IPort* facet = static_cast<co::IPort*>( component->getMember( "namespace" ) );
assert( facet );
return facet;
}
//------ Namespace_Base ------//
Namespace_Base::Namespace_Base()
{
// empty
}
Namespace_Base::~Namespace_Base()
{
// empty
}
co::IObject* Namespace_Base::getProvider()
{
return this;
}
void Namespace_Base::serviceRetain()
{
incrementRefCount();
}
void Namespace_Base::serviceRelease()
{
decrementRefCount();
}
co::IComponent* Namespace_Base::getComponent()
{
co::IType* type = co::getType( "co.Namespace" );
assert( type->getKind() == co::TK_COMPONENT );
return static_cast<co::IComponent*>( type );
}
co::IService* Namespace_Base::getServiceAt( co::IPort* port )
{
checkValidPort( port );
co::IService* res = NULL;
switch( port->getIndex() )
{
case 0: res = static_cast<co::INamespace*>( this ); break;
default: raiseUnexpectedPortIndex();
}
return res;
}<|fim▁hole|>
void Namespace_Base::setServiceAt( co::IPort* receptacle, co::IService* service )
{
checkValidReceptacle( receptacle );
raiseUnexpectedPortIndex();
CORAL_UNUSED( service );
}
} // namespace co<|fim▁end|> | |
<|file_name|>test_db.py<|end_file_name|><|fim▁begin|># Copyright 2012 NEC Corporation. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# @author: Ryota MIBU
import contextlib
import random
from neutron.common import constants as q_const
from neutron.openstack.common import uuidutils
from neutron.plugins.nec.common import exceptions as nexc
from neutron.plugins.nec.db import api as ndb
from neutron.plugins.nec.db import models as nmodels # noqa
from neutron.tests.unit.nec import test_nec_plugin
class NECPluginV2DBTestBase(test_nec_plugin.NecPluginV2TestCase):
"""Class conisting of NECPluginV2 DB unit tests."""
def setUp(self):
"""Setup for tests."""
super(NECPluginV2DBTestBase, self).setUp()
self.session = self.context.session
def get_ofc_item_random_params(self):
"""create random parameters for ofc_item test."""
ofc_id = uuidutils.generate_uuid()
neutron_id = uuidutils.generate_uuid()
none = uuidutils.generate_uuid()
return ofc_id, neutron_id, none
@contextlib.contextmanager
def portinfo_random_params(self):
with self.port() as port:
params = {'port_id': port['port']['id'],
'datapath_id': hex(random.randint(0, 0xffffffff)),
'port_no': random.randint(1, 100),
'vlan_id': random.randint(q_const.MIN_VLAN_TAG,
q_const.MAX_VLAN_TAG),
'mac': ':'.join(["%02x" % random.randint(0, 0xff)
for x in range(6)])
}
yield params
class NECPluginV2DBOfcMappingTest(NECPluginV2DBTestBase):
def test_add_ofc_item(self):
"""test add OFC item."""
o, q, n = self.get_ofc_item_random_params()
tenant = ndb.add_ofc_item(self.session, 'ofc_tenant', q, o)
self.assertEqual(tenant.ofc_id, o)
self.assertEqual(tenant.neutron_id, q)
def test_add_ofc_item_duplicate_entry(self):
o, q, n = self.get_ofc_item_random_params()
ndb.add_ofc_item(self.session, 'ofc_tenant', q, o)
self.assertRaises(nexc.NECDBException,
ndb.add_ofc_item,
self.session, 'ofc_tenant', q, o)
def test_get_ofc_item(self):
o, q, n = self.get_ofc_item_random_params()
ndb.add_ofc_item(self.session, 'ofc_tenant', q, o)
tenant = ndb.get_ofc_item(self.session, 'ofc_tenant', q)
self.assertEqual(tenant.ofc_id, o)
self.assertEqual(tenant.neutron_id, q)
def test_get_ofc_item_for_nonexisting_entry(self):
self.assertIsNone(
ndb.get_ofc_item(self.session, 'ofc_tenant', 'non-exist-id'))
def test_get_ofc_id(self):
o, q, n = self.get_ofc_item_random_params()
ndb.add_ofc_item(self.session, 'ofc_tenant', q, o)
tenant_id = ndb.get_ofc_id(self.session, 'ofc_tenant', q)
self.assertEqual(tenant_id, o)
def test_get_ofc_id_for_nonexisting_entry(self):
self.assertRaises(nexc.OFCMappingNotFound,
ndb.get_ofc_id,
self.session, 'ofc_tenant', 'non-exist-id')
def test_exists_ofc_item(self):
o, q, n = self.get_ofc_item_random_params()
self.assertFalse(ndb.exists_ofc_item(self.session, 'ofc_tenant', q))
ndb.add_ofc_item(self.session, 'ofc_tenant', q, o)
self.assertTrue(ndb.exists_ofc_item(self.session, 'ofc_tenant', q))
ndb.del_ofc_item(self.session, 'ofc_tenant', q)
self.assertFalse(ndb.exists_ofc_item(self.session, 'ofc_tenant', q))
def test_find_ofc_item(self):
o, q, n = self.get_ofc_item_random_params()
ndb.add_ofc_item(self.session, 'ofc_tenant', q, o)
tenant = ndb.find_ofc_item(self.session, 'ofc_tenant', o)
self.assertEqual(tenant.ofc_id, o)
self.assertEqual(tenant.neutron_id, q)
def test_find_ofc_item_for_nonexisting_entry(self):
self.assertIsNone(
ndb.find_ofc_item(self.session, 'ofc_tenant', 'non-existi-id'))
def test_del_ofc_item(self):
o, q, n = self.get_ofc_item_random_params()<|fim▁hole|> ndb.add_ofc_item(self.session, 'ofc_tenant', q, o)
self.assertTrue(ndb.del_ofc_item(self.session, 'ofc_tenant', q))
self.assertIsNone(ndb.get_ofc_item(self.session, 'ofc_tenant', q))
self.assertIsNone(ndb.find_ofc_item(self.session, 'ofc_tenant', o))
def test_del_ofc_item_for_nonexisting_entry(self):
self.assertFalse(
ndb.del_ofc_item(self.session, 'ofc_tenant', 'non-existi-id'))
class NECPluginV2DBPortInfoTest(NECPluginV2DBTestBase):
def _compare_portinfo(self, portinfo, expected):
self.assertEqual(portinfo.id, expected['port_id'])
self.assertEqual(portinfo.datapath_id, expected['datapath_id'])
self.assertEqual(portinfo.port_no, expected['port_no'])
self.assertEqual(portinfo.vlan_id, expected['vlan_id'])
self.assertEqual(portinfo.mac, expected['mac'])
def _add_portinfo(self, session, params):
return ndb.add_portinfo(session, params['port_id'],
params['datapath_id'], params['port_no'],
params['vlan_id'], params['mac'])
def testd_add_portinfo(self):
"""test add portinfo."""
with self.portinfo_random_params() as params:
portinfo = self._add_portinfo(self.session, params)
self._compare_portinfo(portinfo, params)
exception_raised = False
try:
self._add_portinfo(self.session, params)
except nexc.NECDBException:
exception_raised = True
self.assertTrue(exception_raised)
def teste_get_portinfo(self):
"""test get portinfo."""
with self.portinfo_random_params() as params:
self._add_portinfo(self.session, params)
portinfo = ndb.get_portinfo(self.session, params['port_id'])
self._compare_portinfo(portinfo, params)
nonexist_id = uuidutils.generate_uuid()
portinfo_none = ndb.get_portinfo(self.session, nonexist_id)
self.assertIsNone(portinfo_none)
def testf_del_portinfo(self):
"""test delete portinfo."""
with self.portinfo_random_params() as params:
self._add_portinfo(self.session, params)
portinfo = ndb.get_portinfo(self.session, params['port_id'])
self.assertEqual(portinfo.id, params['port_id'])
ndb.del_portinfo(self.session, params['port_id'])
portinfo_none = ndb.get_portinfo(self.session, params['port_id'])
self.assertIsNone(portinfo_none)<|fim▁end|> | |
<|file_name|>saveAddress.ts<|end_file_name|><|fim▁begin|>import { UpdateUserAddressMutationResponse } from "v2/__generated__/UpdateUserAddressMutation.graphql"
import { CreateUserAddressMutationResponse } from "v2/__generated__/CreateUserAddressMutation.graphql"
import { DeleteUserAddressMutationResponse } from "v2/__generated__/DeleteUserAddressMutation.graphql"
export const saveAddressSuccess: CreateUserAddressMutationResponse = {
createUserAddress: {
userAddressOrErrors: {
internalID: "address-id",
id: "graphql-id",
isDefault: false,
name: "Bob Ross",
addressLine1: "foo",
addressLine2: "bar",
addressLine3: "whatever",
phoneNumber: "111-111-1111",
city: "Brooklyn",
region: "NY",
country: "US",
postalCode: "11111",
},
},
}
export const updateAddressSuccess: UpdateUserAddressMutationResponse = {
updateUserAddress: {
userAddressOrErrors: {
internalID: "address-id",
id: "graphql-id",
name: "Bob Ross",
addressLine1: "1 Main St",
addressLine2: "",
isDefault: false,
phoneNumber: "718-000-0000",
city: "New York",
region: "NY",
postalCode: "10012",
country: "USA",
},<|fim▁hole|>
export const deleteAddressSuccess: DeleteUserAddressMutationResponse = {
deleteUserAddress: {
userAddressOrErrors: {
internalID: "1",
addressLine1: "1 Main St",
addressLine2: "",
city: "New York",
country: "US",
isDefault: false,
name: "Test Name",
phoneNumber: "555-555-5555",
postalCode: "28001",
region: "",
id: "addressID1",
},
},
}
export const updateAddressFailure: UpdateUserAddressMutationResponse = {
updateUserAddress: {
userAddressOrErrors: {
errors: [
{
code: "100",
message: "Invalid address",
},
],
},
},
}<|fim▁end|> | },
} |
<|file_name|>titanic_data_heuristic1.py<|end_file_name|><|fim▁begin|>import numpy
import pandas
import statsmodels.api as sm
'''
In this exercise, we will perform some rudimentary practices similar to those of
an actual data scientist.
Part of a data scientist's job is to use her or his intuition and insight to
write algorithms and heuristics. A data scientist also creates mathematical models
to make predictions based on some attributes from the data that they are examining.
We would like for you to take your knowledge and intuition about the Titanic
and its passengers' attributes to predict whether or not the passengers survived
or perished. You can read more about the Titanic and specifics about this dataset at:
http://en.wikipedia.org/wiki/RMS_Titanic
<|fim▁hole|> and their associated information. More information about the data can be seen at the
link below:
http://www.kaggle.com/c/titanic-gettingStarted/data.
For this exercise, you need to write a simple heuristic that will use
the passengers' gender to predict if that person survived the Titanic disaster.
You prediction should be 78% accurate or higher.
Here's a simple heuristic to start off:
1) If the passenger is female, your heuristic should assume that the
passenger survived.
2) If the passenger is male, you heuristic should
assume that the passenger did not survive.
You can access the gender of a passenger via passenger['Sex'].
If the passenger is male, passenger['Sex'] will return a string "male".
If the passenger is female, passenger['Sex'] will return a string "female".
Write your prediction back into the "predictions" dictionary. The
key of the dictionary should be the passenger's id (which can be accessed
via passenger["PassengerId"]) and the associated value should be 1 if the
passenger survied or 0 otherwise.
For example, if a passenger is predicted to have survived:
passenger_id = passenger['PassengerId']
predictions[passenger_id] = 1
And if a passenger is predicted to have perished in the disaster:
passenger_id = passenger['PassengerId']
predictions[passenger_id] = 0
You can also look at the Titantic data that you will be working with
at the link below:
https://www.dropbox.com/s/r5f9aos8p9ri9sa/titanic_data.csv
'''
def simple_heuristic(file_path):
predictions = {}
df = pandas.read_csv(file_path)
for passenger_index, passenger in df.iterrows():
passenger_id = passenger['PassengerId']
if passenger['Sex'] == 'female':
predictions[passenger_id] = 1
else:
predictions[passenger_id] = 0
#print predictions
return predictions<|fim▁end|> | http://www.kaggle.com/c/titanic-gettingStarted
In this exercise and the following ones, you are given a list of Titantic passengers
|
<|file_name|>oop_utils.rs<|end_file_name|><|fim▁begin|>use super::Universe;
use super::oop::*;
use ast::sexpr::SExpr;
<|fim▁hole|>unsafe fn fmt_oop(oop: Oop, u: &Universe, fmt: &mut Formatter) -> fmt::Result {
if oop == NULL_OOP {
write!(fmt, "<null>")?;
} else if Singleton::is_singleton(oop) {
write!(fmt, "{:?}", Singleton::from_oop(oop).unwrap())?;
} else if u.oop_is_fixnum(oop) {
let i = Fixnum::from_raw(oop);
write!(fmt, "{}", i.value())?;
} else if u.oop_is_pair(oop) {
let mut p = Pair::from_raw(oop);
write!(fmt, "({}", FmtOop(p.car, u))?;
while u.oop_is_pair(p.cdr) {
p = Pair::from_raw(p.cdr);
write!(fmt, " {}", FmtOop(p.car, u))?;
}
if Singleton::is_nil(p.cdr) {
write!(fmt, ")")?;
} else {
write!(fmt, " . {})", FmtOop(p.cdr, u))?;
}
} else if u.oop_is_symbol(oop) {
let s = Symbol::from_raw(oop);
write!(fmt, "{}", s.as_str())?;
} else if u.oop_is_closure(oop) {
let clo = Closure::from_raw(oop);
write!(fmt, "<Closure {} @{:#x}>", clo.info().name(), oop)?;
} else if u.oop_is_closure(oop) {
let mb = MutBox::from_raw(oop);
write!(fmt, "<Box {} @{:#x}>", FmtOop(mb.value(), u), oop)?;
} else if u.oop_is_ooparray(oop) {
let arr = OopArray::from_raw(oop);
write!(fmt, "[")?;
for (i, oop) in arr.content().iter().enumerate() {
if i != 0 {
write!(fmt, ", ")?;
}
fmt_oop(*oop, u, fmt)?;
}
write!(fmt, "]")?;
} else if u.oop_is_i64array(oop) {
let arr = OopArray::from_raw(oop);
write!(fmt, "i64[")?;
for (i, val) in arr.content().iter().enumerate() {
if i != 0 {
write!(fmt, ", ")?;
}
write!(fmt, "{}", val)?;
}
write!(fmt, "]")?;
} else {
write!(fmt, "<UnknownOop {:#x}>", oop)?;
}
Ok(())
}
pub struct FmtOop<'a>(pub Oop, pub &'a Universe);
impl<'a> Display for FmtOop<'a> {
fn fmt(&self, fmt: &mut Formatter) -> fmt::Result {
unsafe { fmt_oop(self.0, self.1, fmt) }
}
}
pub fn oop_to_sexpr(_oop: Handle<Closure>, _u: &Universe) -> SExpr {
panic!("oop_to_sexpr: not implemenetd")
}<|fim▁end|> | use std::fmt::{self, Formatter, Display};
// Format impl
|
<|file_name|>models.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
from django.utils.encoding import python_2_unicode_compatible
"""
Model for testing arithmetic expressions.
"""
from django.db import models<|fim▁hole|>class Number(models.Model):
integer = models.BigIntegerField(db_column='the_integer')
float = models.FloatField(null=True, db_column='the_float')
def __str__(self):
return '%i, %.3f' % (self.integer, self.float)
class Experiment(models.Model):
name = models.CharField(max_length=24)
assigned = models.DateField()
completed = models.DateField()
start = models.DateTimeField()
end = models.DateTimeField()
class Meta:
ordering = ('name',)
def duration(self):
return self.end - self.start<|fim▁end|> |
@python_2_unicode_compatible |
<|file_name|>previous_elections_spain_analysis.py<|end_file_name|><|fim▁begin|>import numpy as np
import pandas as pd
from ElectionsTools.Seats_assignation import DHondt_assignation
from previous_elections_spain_parser import *
import os
pathfiles = '../data/spain_previous_elections_results/provincia/'
pathfiles = '/'.join(os.path.realpath(__file__).split('/')[:-1]+[pathfiles])
fles = [pathfiles+'PROV_02_197706_1.xlsx',
pathfiles+'PROV_02_197903_1.xlsx',
pathfiles+'PROV_02_198210_1.xlsx',
pathfiles+'PROV_02_198606_1.xlsx',
pathfiles+'PROV_02_198910_1.xlsx',
pathfiles+'PROV_02_199306_1.xlsx',
pathfiles+'PROV_02_199603_1.xlsx',
pathfiles+'PROV_02_200003_1.xlsx',
pathfiles+'PROV_02_200403_1.xlsx',
pathfiles+'PROV_02_200803_1.xlsx',
pathfiles+'PROV_02_201111_1.xlsx']
years = [1977, 1979, 1982, 1986, 1989, 1993, 1996, 2000, 2004, 2008, 2011]
def compute_diputes_DHont(filename):
## 1. Parse
circ, parties, votes, diputes = parse_data_elecciones_esp(filename)
circ_com, votes_com, dips_com = collapse_by_col(circ, votes, diputes, 0)
circ_sp, votes_sp, dips_sp = collapse_by_col(circ, votes, diputes, None)
votes_sp = votes_sp.reshape(1,len(parties))
## 2. Assignation objects
assign = DHondt_assignation(diputes.sum(1))
assign1 = DHondt_assignation(dips_com.sum(1))
assign2 = DHondt_assignation(np.array([dips_sp.sum(0)]))
## 3. Compute assignations
d, price = assign.assignation(pd.DataFrame(votes, columns=parties))
d1, price1 = assign1.assignation(pd.DataFrame(votes_com, columns=parties))
d2, price2 = assign2.assignation(pd.DataFrame(votes_sp, columns=parties))
return d, d1, d2, parties
def prepare2export(d, d1, d2, parties):
logi = np.logical_or(np.logical_or(d.sum(0)>0, d1.sum(0)>0), d2.sum(0)>0)
parties = [parties[i] for i in np.where(logi)[0]]
d, d1, d2 = d[:, logi].sum(0), d1[:, logi].sum(0), d2[:, logi].sum(0)
return d, d1, d2, parties
def compute_all_year(year):<|fim▁hole|> return exp_d, exp_d1, exp_d2, exp_parties
def compute_table_all_years(year):
d1, d2, d3, cols = compute_all_year(year)
d1, d2, d3 = pd.DataFrame(d1), pd.DataFrame(d2), pd.DataFrame(d3)
ind = ['Dhont_estado', 'Dhont_comunidad', 'Dhont_provincia']
exp = pd.concat([d1.T, d2.T, d3.T], axis=0)
exp.columns = cols
exp.index = ind
return exp<|fim▁end|> | filename = fles[years.index(year)]
d, d1, d2, parties = compute_diputes_DHont(filename)
exp_d, exp_d1, exp_d2, exp_parties = prepare2export(d, d1, d2, parties) |
<|file_name|>canvas.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use cssparser::RGBA;
use euclid::{Point2D, Rect, Size2D, Transform2D};
use ipc_channel::ipc::{IpcBytesReceiver, IpcBytesSender, IpcSender};
use serde_bytes::ByteBuf;
use std::default::Default;
use std::str::FromStr;
#[derive(Clone, Debug, Deserialize, Serialize)]
pub enum FillRule {
Nonzero,
Evenodd,
}
#[derive(Clone, Copy, Debug, Deserialize, Eq, Hash, MallocSizeOf, PartialEq, Serialize)]
pub struct CanvasId(pub u64);
#[derive(Deserialize, Serialize)]
pub enum CanvasMsg {
Canvas2d(Canvas2dMsg, CanvasId),
Create(
IpcSender<CanvasId>,
Size2D<u32>,
webrender_api::RenderApiSender,
bool,
),
FromLayout(FromLayoutMsg, CanvasId),
FromScript(FromScriptMsg, CanvasId),
Recreate(Size2D<u32>, CanvasId),
Close(CanvasId),
Exit,
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct CanvasImageData {
pub image_key: webrender_api::ImageKey,
}
#[derive(Debug, Deserialize, Serialize)]
pub enum Canvas2dMsg {
Arc(Point2D<f32>, f32, f32, f32, bool),
ArcTo(Point2D<f32>, Point2D<f32>, f32),
DrawImage(Option<ByteBuf>, Size2D<f64>, Rect<f64>, Rect<f64>, bool),
DrawImageInOther(CanvasId, Size2D<f64>, Rect<f64>, Rect<f64>, bool),
BeginPath,
BezierCurveTo(Point2D<f32>, Point2D<f32>, Point2D<f32>),
ClearRect(Rect<f32>),
Clip,
ClosePath,
Ellipse(Point2D<f32>, f32, f32, f32, f32, f32, bool),
Fill,
FillText(String, f64, f64, Option<f64>),
FillRect(Rect<f32>),
GetImageData(Rect<u32>, Size2D<u32>, IpcBytesSender),
IsPointInPath(f64, f64, FillRule, IpcSender<bool>),
LineTo(Point2D<f32>),
MoveTo(Point2D<f32>),
PutImageData(Rect<u32>, IpcBytesReceiver),
QuadraticCurveTo(Point2D<f32>, Point2D<f32>),
Rect(Rect<f32>),
RestoreContext,
SaveContext,
StrokeRect(Rect<f32>),
Stroke,
SetFillStyle(FillOrStrokeStyle),
SetStrokeStyle(FillOrStrokeStyle),
SetLineWidth(f32),
SetLineCap(LineCapStyle),
SetLineJoin(LineJoinStyle),
SetMiterLimit(f32),
SetGlobalAlpha(f32),
SetGlobalComposition(CompositionOrBlending),
SetTransform(Transform2D<f32>),
SetShadowOffsetX(f64),
SetShadowOffsetY(f64),
SetShadowBlur(f64),
SetShadowColor(RGBA),
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub enum FromLayoutMsg {
SendData(IpcSender<CanvasImageData>),
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub enum FromScriptMsg {
SendPixels(IpcSender<Option<ByteBuf>>),
}
#[derive(Clone, Debug, Deserialize, MallocSizeOf, Serialize)]
pub struct CanvasGradientStop {
pub offset: f64,
pub color: RGBA,
}
#[derive(Clone, Debug, Deserialize, MallocSizeOf, Serialize)]
pub struct LinearGradientStyle {
pub x0: f64,
pub y0: f64,
pub x1: f64,
pub y1: f64,
pub stops: Vec<CanvasGradientStop>,
}
impl LinearGradientStyle {
pub fn new(
x0: f64,
y0: f64,
x1: f64,
y1: f64,
stops: Vec<CanvasGradientStop>,
) -> LinearGradientStyle {
LinearGradientStyle {
x0: x0,
y0: y0,
x1: x1,
y1: y1,
stops: stops,
}
}
}
#[derive(Clone, Debug, Deserialize, MallocSizeOf, Serialize)]
pub struct RadialGradientStyle {
pub x0: f64,
pub y0: f64,
pub r0: f64,
pub x1: f64,
pub y1: f64,
pub r1: f64,
pub stops: Vec<CanvasGradientStop>,
}
impl RadialGradientStyle {
pub fn new(
x0: f64,
y0: f64,
r0: f64,
x1: f64,
y1: f64,
r1: f64,
stops: Vec<CanvasGradientStop>,
) -> RadialGradientStyle {
RadialGradientStyle {
x0: x0,
y0: y0,
r0: r0,
x1: x1,
y1: y1,
r1: r1,
stops: stops,
}
}
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub struct SurfaceStyle {
pub surface_data: ByteBuf,
pub surface_size: Size2D<u32>,
pub repeat_x: bool,
pub repeat_y: bool,
}
impl SurfaceStyle {
pub fn new(
surface_data: Vec<u8>,
surface_size: Size2D<u32>,
repeat_x: bool,
repeat_y: bool,
) -> Self {
Self {
surface_data: surface_data.into(),
surface_size,<|fim▁hole|> }
}
}
#[derive(Clone, Debug, Deserialize, Serialize)]
pub enum FillOrStrokeStyle {
Color(RGBA),
LinearGradient(LinearGradientStyle),
RadialGradient(RadialGradientStyle),
Surface(SurfaceStyle),
}
#[derive(Clone, Copy, Debug, Deserialize, MallocSizeOf, PartialEq, Serialize)]
pub enum LineCapStyle {
Butt = 0,
Round = 1,
Square = 2,
}
impl FromStr for LineCapStyle {
type Err = ();
fn from_str(string: &str) -> Result<LineCapStyle, ()> {
match string {
"butt" => Ok(LineCapStyle::Butt),
"round" => Ok(LineCapStyle::Round),
"square" => Ok(LineCapStyle::Square),
_ => Err(()),
}
}
}
#[derive(Clone, Copy, Debug, Deserialize, MallocSizeOf, PartialEq, Serialize)]
pub enum LineJoinStyle {
Round = 0,
Bevel = 1,
Miter = 2,
}
impl FromStr for LineJoinStyle {
type Err = ();
fn from_str(string: &str) -> Result<LineJoinStyle, ()> {
match string {
"round" => Ok(LineJoinStyle::Round),
"bevel" => Ok(LineJoinStyle::Bevel),
"miter" => Ok(LineJoinStyle::Miter),
_ => Err(()),
}
}
}
#[derive(Clone, Copy, Debug, Deserialize, PartialEq, Serialize)]
pub enum RepetitionStyle {
Repeat,
RepeatX,
RepeatY,
NoRepeat,
}
impl FromStr for RepetitionStyle {
type Err = ();
fn from_str(string: &str) -> Result<RepetitionStyle, ()> {
match string {
"repeat" => Ok(RepetitionStyle::Repeat),
"repeat-x" => Ok(RepetitionStyle::RepeatX),
"repeat-y" => Ok(RepetitionStyle::RepeatY),
"no-repeat" => Ok(RepetitionStyle::NoRepeat),
_ => Err(()),
}
}
}
#[derive(Clone, Copy, Debug, Deserialize, MallocSizeOf, PartialEq, Serialize)]
pub enum CompositionStyle {
SrcIn,
SrcOut,
SrcOver,
SrcAtop,
DestIn,
DestOut,
DestOver,
DestAtop,
Copy,
Lighter,
Xor,
}
impl FromStr for CompositionStyle {
type Err = ();
fn from_str(string: &str) -> Result<CompositionStyle, ()> {
match string {
"source-in" => Ok(CompositionStyle::SrcIn),
"source-out" => Ok(CompositionStyle::SrcOut),
"source-over" => Ok(CompositionStyle::SrcOver),
"source-atop" => Ok(CompositionStyle::SrcAtop),
"destination-in" => Ok(CompositionStyle::DestIn),
"destination-out" => Ok(CompositionStyle::DestOut),
"destination-over" => Ok(CompositionStyle::DestOver),
"destination-atop" => Ok(CompositionStyle::DestAtop),
"copy" => Ok(CompositionStyle::Copy),
"lighter" => Ok(CompositionStyle::Lighter),
"xor" => Ok(CompositionStyle::Xor),
_ => Err(()),
}
}
}
impl CompositionStyle {
pub fn to_str(&self) -> &str {
match *self {
CompositionStyle::SrcIn => "source-in",
CompositionStyle::SrcOut => "source-out",
CompositionStyle::SrcOver => "source-over",
CompositionStyle::SrcAtop => "source-atop",
CompositionStyle::DestIn => "destination-in",
CompositionStyle::DestOut => "destination-out",
CompositionStyle::DestOver => "destination-over",
CompositionStyle::DestAtop => "destination-atop",
CompositionStyle::Copy => "copy",
CompositionStyle::Lighter => "lighter",
CompositionStyle::Xor => "xor",
}
}
}
#[derive(Clone, Copy, Debug, Deserialize, MallocSizeOf, PartialEq, Serialize)]
pub enum BlendingStyle {
Multiply,
Screen,
Overlay,
Darken,
Lighten,
ColorDodge,
ColorBurn,
HardLight,
SoftLight,
Difference,
Exclusion,
Hue,
Saturation,
Color,
Luminosity,
}
impl FromStr for BlendingStyle {
type Err = ();
fn from_str(string: &str) -> Result<BlendingStyle, ()> {
match string {
"multiply" => Ok(BlendingStyle::Multiply),
"screen" => Ok(BlendingStyle::Screen),
"overlay" => Ok(BlendingStyle::Overlay),
"darken" => Ok(BlendingStyle::Darken),
"lighten" => Ok(BlendingStyle::Lighten),
"color-dodge" => Ok(BlendingStyle::ColorDodge),
"color-burn" => Ok(BlendingStyle::ColorBurn),
"hard-light" => Ok(BlendingStyle::HardLight),
"soft-light" => Ok(BlendingStyle::SoftLight),
"difference" => Ok(BlendingStyle::Difference),
"exclusion" => Ok(BlendingStyle::Exclusion),
"hue" => Ok(BlendingStyle::Hue),
"saturation" => Ok(BlendingStyle::Saturation),
"color" => Ok(BlendingStyle::Color),
"luminosity" => Ok(BlendingStyle::Luminosity),
_ => Err(()),
}
}
}
impl BlendingStyle {
pub fn to_str(&self) -> &str {
match *self {
BlendingStyle::Multiply => "multiply",
BlendingStyle::Screen => "screen",
BlendingStyle::Overlay => "overlay",
BlendingStyle::Darken => "darken",
BlendingStyle::Lighten => "lighten",
BlendingStyle::ColorDodge => "color-dodge",
BlendingStyle::ColorBurn => "color-burn",
BlendingStyle::HardLight => "hard-light",
BlendingStyle::SoftLight => "soft-light",
BlendingStyle::Difference => "difference",
BlendingStyle::Exclusion => "exclusion",
BlendingStyle::Hue => "hue",
BlendingStyle::Saturation => "saturation",
BlendingStyle::Color => "color",
BlendingStyle::Luminosity => "luminosity",
}
}
}
#[derive(Clone, Copy, Debug, Deserialize, MallocSizeOf, PartialEq, Serialize)]
pub enum CompositionOrBlending {
Composition(CompositionStyle),
Blending(BlendingStyle),
}
impl Default for CompositionOrBlending {
fn default() -> CompositionOrBlending {
CompositionOrBlending::Composition(CompositionStyle::SrcOver)
}
}
impl FromStr for CompositionOrBlending {
type Err = ();
fn from_str(string: &str) -> Result<CompositionOrBlending, ()> {
if let Ok(op) = CompositionStyle::from_str(string) {
return Ok(CompositionOrBlending::Composition(op));
}
if let Ok(op) = BlendingStyle::from_str(string) {
return Ok(CompositionOrBlending::Blending(op));
}
Err(())
}
}<|fim▁end|> | repeat_x,
repeat_y, |
<|file_name|>MultimapSideInputTest.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*<|fim▁hole|> * http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.beam.fn.harness.state;
import static org.junit.Assert.assertArrayEquals;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.Iterables;
import com.google.protobuf.ByteString;
import java.io.IOException;
import org.apache.beam.model.fnexecution.v1.BeamFnApi.StateKey;
import org.apache.beam.sdk.coders.StringUtf8Coder;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Tests for {@link MultimapSideInput}. */
@RunWith(JUnit4.class)
public class MultimapSideInputTest {
@Test
public void testGet() throws Exception {
FakeBeamFnStateClient fakeBeamFnStateClient = new FakeBeamFnStateClient(ImmutableMap.of(
key("A"), encode("A1", "A2", "A3"),
key("B"), encode("B1", "B2")));
MultimapSideInput<String, String> multimapSideInput = new MultimapSideInput<>(
fakeBeamFnStateClient,
"instructionId",
"ptransformId",
"sideInputId",
ByteString.copyFromUtf8("encodedWindow"),
StringUtf8Coder.of(),
StringUtf8Coder.of());
assertArrayEquals(new String[]{ "A1", "A2", "A3" },
Iterables.toArray(multimapSideInput.get("A"), String.class));
assertArrayEquals(new String[]{ "B1", "B2" },
Iterables.toArray(multimapSideInput.get("B"), String.class));
assertArrayEquals(new String[]{ },
Iterables.toArray(multimapSideInput.get("unknown"), String.class));
}
private StateKey key(String id) throws IOException {
return StateKey.newBuilder().setMultimapSideInput(
StateKey.MultimapSideInput.newBuilder()
.setPtransformId("ptransformId")
.setSideInputId("sideInputId")
.setWindow(ByteString.copyFromUtf8("encodedWindow"))
.setKey(encode(id))).build();
}
private ByteString encode(String ... values) throws IOException {
ByteString.Output out = ByteString.newOutput();
for (String value : values) {
StringUtf8Coder.of().encode(value, out);
}
return out.toByteString();
}
}<|fim▁end|> | |
<|file_name|>xmlhttprequest.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::body::{BodySource, Extractable, ExtractedBody};
use crate::document_loader::DocumentLoader;
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::WindowBinding::WindowMethods;
use crate::dom::bindings::codegen::Bindings::XMLHttpRequestBinding::XMLHttpRequestMethods;
use crate::dom::bindings::codegen::Bindings::XMLHttpRequestBinding::XMLHttpRequestResponseType;
use crate::dom::bindings::codegen::UnionTypes::DocumentOrXMLHttpRequestBodyInit;
use crate::dom::bindings::conversions::ToJSValConvertible;
use crate::dom::bindings::error::{Error, ErrorResult, Fallible};
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::refcounted::Trusted;
use crate::dom::bindings::reflector::{reflect_dom_object, DomObject};
use crate::dom::bindings::root::{Dom, DomRoot, MutNullableDom};
use crate::dom::bindings::str::{is_token, ByteString, DOMString, USVString};
use crate::dom::blob::{normalize_type_string, Blob};
use crate::dom::document::DocumentSource;
use crate::dom::document::{Document, HasBrowsingContext, IsHTMLDocument};
use crate::dom::event::{Event, EventBubbles, EventCancelable};
use crate::dom::eventtarget::EventTarget;
use crate::dom::globalscope::GlobalScope;
use crate::dom::headers::{extract_mime_type, is_forbidden_header_name};
use crate::dom::node::Node;
use crate::dom::performanceresourcetiming::InitiatorType;
use crate::dom::progressevent::ProgressEvent;
use crate::dom::readablestream::ReadableStream;
use crate::dom::servoparser::ServoParser;
use crate::dom::window::Window;
use crate::dom::workerglobalscope::WorkerGlobalScope;
use crate::dom::xmlhttprequesteventtarget::XMLHttpRequestEventTarget;
use crate::dom::xmlhttprequestupload::XMLHttpRequestUpload;
use crate::fetch::FetchCanceller;
use crate::network_listener::{self, NetworkListener, PreInvoke, ResourceTimingListener};
use crate::script_runtime::JSContext;
use crate::task_source::networking::NetworkingTaskSource;
use crate::task_source::TaskSourceName;
use crate::timers::{OneshotTimerCallback, OneshotTimerHandle};
use dom_struct::dom_struct;
use encoding_rs::{Encoding, UTF_8};
use euclid::Length;
use headers::{ContentLength, ContentType, HeaderMapExt};
use html5ever::serialize;
use html5ever::serialize::SerializeOpts;
use http::header::{self, HeaderMap, HeaderName, HeaderValue};
use http::Method;
use hyper_serde::Serde;
use ipc_channel::ipc;
use ipc_channel::router::ROUTER;
use js::jsapi::JS_ClearPendingException;
use js::jsapi::{Heap, JSObject};
use js::jsval::{JSVal, NullValue, UndefinedValue};
use js::rust::wrappers::JS_ParseJSON;
use js::typedarray::{ArrayBuffer, CreateWith};
use mime::{self, Mime, Name};
use net_traits::request::{CredentialsMode, Destination, Referrer, RequestBuilder, RequestMode};
use net_traits::trim_http_whitespace;
use net_traits::CoreResourceMsg::Fetch;
use net_traits::{FetchChannels, FetchMetadata, FilteredMetadata};
use net_traits::{FetchResponseListener, NetworkError, ReferrerPolicy};
use net_traits::{ResourceFetchTiming, ResourceTimingType};
use script_traits::serializable::BlobImpl;
use script_traits::DocumentActivity;
use servo_atoms::Atom;
use servo_url::ServoUrl;
use std::borrow::ToOwned;
use std::cell::Cell;
use std::cmp;
use std::default::Default;
use std::ptr;
use std::ptr::NonNull;
use std::slice;
use std::str::{self, FromStr};
use std::sync::{Arc, Mutex};
use url::Position;
#[derive(Clone, Copy, Debug, JSTraceable, MallocSizeOf, PartialEq)]
enum XMLHttpRequestState {
Unsent = 0,
Opened = 1,
HeadersReceived = 2,
Loading = 3,
Done = 4,
}
#[derive(Clone, Copy, JSTraceable, MallocSizeOf, PartialEq)]
pub struct GenerationId(u32);
/// Closure of required data for each async network event that comprises the
/// XHR's response.
struct XHRContext {
xhr: TrustedXHRAddress,
gen_id: GenerationId,
sync_status: DomRefCell<Option<ErrorResult>>,
resource_timing: ResourceFetchTiming,
url: ServoUrl,
}
#[derive(Clone)]
pub enum XHRProgress {
/// Notify that headers have been received
HeadersReceived(GenerationId, Option<HeaderMap>, Option<(u16, Vec<u8>)>),
/// Partial progress (after receiving headers), containing portion of the response
Loading(GenerationId, Vec<u8>),
/// Loading is done
Done(GenerationId),
/// There was an error (only Error::Abort, Error::Timeout or Error::Network is used)
Errored(GenerationId, Error),
}
impl XHRProgress {
fn generation_id(&self) -> GenerationId {
match *self {
XHRProgress::HeadersReceived(id, _, _) |
XHRProgress::Loading(id, _) |
XHRProgress::Done(id) |
XHRProgress::Errored(id, _) => id,
}
}
}
#[dom_struct]
pub struct XMLHttpRequest {
eventtarget: XMLHttpRequestEventTarget,
ready_state: Cell<XMLHttpRequestState>,
timeout: Cell<u32>,
with_credentials: Cell<bool>,
upload: Dom<XMLHttpRequestUpload>,
response_url: DomRefCell<String>,
status: Cell<u16>,
status_text: DomRefCell<ByteString>,
response: DomRefCell<Vec<u8>>,
response_type: Cell<XMLHttpRequestResponseType>,
response_xml: MutNullableDom<Document>,
response_blob: MutNullableDom<Blob>,
#[ignore_malloc_size_of = "mozjs"]
response_arraybuffer: Heap<*mut JSObject>,
#[ignore_malloc_size_of = "Defined in rust-mozjs"]
response_json: Heap<JSVal>,
#[ignore_malloc_size_of = "Defined in hyper"]
response_headers: DomRefCell<HeaderMap>,
#[ignore_malloc_size_of = "Defined in hyper"]
override_mime_type: DomRefCell<Option<Mime>>,
override_charset: DomRefCell<Option<&'static Encoding>>,
// Associated concepts<|fim▁hole|> #[ignore_malloc_size_of = "Defined in hyper"]
request_headers: DomRefCell<HeaderMap>,
request_body_len: Cell<usize>,
sync: Cell<bool>,
upload_complete: Cell<bool>,
upload_listener: Cell<bool>,
send_flag: Cell<bool>,
timeout_cancel: DomRefCell<Option<OneshotTimerHandle>>,
fetch_time: Cell<i64>,
generation_id: Cell<GenerationId>,
response_status: Cell<Result<(), ()>>,
referrer: Referrer,
referrer_policy: Option<ReferrerPolicy>,
canceller: DomRefCell<FetchCanceller>,
}
impl XMLHttpRequest {
fn new_inherited(global: &GlobalScope) -> XMLHttpRequest {
//TODO - update this when referrer policy implemented for workers
let referrer_policy = if let Some(window) = global.downcast::<Window>() {
let document = window.Document();
document.get_referrer_policy()
} else {
None
};
XMLHttpRequest {
eventtarget: XMLHttpRequestEventTarget::new_inherited(),
ready_state: Cell::new(XMLHttpRequestState::Unsent),
timeout: Cell::new(0u32),
with_credentials: Cell::new(false),
upload: Dom::from_ref(&*XMLHttpRequestUpload::new(global)),
response_url: DomRefCell::new(String::new()),
status: Cell::new(0),
status_text: DomRefCell::new(ByteString::new(vec![])),
response: DomRefCell::new(vec![]),
response_type: Cell::new(XMLHttpRequestResponseType::_empty),
response_xml: Default::default(),
response_blob: Default::default(),
response_arraybuffer: Heap::default(),
response_json: Heap::default(),
response_headers: DomRefCell::new(HeaderMap::new()),
override_mime_type: DomRefCell::new(None),
override_charset: DomRefCell::new(None),
request_method: DomRefCell::new(Method::GET),
request_url: DomRefCell::new(None),
request_headers: DomRefCell::new(HeaderMap::new()),
request_body_len: Cell::new(0),
sync: Cell::new(false),
upload_complete: Cell::new(false),
upload_listener: Cell::new(false),
send_flag: Cell::new(false),
timeout_cancel: DomRefCell::new(None),
fetch_time: Cell::new(0),
generation_id: Cell::new(GenerationId(0)),
response_status: Cell::new(Ok(())),
referrer: global.get_referrer(),
referrer_policy: referrer_policy,
canceller: DomRefCell::new(Default::default()),
}
}
pub fn new(global: &GlobalScope) -> DomRoot<XMLHttpRequest> {
reflect_dom_object(Box::new(XMLHttpRequest::new_inherited(global)), global)
}
// https://xhr.spec.whatwg.org/#constructors
#[allow(non_snake_case)]
pub fn Constructor(global: &GlobalScope) -> Fallible<DomRoot<XMLHttpRequest>> {
Ok(XMLHttpRequest::new(global))
}
fn sync_in_window(&self) -> bool {
self.sync.get() && self.global().is::<Window>()
}
fn initiate_async_xhr(
context: Arc<Mutex<XHRContext>>,
task_source: NetworkingTaskSource,
global: &GlobalScope,
init: RequestBuilder,
cancellation_chan: ipc::IpcReceiver<()>,
) {
impl FetchResponseListener for XHRContext {
fn process_request_body(&mut self) {
// todo
}
fn process_request_eof(&mut self) {
// todo
}
fn process_response(&mut self, metadata: Result<FetchMetadata, NetworkError>) {
let xhr = self.xhr.root();
let rv = xhr.process_headers_available(self.gen_id, metadata);
if rv.is_err() {
*self.sync_status.borrow_mut() = Some(rv);
}
}
fn process_response_chunk(&mut self, chunk: Vec<u8>) {
self.xhr.root().process_data_available(self.gen_id, chunk);
}
fn process_response_eof(
&mut self,
response: Result<ResourceFetchTiming, NetworkError>,
) {
let rv = self
.xhr
.root()
.process_response_complete(self.gen_id, response.map(|_| ()));
*self.sync_status.borrow_mut() = Some(rv);
}
fn resource_timing_mut(&mut self) -> &mut ResourceFetchTiming {
&mut self.resource_timing
}
fn resource_timing(&self) -> &ResourceFetchTiming {
&self.resource_timing
}
fn submit_resource_timing(&mut self) {
network_listener::submit_timing(self)
}
}
impl ResourceTimingListener for XHRContext {
fn resource_timing_information(&self) -> (InitiatorType, ServoUrl) {
(InitiatorType::XMLHttpRequest, self.url.clone())
}
fn resource_timing_global(&self) -> DomRoot<GlobalScope> {
self.xhr.root().global()
}
}
impl PreInvoke for XHRContext {
fn should_invoke(&self) -> bool {
self.xhr.root().generation_id.get() == self.gen_id
}
}
let (action_sender, action_receiver) = ipc::channel().unwrap();
let listener = NetworkListener {
context: context,
task_source: task_source,
canceller: Some(global.task_canceller(TaskSourceName::Networking)),
};
ROUTER.add_route(
action_receiver.to_opaque(),
Box::new(move |message| {
listener.notify_fetch(message.to().unwrap());
}),
);
global
.core_resource_thread()
.send(Fetch(
init,
FetchChannels::ResponseMsg(action_sender, Some(cancellation_chan)),
))
.unwrap();
}
}
impl XMLHttpRequestMethods for XMLHttpRequest {
// https://xhr.spec.whatwg.org/#handler-xhr-onreadystatechange
event_handler!(
readystatechange,
GetOnreadystatechange,
SetOnreadystatechange
);
// https://xhr.spec.whatwg.org/#dom-xmlhttprequest-readystate
fn ReadyState(&self) -> u16 {
self.ready_state.get() as u16
}
// https://xhr.spec.whatwg.org/#the-open()-method
fn Open(&self, method: ByteString, url: USVString) -> ErrorResult {
// Step 8
self.Open_(method, url, true, None, None)
}
// https://xhr.spec.whatwg.org/#the-open()-method
fn Open_(
&self,
method: ByteString,
url: USVString,
asynch: bool,
username: Option<USVString>,
password: Option<USVString>,
) -> ErrorResult {
// Step 1
if let Some(window) = DomRoot::downcast::<Window>(self.global()) {
if !window.Document().is_fully_active() {
return Err(Error::InvalidState);
}
}
// Step 5
//FIXME(seanmonstar): use a Trie instead?
let maybe_method = method.as_str().and_then(|s| {
// Note: hyper tests against the uppercase versions
// Since we want to pass methods not belonging to the short list above
// without changing capitalization, this will actually sidestep rust-http's type system
// since methods like "patch" or "PaTcH" will be considered extension methods
// despite the there being a rust-http method variant for them
let upper = s.to_ascii_uppercase();
match &*upper {
"DELETE" | "GET" | "HEAD" | "OPTIONS" | "POST" | "PUT" | "CONNECT" | "TRACE" |
"TRACK" => upper.parse().ok(),
_ => s.parse().ok(),
}
});
match maybe_method {
// Step 4
Some(Method::CONNECT) | Some(Method::TRACE) => Err(Error::Security),
Some(ref t) if t.as_str() == "TRACK" => Err(Error::Security),
Some(parsed_method) => {
// Step 3
if !is_token(&method) {
return Err(Error::Syntax);
}
// Step 2
let base = self.global().api_base_url();
// Step 6
let mut parsed_url = match base.join(&url.0) {
Ok(parsed) => parsed,
// Step 7
Err(_) => return Err(Error::Syntax),
};
// Step 9
if parsed_url.host().is_some() {
if let Some(user_str) = username {
parsed_url.set_username(&user_str.0).unwrap();
}
if let Some(pass_str) = password {
parsed_url.set_password(Some(&pass_str.0)).unwrap();
}
}
// Step 10
if !asynch {
// FIXME: This should only happen if the global environment is a document environment
if self.timeout.get() != 0 ||
self.response_type.get() != XMLHttpRequestResponseType::_empty
{
return Err(Error::InvalidAccess);
}
}
// Step 11 - abort existing requests
self.terminate_ongoing_fetch();
// FIXME(#13767): In the WPT test: FileAPI/blob/Blob-XHR-revoke.html,
// the xhr.open(url) is expected to hold a reference to the URL,
// thus renders following revocations invalid. Though we won't
// implement this for now, if ever needed, we should check blob
// scheme and trigger corresponding actions here.
// Step 12
*self.request_method.borrow_mut() = parsed_method;
*self.request_url.borrow_mut() = Some(parsed_url);
self.sync.set(!asynch);
*self.request_headers.borrow_mut() = HeaderMap::new();
self.send_flag.set(false);
self.upload_listener.set(false);
*self.status_text.borrow_mut() = ByteString::new(vec![]);
self.status.set(0);
// Step 13
if self.ready_state.get() != XMLHttpRequestState::Opened {
self.change_ready_state(XMLHttpRequestState::Opened);
}
Ok(())
},
// Step 3
// This includes cases where as_str() returns None, and when is_token() returns false,
// both of which indicate invalid extension method names
_ => Err(Error::Syntax),
}
}
// https://xhr.spec.whatwg.org/#the-setrequestheader()-method
fn SetRequestHeader(&self, name: ByteString, value: ByteString) -> ErrorResult {
// Step 1, 2
if self.ready_state.get() != XMLHttpRequestState::Opened || self.send_flag.get() {
return Err(Error::InvalidState);
}
// Step 3
let value = trim_http_whitespace(&value);
// Step 4
if !is_token(&name) || !is_field_value(&value) {
return Err(Error::Syntax);
}
let name_lower = name.to_lower();
let name_str = match name_lower.as_str() {
Some(s) => {
// Step 5
// Disallowed headers and header prefixes:
// https://fetch.spec.whatwg.org/#forbidden-header-name
if is_forbidden_header_name(s) {
return Ok(());
} else {
s
}
},
None => unreachable!(),
};
debug!(
"SetRequestHeader: name={:?}, value={:?}",
name.as_str(),
str::from_utf8(value).ok()
);
let mut headers = self.request_headers.borrow_mut();
// Step 6
let value = match headers.get(name_str).map(HeaderValue::as_bytes) {
Some(raw) => {
let mut buf = raw.to_vec();
buf.extend_from_slice(b", ");
buf.extend_from_slice(value);
buf
},
None => value.into(),
};
headers.insert(
HeaderName::from_str(name_str).unwrap(),
HeaderValue::from_bytes(&value).unwrap(),
);
Ok(())
}
// https://xhr.spec.whatwg.org/#the-timeout-attribute
fn Timeout(&self) -> u32 {
self.timeout.get()
}
// https://xhr.spec.whatwg.org/#the-timeout-attribute
fn SetTimeout(&self, timeout: u32) -> ErrorResult {
// Step 1
if self.sync_in_window() {
return Err(Error::InvalidAccess);
}
// Step 2
self.timeout.set(timeout);
if self.send_flag.get() {
if timeout == 0 {
self.cancel_timeout();
return Ok(());
}
let progress = time::now().to_timespec().sec - self.fetch_time.get();
if timeout > (progress * 1000) as u32 {
self.set_timeout(timeout - (progress * 1000) as u32);
} else {
// Immediately execute the timeout steps
self.set_timeout(0);
}
}
Ok(())
}
// https://xhr.spec.whatwg.org/#the-withcredentials-attribute
fn WithCredentials(&self) -> bool {
self.with_credentials.get()
}
// https://xhr.spec.whatwg.org/#dom-xmlhttprequest-withcredentials
fn SetWithCredentials(&self, with_credentials: bool) -> ErrorResult {
match self.ready_state.get() {
// Step 1
XMLHttpRequestState::HeadersReceived |
XMLHttpRequestState::Loading |
XMLHttpRequestState::Done => Err(Error::InvalidState),
// Step 2
_ if self.send_flag.get() => Err(Error::InvalidState),
// Step 3
_ => {
self.with_credentials.set(with_credentials);
Ok(())
},
}
}
// https://xhr.spec.whatwg.org/#the-upload-attribute
fn Upload(&self) -> DomRoot<XMLHttpRequestUpload> {
DomRoot::from_ref(&*self.upload)
}
// https://xhr.spec.whatwg.org/#the-send()-method
fn Send(&self, data: Option<DocumentOrXMLHttpRequestBodyInit>) -> ErrorResult {
// Step 1, 2
if self.ready_state.get() != XMLHttpRequestState::Opened || self.send_flag.get() {
return Err(Error::InvalidState);
}
// Step 3
let data = match *self.request_method.borrow() {
Method::GET | Method::HEAD => None,
_ => data,
};
// Step 4 (first half)
let mut extracted_or_serialized = match data {
Some(DocumentOrXMLHttpRequestBodyInit::Document(ref doc)) => {
let bytes = Vec::from(serialize_document(&doc)?.as_ref());
let content_type = if doc.is_html_document() {
"text/html;charset=UTF-8"
} else {
"application/xml;charset=UTF-8"
};
let total_bytes = bytes.len();
let global = self.global();
let stream = ReadableStream::new_from_bytes(&global, bytes);
Some(ExtractedBody {
stream,
total_bytes: Some(total_bytes),
content_type: Some(DOMString::from(content_type)),
source: BodySource::Object,
})
},
Some(DocumentOrXMLHttpRequestBodyInit::Blob(ref b)) => {
let extracted_body = b.extract(&self.global()).expect("Couldn't extract body.");
if !extracted_body.in_memory() && self.sync.get() {
warn!("Sync XHR with not in-memory Blob as body not supported");
None
} else {
Some(extracted_body)
}
},
Some(DocumentOrXMLHttpRequestBodyInit::FormData(ref formdata)) => Some(
formdata
.extract(&self.global())
.expect("Couldn't extract body."),
),
Some(DocumentOrXMLHttpRequestBodyInit::String(ref str)) => {
Some(str.extract(&self.global()).expect("Couldn't extract body."))
},
Some(DocumentOrXMLHttpRequestBodyInit::URLSearchParams(ref urlsp)) => Some(
urlsp
.extract(&self.global())
.expect("Couldn't extract body."),
),
Some(DocumentOrXMLHttpRequestBodyInit::ArrayBuffer(ref typedarray)) => {
let bytes = typedarray.to_vec();
let total_bytes = bytes.len();
let global = self.global();
let stream = ReadableStream::new_from_bytes(&global, bytes);
Some(ExtractedBody {
stream,
total_bytes: Some(total_bytes),
content_type: None,
source: BodySource::Object,
})
},
Some(DocumentOrXMLHttpRequestBodyInit::ArrayBufferView(ref typedarray)) => {
let bytes = typedarray.to_vec();
let total_bytes = bytes.len();
let global = self.global();
let stream = ReadableStream::new_from_bytes(&global, bytes);
Some(ExtractedBody {
stream,
total_bytes: Some(total_bytes),
content_type: None,
source: BodySource::Object,
})
},
None => None,
};
self.request_body_len.set(
extracted_or_serialized
.as_ref()
.map_or(0, |e| e.total_bytes.unwrap_or(0)),
);
// Step 5
// If we dont have data to upload, we dont want to emit events
let has_handlers = self.upload.upcast::<EventTarget>().has_handlers();
self.upload_listener.set(has_handlers && data.is_some());
// todo preserved headers?
// Step 7
self.upload_complete.set(false);
// Step 8
// FIXME handle the 'timed out flag'
// Step 9
self.upload_complete.set(extracted_or_serialized.is_none());
// Step 10
self.send_flag.set(true);
// Step 11
if !self.sync.get() {
// If one of the event handlers below aborts the fetch by calling
// abort or open we will need the current generation id to detect it.
// Substep 1
let gen_id = self.generation_id.get();
self.dispatch_response_progress_event(atom!("loadstart"));
if self.generation_id.get() != gen_id {
return Ok(());
}
// Substep 2
if !self.upload_complete.get() && self.upload_listener.get() {
self.dispatch_upload_progress_event(atom!("loadstart"), Ok(Some(0)));
if self.generation_id.get() != gen_id {
return Ok(());
}
}
}
// Step 6
//TODO - set referrer_policy/referrer_url in request
let credentials_mode = if self.with_credentials.get() {
CredentialsMode::Include
} else {
CredentialsMode::CredentialsSameOrigin
};
let use_url_credentials = if let Some(ref url) = *self.request_url.borrow() {
!url.username().is_empty() || url.password().is_some()
} else {
unreachable!()
};
let content_type = match extracted_or_serialized.as_mut() {
Some(body) => body.content_type.take(),
None => None,
};
let mut request = RequestBuilder::new(
self.request_url.borrow().clone().unwrap(),
self.referrer.clone(),
)
.method(self.request_method.borrow().clone())
.headers((*self.request_headers.borrow()).clone())
.unsafe_request(true)
// XXXManishearth figure out how to avoid this clone
.body(extracted_or_serialized.map(|e| e.into_net_request_body().0))
// XXXManishearth actually "subresource", but it doesn't exist
// https://github.com/whatwg/xhr/issues/71
.destination(Destination::None)
.synchronous(self.sync.get())
.mode(RequestMode::CorsMode)
.use_cors_preflight(self.upload_listener.get())
.credentials_mode(credentials_mode)
.use_url_credentials(use_url_credentials)
.origin(self.global().origin().immutable().clone())
.referrer_policy(self.referrer_policy.clone())
.pipeline_id(Some(self.global().pipeline_id()));
// step 4 (second half)
match content_type {
Some(content_type) => {
let encoding = match data {
Some(DocumentOrXMLHttpRequestBodyInit::String(_)) |
Some(DocumentOrXMLHttpRequestBodyInit::Document(_)) =>
// XHR spec differs from http, and says UTF-8 should be in capitals,
// instead of "utf-8", which is what Hyper defaults to. So not
// using content types provided by Hyper.
{
Some("UTF-8")
},
_ => None,
};
let mut content_type_set = false;
if !request.headers.contains_key(header::CONTENT_TYPE) {
request.headers.insert(
header::CONTENT_TYPE,
HeaderValue::from_str(&content_type).unwrap(),
);
content_type_set = true;
}
if !content_type_set {
let ct = request.headers.typed_get::<ContentType>();
if let Some(ct) = ct {
if let Some(encoding) = encoding {
let mime: Mime = ct.into();
for param in mime.params() {
if param.0 == mime::CHARSET {
if !param.1.as_ref().eq_ignore_ascii_case(encoding) {
let new_params: Vec<(Name, Name)> = mime
.params()
.filter(|p| p.0 != mime::CHARSET)
.map(|p| (p.0, p.1))
.collect();
let new_mime = format!(
"{}/{}; charset={}{}{}",
mime.type_().as_ref(),
mime.subtype().as_ref(),
encoding,
if new_params.is_empty() { "" } else { "; " },
new_params
.iter()
.map(|p| format!("{}={}", p.0, p.1))
.collect::<Vec<String>>()
.join("; ")
);
let new_mime: Mime = new_mime.parse().unwrap();
request.headers.typed_insert(ContentType::from(new_mime))
}
}
}
}
}
}
},
_ => (),
}
self.fetch_time.set(time::now().to_timespec().sec);
let rv = self.fetch(request, &self.global());
// Step 10
if self.sync.get() {
return rv;
}
let timeout = self.timeout.get();
if timeout > 0 {
self.set_timeout(timeout);
}
Ok(())
}
// https://xhr.spec.whatwg.org/#the-abort()-method
fn Abort(&self) {
// Step 1
self.terminate_ongoing_fetch();
// Step 2
let state = self.ready_state.get();
if (state == XMLHttpRequestState::Opened && self.send_flag.get()) ||
state == XMLHttpRequestState::HeadersReceived ||
state == XMLHttpRequestState::Loading
{
let gen_id = self.generation_id.get();
self.process_partial_response(XHRProgress::Errored(gen_id, Error::Abort));
// If open was called in one of the handlers invoked by the
// above call then we should terminate the abort sequence
if self.generation_id.get() != gen_id {
return;
}
}
// Step 3
if self.ready_state.get() == XMLHttpRequestState::Done {
self.change_ready_state(XMLHttpRequestState::Unsent);
self.response_status.set(Err(()));
self.response.borrow_mut().clear();
self.response_headers.borrow_mut().clear();
}
}
// https://xhr.spec.whatwg.org/#the-responseurl-attribute
fn ResponseURL(&self) -> USVString {
USVString(self.response_url.borrow().clone())
}
// https://xhr.spec.whatwg.org/#the-status-attribute
fn Status(&self) -> u16 {
self.status.get()
}
// https://xhr.spec.whatwg.org/#the-statustext-attribute
fn StatusText(&self) -> ByteString {
self.status_text.borrow().clone()
}
// https://xhr.spec.whatwg.org/#the-getresponseheader()-method
fn GetResponseHeader(&self, name: ByteString) -> Option<ByteString> {
let headers = self.filter_response_headers();
let headers = headers.get_all(HeaderName::from_str(&name.as_str()?.to_lowercase()).ok()?);
let mut first = true;
let s = headers.iter().fold(Vec::new(), |mut vec, value| {
if !first {
vec.extend(", ".as_bytes());
}
if let Ok(v) = str::from_utf8(value.as_bytes()).map(|s| s.trim().as_bytes()) {
vec.extend(v);
first = false;
}
vec
});
// There was no header with that name so we never got to change that value
if first {
None
} else {
Some(ByteString::new(s))
}
}
// https://xhr.spec.whatwg.org/#the-getallresponseheaders()-method
fn GetAllResponseHeaders(&self) -> ByteString {
let headers = self.filter_response_headers();
let keys = headers.keys();
let v = keys.fold(Vec::new(), |mut vec, k| {
let values = headers.get_all(k);
vec.extend(k.as_str().as_bytes());
vec.extend(": ".as_bytes());
let mut first = true;
for value in values {
if !first {
vec.extend(", ".as_bytes());
first = false;
}
vec.extend(value.as_bytes());
}
vec.extend("\r\n".as_bytes());
vec
});
ByteString::new(v)
}
// https://xhr.spec.whatwg.org/#the-overridemimetype()-method
fn OverrideMimeType(&self, mime: DOMString) -> ErrorResult {
// Step 1
match self.ready_state.get() {
XMLHttpRequestState::Loading | XMLHttpRequestState::Done => {
return Err(Error::InvalidState);
},
_ => {},
}
// Step 2
let override_mime = mime.parse::<Mime>().map_err(|_| Error::Syntax)?;
// Step 3
let mime_str = override_mime.as_ref();
let mime_parts: Vec<&str> = mime_str.split(";").collect();
let mime_no_params = if mime_parts.len() > 1 {
mime_parts[0].parse().unwrap()
} else {
override_mime.clone()
};
*self.override_mime_type.borrow_mut() = Some(mime_no_params);
// Step 4
let value = override_mime.get_param(mime::CHARSET);
*self.override_charset.borrow_mut() =
value.and_then(|value| Encoding::for_label(value.as_ref().as_bytes()));
Ok(())
}
// https://xhr.spec.whatwg.org/#the-responsetype-attribute
fn ResponseType(&self) -> XMLHttpRequestResponseType {
self.response_type.get()
}
// https://xhr.spec.whatwg.org/#the-responsetype-attribute
fn SetResponseType(&self, response_type: XMLHttpRequestResponseType) -> ErrorResult {
// Step 1
if self.global().is::<WorkerGlobalScope>() &&
response_type == XMLHttpRequestResponseType::Document
{
return Ok(());
}
match self.ready_state.get() {
// Step 2
XMLHttpRequestState::Loading | XMLHttpRequestState::Done => Err(Error::InvalidState),
_ => {
if self.sync_in_window() {
// Step 3
Err(Error::InvalidAccess)
} else {
// Step 4
self.response_type.set(response_type);
Ok(())
}
},
}
}
#[allow(unsafe_code)]
// https://xhr.spec.whatwg.org/#the-response-attribute
fn Response(&self, cx: JSContext) -> JSVal {
rooted!(in(*cx) let mut rval = UndefinedValue());
match self.response_type.get() {
XMLHttpRequestResponseType::_empty | XMLHttpRequestResponseType::Text => unsafe {
let ready_state = self.ready_state.get();
// Step 2
if ready_state == XMLHttpRequestState::Done ||
ready_state == XMLHttpRequestState::Loading
{
self.text_response().to_jsval(*cx, rval.handle_mut());
} else {
// Step 1
"".to_jsval(*cx, rval.handle_mut());
}
},
// Step 1
_ if self.ready_state.get() != XMLHttpRequestState::Done => {
return NullValue();
},
// Step 2
XMLHttpRequestResponseType::Document => unsafe {
self.document_response().to_jsval(*cx, rval.handle_mut());
},
XMLHttpRequestResponseType::Json => unsafe {
self.json_response(cx).to_jsval(*cx, rval.handle_mut());
},
XMLHttpRequestResponseType::Blob => unsafe {
self.blob_response().to_jsval(*cx, rval.handle_mut());
},
XMLHttpRequestResponseType::Arraybuffer => match self.arraybuffer_response(cx) {
Some(js_object) => unsafe { js_object.to_jsval(*cx, rval.handle_mut()) },
None => return NullValue(),
},
}
rval.get()
}
// https://xhr.spec.whatwg.org/#the-responsetext-attribute
fn GetResponseText(&self) -> Fallible<USVString> {
match self.response_type.get() {
XMLHttpRequestResponseType::_empty | XMLHttpRequestResponseType::Text => {
Ok(USVString(String::from(match self.ready_state.get() {
// Step 3
XMLHttpRequestState::Loading | XMLHttpRequestState::Done => {
self.text_response()
},
// Step 2
_ => "".to_owned(),
})))
},
// Step 1
_ => Err(Error::InvalidState),
}
}
// https://xhr.spec.whatwg.org/#the-responsexml-attribute
fn GetResponseXML(&self) -> Fallible<Option<DomRoot<Document>>> {
match self.response_type.get() {
XMLHttpRequestResponseType::_empty | XMLHttpRequestResponseType::Document => {
// Step 3
if let XMLHttpRequestState::Done = self.ready_state.get() {
Ok(self.document_response())
} else {
// Step 2
Ok(None)
}
},
// Step 1
_ => Err(Error::InvalidState),
}
}
}
pub type TrustedXHRAddress = Trusted<XMLHttpRequest>;
impl XMLHttpRequest {
fn change_ready_state(&self, rs: XMLHttpRequestState) {
assert_ne!(self.ready_state.get(), rs);
self.ready_state.set(rs);
if rs != XMLHttpRequestState::Unsent {
let event = Event::new(
&self.global(),
atom!("readystatechange"),
EventBubbles::DoesNotBubble,
EventCancelable::Cancelable,
);
event.fire(self.upcast());
}
}
fn process_headers_available(
&self,
gen_id: GenerationId,
metadata: Result<FetchMetadata, NetworkError>,
) -> Result<(), Error> {
let metadata = match metadata {
Ok(meta) => match meta {
FetchMetadata::Unfiltered(m) => m,
FetchMetadata::Filtered { filtered, .. } => match filtered {
FilteredMetadata::Basic(m) => m,
FilteredMetadata::Cors(m) => m,
FilteredMetadata::Opaque => return Err(Error::Network),
FilteredMetadata::OpaqueRedirect(_) => return Err(Error::Network),
},
},
Err(_) => {
self.process_partial_response(XHRProgress::Errored(gen_id, Error::Network));
return Err(Error::Network);
},
};
*self.response_url.borrow_mut() = metadata.final_url[..Position::AfterQuery].to_owned();
// XXXManishearth Clear cache entries in case of a network error
self.process_partial_response(XHRProgress::HeadersReceived(
gen_id,
metadata.headers.map(Serde::into_inner),
metadata.status,
));
Ok(())
}
fn process_data_available(&self, gen_id: GenerationId, payload: Vec<u8>) {
self.process_partial_response(XHRProgress::Loading(gen_id, payload));
}
fn process_response_complete(
&self,
gen_id: GenerationId,
status: Result<(), NetworkError>,
) -> ErrorResult {
match status {
Ok(()) => {
self.process_partial_response(XHRProgress::Done(gen_id));
Ok(())
},
Err(_) => {
self.process_partial_response(XHRProgress::Errored(gen_id, Error::Network));
Err(Error::Network)
},
}
}
fn process_partial_response(&self, progress: XHRProgress) {
let msg_id = progress.generation_id();
// Aborts processing if abort() or open() was called
// (including from one of the event handlers called below)
macro_rules! return_if_fetch_was_terminated(
() => (
if msg_id != self.generation_id.get() {
return
}
);
);
// Ignore message if it belongs to a terminated fetch
return_if_fetch_was_terminated!();
// Ignore messages coming from previously-errored responses or requests that have timed out
if self.response_status.get().is_err() {
return;
}
match progress {
XHRProgress::HeadersReceived(_, headers, status) => {
assert!(self.ready_state.get() == XMLHttpRequestState::Opened);
// For synchronous requests, this should not fire any events, and just store data
// XXXManishearth Find a way to track partial progress of the send (onprogresss for XHRUpload)
// Part of step 13, send() (processing request end of file)
// Substep 1
self.upload_complete.set(true);
// Substeps 2-4
if !self.sync.get() && self.upload_listener.get() {
self.dispatch_upload_progress_event(atom!("progress"), Ok(None));
return_if_fetch_was_terminated!();
self.dispatch_upload_progress_event(atom!("load"), Ok(None));
return_if_fetch_was_terminated!();
self.dispatch_upload_progress_event(atom!("loadend"), Ok(None));
return_if_fetch_was_terminated!();
}
// Part of step 13, send() (processing response)
// XXXManishearth handle errors, if any (substep 1)
// Substep 2
status.map(|(code, reason)| {
self.status.set(code);
*self.status_text.borrow_mut() = ByteString::new(reason);
});
headers
.as_ref()
.map(|h| *self.response_headers.borrow_mut() = h.clone());
{
let len = headers.and_then(|h| h.typed_get::<ContentLength>());
let mut response = self.response.borrow_mut();
response.clear();
if let Some(len) = len {
// don't attempt to prereserve more than 4 MB of memory,
// to avoid giving servers the ability to DOS the client by
// providing arbitrarily large content-lengths.
//
// this number is arbitrary, it's basically big enough that most
// XHR requests won't hit it, but not so big that it allows for DOS
let size = cmp::min(0b100_0000000000_0000000000, len.0 as usize);
// preallocate the buffer
response.reserve(size);
}
}
// Substep 3
if !self.sync.get() {
self.change_ready_state(XMLHttpRequestState::HeadersReceived);
}
},
XHRProgress::Loading(_, mut partial_response) => {
// For synchronous requests, this should not fire any events, and just store data
// Part of step 11, send() (processing response body)
// XXXManishearth handle errors, if any (substep 2)
self.response.borrow_mut().append(&mut partial_response);
if !self.sync.get() {
if self.ready_state.get() == XMLHttpRequestState::HeadersReceived {
self.ready_state.set(XMLHttpRequestState::Loading);
}
let event = Event::new(
&self.global(),
atom!("readystatechange"),
EventBubbles::DoesNotBubble,
EventCancelable::Cancelable,
);
event.fire(self.upcast());
return_if_fetch_was_terminated!();
self.dispatch_response_progress_event(atom!("progress"));
}
},
XHRProgress::Done(_) => {
assert!(
self.ready_state.get() == XMLHttpRequestState::HeadersReceived ||
self.ready_state.get() == XMLHttpRequestState::Loading ||
self.sync.get()
);
self.cancel_timeout();
self.canceller.borrow_mut().ignore();
// Part of step 11, send() (processing response end of file)
// XXXManishearth handle errors, if any (substep 2)
// Subsubsteps 6-8
self.send_flag.set(false);
self.change_ready_state(XMLHttpRequestState::Done);
return_if_fetch_was_terminated!();
// Subsubsteps 11-12
self.dispatch_response_progress_event(atom!("load"));
return_if_fetch_was_terminated!();
self.dispatch_response_progress_event(atom!("loadend"));
},
XHRProgress::Errored(_, e) => {
self.cancel_timeout();
self.canceller.borrow_mut().ignore();
self.discard_subsequent_responses();
self.send_flag.set(false);
// XXXManishearth set response to NetworkError
self.change_ready_state(XMLHttpRequestState::Done);
return_if_fetch_was_terminated!();
let errormsg = match e {
Error::Abort => "abort",
Error::Timeout => "timeout",
_ => "error",
};
let upload_complete = &self.upload_complete;
if !upload_complete.get() {
upload_complete.set(true);
if self.upload_listener.get() {
self.dispatch_upload_progress_event(Atom::from(errormsg), Err(()));
return_if_fetch_was_terminated!();
self.dispatch_upload_progress_event(atom!("loadend"), Err(()));
return_if_fetch_was_terminated!();
}
}
self.dispatch_response_progress_event(Atom::from(errormsg));
return_if_fetch_was_terminated!();
self.dispatch_response_progress_event(atom!("loadend"));
},
}
}
fn terminate_ongoing_fetch(&self) {
self.canceller.borrow_mut().cancel();
let GenerationId(prev_id) = self.generation_id.get();
self.generation_id.set(GenerationId(prev_id + 1));
self.response_status.set(Ok(()));
}
fn dispatch_progress_event(&self, upload: bool, type_: Atom, loaded: u64, total: Option<u64>) {
let (total_length, length_computable) = if self
.response_headers
.borrow()
.contains_key(header::CONTENT_ENCODING)
{
(0, false)
} else {
(total.unwrap_or(0), total.is_some())
};
let progressevent = ProgressEvent::new(
&self.global(),
type_,
EventBubbles::DoesNotBubble,
EventCancelable::NotCancelable,
length_computable,
loaded,
total_length,
);
let target = if upload {
self.upload.upcast()
} else {
self.upcast()
};
progressevent.upcast::<Event>().fire(target);
}
fn dispatch_upload_progress_event(&self, type_: Atom, partial_load: Result<Option<u64>, ()>) {
// If partial_load is Ok(None), loading has completed and we can just use the value from the request body
// If an error occured, we pass 0 for both loaded and total
let request_body_len = self.request_body_len.get() as u64;
let (loaded, total) = match partial_load {
Ok(l) => match l {
Some(loaded) => (loaded, Some(request_body_len)),
None => (request_body_len, Some(request_body_len)),
},
Err(()) => (0, None),
};
self.dispatch_progress_event(true, type_, loaded, total);
}
fn dispatch_response_progress_event(&self, type_: Atom) {
let len = self.response.borrow().len() as u64;
let total = self
.response_headers
.borrow()
.typed_get::<ContentLength>()
.map(|v| v.0);
self.dispatch_progress_event(false, type_, len, total);
}
fn set_timeout(&self, duration_ms: u32) {
// Sets up the object to timeout in a given number of milliseconds
// This will cancel all previous timeouts
let callback = OneshotTimerCallback::XhrTimeout(XHRTimeoutCallback {
xhr: Trusted::new(self),
generation_id: self.generation_id.get(),
});
let duration = Length::new(duration_ms as u64);
*self.timeout_cancel.borrow_mut() =
Some(self.global().schedule_callback(callback, duration));
}
fn cancel_timeout(&self) {
if let Some(handle) = self.timeout_cancel.borrow_mut().take() {
self.global().unschedule_callback(handle);
}
}
// https://xhr.spec.whatwg.org/#text-response
fn text_response(&self) -> String {
// Step 3, 5
let charset = self.final_charset().unwrap_or(UTF_8);
// TODO: Step 4 - add support for XML encoding guess stuff using XML spec
// According to Simon, decode() should never return an error, so unwrap()ing
// the result should be fine. XXXManishearth have a closer look at this later
// Step 1, 2, 6
let response = self.response.borrow();
let (text, _, _) = charset.decode(&response);
text.into_owned()
}
// https://xhr.spec.whatwg.org/#blob-response
fn blob_response(&self) -> DomRoot<Blob> {
// Step 1
if let Some(response) = self.response_blob.get() {
return response;
}
// Step 2
let mime = self
.final_mime_type()
.as_ref()
.map(|m| normalize_type_string(&m.to_string()))
.unwrap_or("".to_owned());
// Step 3, 4
let bytes = self.response.borrow().to_vec();
let blob = Blob::new(&self.global(), BlobImpl::new_from_bytes(bytes, mime));
self.response_blob.set(Some(&blob));
blob
}
// https://xhr.spec.whatwg.org/#arraybuffer-response
#[allow(unsafe_code)]
fn arraybuffer_response(&self, cx: JSContext) -> Option<NonNull<JSObject>> {
// Step 1
let created = self.response_arraybuffer.get();
if let Some(nonnull) = NonNull::new(created) {
return Some(nonnull);
}
// Step 2
let bytes = self.response.borrow();
rooted!(in(*cx) let mut array_buffer = ptr::null_mut::<JSObject>());
unsafe {
ArrayBuffer::create(*cx, CreateWith::Slice(&bytes), array_buffer.handle_mut())
.ok()
.and_then(|()| {
self.response_arraybuffer.set(array_buffer.get());
Some(NonNull::new_unchecked(array_buffer.get()))
})
}
}
// https://xhr.spec.whatwg.org/#document-response
fn document_response(&self) -> Option<DomRoot<Document>> {
// Caching: if we have existing response xml, redirect it directly
let response = self.response_xml.get();
if response.is_some() {
return response;
}
// Step 1
if self.response_status.get().is_err() {
return None;
}
// Step 2
let mime_type = self.final_mime_type();
// Step 5.3, 7
let charset = self.final_charset().unwrap_or(UTF_8);
let temp_doc: DomRoot<Document>;
match mime_type {
Some(ref mime) if mime.type_() == mime::TEXT && mime.subtype() == mime::HTML => {
// Step 4
if self.response_type.get() == XMLHttpRequestResponseType::_empty {
return None;
} else {
// TODO Step 5.2 "If charset is null, prescan the first 1024 bytes of xhr’s received bytes"
// Step 5
temp_doc = self.document_text_html();
}
},
// Step 7
None => {
temp_doc = self.handle_xml();
// Not sure it the parser should throw an error for this case
// The specification does not indicates this test,
// but for now we check the document has no child nodes
let has_no_child_nodes = temp_doc.upcast::<Node>().children().next().is_none();
if has_no_child_nodes {
return None;
}
},
Some(ref mime)
if (mime.type_() == mime::TEXT && mime.subtype() == mime::XML) ||
(mime.type_() == mime::APPLICATION && mime.subtype() == mime::XML) ||
mime.suffix() == Some(mime::XML) =>
{
temp_doc = self.handle_xml();
// Not sure it the parser should throw an error for this case
// The specification does not indicates this test,
// but for now we check the document has no child nodes
let has_no_child_nodes = temp_doc.upcast::<Node>().children().next().is_none();
if has_no_child_nodes {
return None;
}
},
// Step 3
_ => {
return None;
},
}
// Step 8
temp_doc.set_encoding(charset);
// Step 9 to 11
// Done by handle_text_html and handle_xml
// Step 12
self.response_xml.set(Some(&temp_doc));
return self.response_xml.get();
}
#[allow(unsafe_code)]
// https://xhr.spec.whatwg.org/#json-response
fn json_response(&self, cx: JSContext) -> JSVal {
// Step 1
let response_json = self.response_json.get();
if !response_json.is_null_or_undefined() {
return response_json;
}
// Step 2
let bytes = self.response.borrow();
// Step 3
if bytes.len() == 0 {
return NullValue();
}
// Step 4
fn decode_to_utf16_with_bom_removal(bytes: &[u8], encoding: &'static Encoding) -> Vec<u16> {
let mut decoder = encoding.new_decoder_with_bom_removal();
let capacity = decoder
.max_utf16_buffer_length(bytes.len())
.expect("Overflow");
let mut utf16 = Vec::with_capacity(capacity);
let extra = unsafe { slice::from_raw_parts_mut(utf16.as_mut_ptr(), capacity) };
let last = true;
let (_, read, written, _) = decoder.decode_to_utf16(bytes, extra, last);
assert_eq!(read, bytes.len());
unsafe { utf16.set_len(written) }
utf16
}
// https://xhr.spec.whatwg.org/#json-response refers to
// https://infra.spec.whatwg.org/#parse-json-from-bytes which refers to
// https://encoding.spec.whatwg.org/#utf-8-decode which means
// that the encoding is always UTF-8 and the UTF-8 BOM is removed,
// if present, but UTF-16BE/LE BOM must not be honored.
let json_text = decode_to_utf16_with_bom_removal(&bytes, UTF_8);
// Step 5
rooted!(in(*cx) let mut rval = UndefinedValue());
unsafe {
if !JS_ParseJSON(
*cx,
json_text.as_ptr(),
json_text.len() as u32,
rval.handle_mut(),
) {
JS_ClearPendingException(*cx);
return NullValue();
}
}
// Step 6
self.response_json.set(rval.get());
self.response_json.get()
}
fn document_text_html(&self) -> DomRoot<Document> {
let charset = self.final_charset().unwrap_or(UTF_8);
let wr = self.global();
let response = self.response.borrow();
let (decoded, _, _) = charset.decode(&response);
let document = self.new_doc(IsHTMLDocument::HTMLDocument);
// TODO: Disable scripting while parsing
ServoParser::parse_html_document(&document, Some(DOMString::from(decoded)), wr.get_url());
document
}
fn handle_xml(&self) -> DomRoot<Document> {
let charset = self.final_charset().unwrap_or(UTF_8);
let wr = self.global();
let response = self.response.borrow();
let (decoded, _, _) = charset.decode(&response);
let document = self.new_doc(IsHTMLDocument::NonHTMLDocument);
// TODO: Disable scripting while parsing
ServoParser::parse_xml_document(&document, Some(DOMString::from(decoded)), wr.get_url());
document
}
fn new_doc(&self, is_html_document: IsHTMLDocument) -> DomRoot<Document> {
let wr = self.global();
let win = wr.as_window();
let doc = win.Document();
let docloader = DocumentLoader::new(&*doc.loader());
let base = wr.get_url();
let parsed_url = match base.join(&self.ResponseURL().0) {
Ok(parsed) => Some(parsed),
Err(_) => None, // Step 7
};
let content_type = self.final_mime_type();
Document::new(
win,
HasBrowsingContext::No,
parsed_url,
doc.origin().clone(),
is_html_document,
content_type,
None,
DocumentActivity::Inactive,
DocumentSource::FromParser,
docloader,
None,
None,
Default::default(),
)
}
fn filter_response_headers(&self) -> HeaderMap {
// https://fetch.spec.whatwg.org/#concept-response-header-list
let mut headers = self.response_headers.borrow().clone();
headers.remove(header::SET_COOKIE);
headers.remove(HeaderName::from_static("set-cookie2"));
// XXXManishearth additional CORS filtering goes here
headers
}
fn discard_subsequent_responses(&self) {
self.response_status.set(Err(()));
}
fn fetch(&self, init: RequestBuilder, global: &GlobalScope) -> ErrorResult {
let xhr = Trusted::new(self);
let context = Arc::new(Mutex::new(XHRContext {
xhr: xhr,
gen_id: self.generation_id.get(),
sync_status: DomRefCell::new(None),
resource_timing: ResourceFetchTiming::new(ResourceTimingType::Resource),
url: init.url.clone(),
}));
let (task_source, script_port) = if self.sync.get() {
let (tx, rx) = global.new_script_pair();
(NetworkingTaskSource(tx, global.pipeline_id()), Some(rx))
} else {
(global.networking_task_source(), None)
};
let cancel_receiver = self.canceller.borrow_mut().initialize();
XMLHttpRequest::initiate_async_xhr(
context.clone(),
task_source,
global,
init,
cancel_receiver,
);
if let Some(script_port) = script_port {
loop {
if !global.process_event(script_port.recv().unwrap()) {
// We're exiting.
return Err(Error::Abort);
}
let context = context.lock().unwrap();
let sync_status = context.sync_status.borrow();
if let Some(ref status) = *sync_status {
return status.clone();
}
}
}
Ok(())
}
fn final_charset(&self) -> Option<&'static Encoding> {
if self.override_charset.borrow().is_some() {
self.override_charset.borrow().clone()
} else {
match self.response_headers.borrow().typed_get::<ContentType>() {
Some(ct) => {
let mime: Mime = ct.into();
let value = mime.get_param(mime::CHARSET);
value.and_then(|value| Encoding::for_label(value.as_ref().as_bytes()))
},
None => None,
}
}
}
/// <https://xhr.spec.whatwg.org/#response-mime-type>
fn response_mime_type(&self) -> Option<Mime> {
return extract_mime_type(&self.response_headers.borrow())
.map(|mime_as_bytes| {
String::from_utf8(mime_as_bytes)
.unwrap_or_default()
.parse()
.ok()
})
.flatten()
.or(Some(mime::TEXT_XML));
}
/// <https://xhr.spec.whatwg.org/#final-mime-type>
fn final_mime_type(&self) -> Option<Mime> {
if self.override_mime_type.borrow().is_some() {
self.override_mime_type.borrow().clone()
} else {
return self.response_mime_type();
}
}
}
#[derive(JSTraceable, MallocSizeOf)]
pub struct XHRTimeoutCallback {
#[ignore_malloc_size_of = "Because it is non-owning"]
xhr: Trusted<XMLHttpRequest>,
generation_id: GenerationId,
}
impl XHRTimeoutCallback {
pub fn invoke(self) {
let xhr = self.xhr.root();
if xhr.ready_state.get() != XMLHttpRequestState::Done {
xhr.process_partial_response(XHRProgress::Errored(self.generation_id, Error::Timeout));
}
}
}
fn serialize_document(doc: &Document) -> Fallible<DOMString> {
let mut writer = vec![];
match serialize(&mut writer, &doc.upcast::<Node>(), SerializeOpts::default()) {
Ok(_) => Ok(DOMString::from(String::from_utf8(writer).unwrap())),
Err(_) => Err(Error::InvalidState),
}
}
/// Returns whether `bs` is a `field-value`, as defined by
/// [RFC 2616](http://tools.ietf.org/html/rfc2616#page-32).
pub fn is_field_value(slice: &[u8]) -> bool {
// Classifications of characters necessary for the [CRLF] (SP|HT) rule
#[derive(PartialEq)]
enum PreviousCharacter {
Other,
CR,
LF,
SPHT, // SP or HT
}
let mut prev = PreviousCharacter::Other; // The previous character
slice.iter().all(|&x| {
// http://tools.ietf.org/html/rfc2616#section-2.2
match x {
13 => {
// CR
if prev == PreviousCharacter::Other || prev == PreviousCharacter::SPHT {
prev = PreviousCharacter::CR;
true
} else {
false
}
},
10 => {
// LF
if prev == PreviousCharacter::CR {
prev = PreviousCharacter::LF;
true
} else {
false
}
},
32 => {
// SP
if prev == PreviousCharacter::LF || prev == PreviousCharacter::SPHT {
prev = PreviousCharacter::SPHT;
true
} else if prev == PreviousCharacter::Other {
// Counts as an Other here, since it's not preceded by a CRLF
// SP is not a CTL, so it can be used anywhere
// though if used immediately after a CR the CR is invalid
// We don't change prev since it's already Other
true
} else {
false
}
},
9 => {
// HT
if prev == PreviousCharacter::LF || prev == PreviousCharacter::SPHT {
prev = PreviousCharacter::SPHT;
true
} else {
false
}
},
0..=31 | 127 => false, // CTLs
x if x > 127 => false, // non ASCII
_ if prev == PreviousCharacter::Other || prev == PreviousCharacter::SPHT => {
prev = PreviousCharacter::Other;
true
},
_ => false, // Previous character was a CR/LF but not part of the [CRLF] (SP|HT) rule
}
})
}<|fim▁end|> | #[ignore_malloc_size_of = "Defined in hyper"]
request_method: DomRefCell<Method>,
request_url: DomRefCell<Option<ServoUrl>>, |
<|file_name|>590_remove_brief_status_column.py<|end_file_name|><|fim▁begin|>"""Remove brief status column
Revision ID: 590
Revises: 580
Create Date: 2016-03-03 14:56:59.218753<|fim▁hole|>revision = '590'
down_revision = '580'
from alembic import op
import sqlalchemy as sa
def upgrade():
op.drop_column('briefs', 'status')
def downgrade():
op.add_column('briefs', sa.Column('status', sa.VARCHAR(), autoincrement=False, nullable=True))
op.execute("""
UPDATE briefs SET status = (CASE WHEN published_at is not NULL THEN 'live' ELSE 'draft' END)
""")
op.alter_column('briefs', sa.Column('status', sa.VARCHAR(), nullable=False))<|fim▁end|> |
"""
# revision identifiers, used by Alembic. |
<|file_name|>HelpCommand.java<|end_file_name|><|fim▁begin|>package fr.lteconsulting.pomexplorer.commands;
import fr.lteconsulting.pomexplorer.AppFactory;
import fr.lteconsulting.pomexplorer.Client;
import fr.lteconsulting.pomexplorer.Log;
public class HelpCommand
{<|fim▁hole|> @Help( "gives this message" )
public void main( Client client, Log log )
{
log.html( AppFactory.get().commands().help() );
}
}<|fim▁end|> | |
<|file_name|>str.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
//! The `ByteString` struct.
use chrono::prelude::{Utc, Weekday};
use chrono::{Datelike, TimeZone};
use cssparser::CowRcStr;
use html5ever::{LocalName, Namespace};
use regex::Regex;
use servo_atoms::Atom;
use std::borrow::{Borrow, Cow, ToOwned};
use std::default::Default;
use std::fmt;
use std::hash::{Hash, Hasher};
use std::marker::PhantomData;
use std::ops;
use std::ops::{Deref, DerefMut};
use std::str;
use std::str::FromStr;
/// Encapsulates the IDL `ByteString` type.
#[derive(Clone, Debug, Default, Eq, JSTraceable, MallocSizeOf, PartialEq)]
pub struct ByteString(Vec<u8>);
impl ByteString {
/// Creates a new `ByteString`.
pub fn new(value: Vec<u8>) -> ByteString {
ByteString(value)
}
/// Returns `self` as a string, if it encodes valid UTF-8, and `None`
/// otherwise.
pub fn as_str(&self) -> Option<&str> {
str::from_utf8(&self.0).ok()
}
/// Returns the length.
pub fn len(&self) -> usize {
self.0.len()
}
/// Returns `self` with A–Z replaced by a–z.
pub fn to_lower(&self) -> ByteString {
ByteString::new(self.0.to_ascii_lowercase())
}
}
impl Into<Vec<u8>> for ByteString {
fn into(self) -> Vec<u8> {
self.0
}
}
impl Hash for ByteString {
fn hash<H: Hasher>(&self, state: &mut H) {
self.0.hash(state);
}
}
impl FromStr for ByteString {
type Err = ();
fn from_str(s: &str) -> Result<ByteString, ()> {
Ok(ByteString::new(s.to_owned().into_bytes()))
}
}
impl ops::Deref for ByteString {
type Target = [u8];
fn deref(&self) -> &[u8] {
&self.0
}
}
/// A string that is constructed from a UCS-2 buffer by replacing invalid code
/// points with the replacement character.
#[derive(Clone, Default, Eq, Hash, MallocSizeOf, Ord, PartialEq, PartialOrd)]
pub struct USVString(pub String);
impl Borrow<str> for USVString {
#[inline]
fn borrow(&self) -> &str {
&self.0
}
}
impl Deref for USVString {
type Target = str;
#[inline]
fn deref(&self) -> &str {
&self.0
}
}
impl DerefMut for USVString {
#[inline]
fn deref_mut(&mut self) -> &mut str {
&mut self.0
}
}
impl AsRef<str> for USVString {
fn as_ref(&self) -> &str {
&self.0
}
}
impl fmt::Display for USVString {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(&**self, f)
}
}
impl PartialEq<str> for USVString {
fn eq(&self, other: &str) -> bool {
&**self == other
}
}
impl<'a> PartialEq<&'a str> for USVString {
fn eq(&self, other: &&'a str) -> bool {
&**self == *other
}
}
impl From<String> for USVString {
fn from(contents: String) -> USVString {
USVString(contents)
}
}
/// Returns whether `s` is a `token`, as defined by
/// [RFC 2616](http://tools.ietf.org/html/rfc2616#page-17).
pub fn is_token(s: &[u8]) -> bool {
if s.is_empty() {
return false; // A token must be at least a single character
}
s.iter().all(|&x| {
// http://tools.ietf.org/html/rfc2616#section-2.2
match x {
0..=31 | 127 => false, // CTLs
40 | 41 | 60 | 62 | 64 | 44 | 59 | 58 | 92 | 34 | 47 | 91 | 93 | 63 | 61 | 123 |
125 | 32 => false, // separators
x if x > 127 => false, // non-CHARs
_ => true,
}
})
}
/// A DOMString.
///
/// This type corresponds to the [`DOMString`](idl) type in WebIDL.
///
/// [idl]: https://heycam.github.io/webidl/#idl-DOMString
///
/// Conceptually, a DOMString has the same value space as a JavaScript String,
/// i.e., an array of 16-bit *code units* representing UTF-16, potentially with
/// unpaired surrogates present (also sometimes called WTF-16).
///
/// Currently, this type stores a Rust `String`, in order to avoid issues when
/// integrating with the rest of the Rust ecosystem and even the rest of the
/// browser itself.
///
/// However, Rust `String`s are guaranteed to be valid UTF-8, and as such have
/// a *smaller value space* than WTF-16 (i.e., some JavaScript String values
/// can not be represented as a Rust `String`). This introduces the question of
/// what to do with values being passed from JavaScript to Rust that contain
/// unpaired surrogates.
///
/// The hypothesis is that it does not matter much how exactly those values are
/// transformed, because passing unpaired surrogates into the DOM is very rare.
/// In order to test this hypothesis, Servo will panic when encountering any
/// unpaired surrogates on conversion to `DOMString` by default. (The command
/// line option `-Z replace-surrogates` instead causes Servo to replace the
/// unpaired surrogate by a U+FFFD replacement character.)
///
/// Currently, the lack of crash reports about this issue provides some
/// evidence to support the hypothesis. This evidence will hopefully be used to
/// convince other browser vendors that it would be safe to replace unpaired
/// surrogates at the boundary between JavaScript and native code. (This would
/// unify the `DOMString` and `USVString` types, both in the WebIDL standard
/// and in Servo.)
///
/// This type is currently `!Send`, in order to help with an independent
/// experiment to store `JSString`s rather than Rust `String`s.
#[derive(Clone, Debug, Eq, Hash, MallocSizeOf, Ord, PartialEq, PartialOrd)]
pub struct DOMString(String, PhantomData<*const ()>);
impl DOMString {
/// Creates a new `DOMString`.
pub fn new() -> DOMString {
DOMString(String::new(), PhantomData)
}
/// Creates a new `DOMString` from a `String`.
pub fn from_string(s: String) -> DOMString {
DOMString(s, PhantomData)
}
/// Appends a given string slice onto the end of this String.
pub fn push_str(&mut self, string: &str) {
self.0.push_str(string)
}
/// Clears this `DOMString`, removing all contents.
pub fn clear(&mut self) {
self.0.clear()
}
/// Shortens this String to the specified length.
pub fn truncate(&mut self, new_len: usize) {
self.0.truncate(new_len);
}
/// Removes newline characters according to <https://infra.spec.whatwg.org/#strip-newlines>.
pub fn strip_newlines(&mut self) {
self.0.retain(|c| c != '\r' && c != '\n');
}
/// Removes leading and trailing ASCII whitespaces according to
/// <https://infra.spec.whatwg.org/#strip-leading-and-trailing-ascii-whitespace>.
pub fn strip_leading_and_trailing_ascii_whitespace(&mut self) {
if self.0.len() == 0 {
return;
}
let trailing_whitespace_len = self
.0
.trim_end_matches(|ref c| char::is_ascii_whitespace(c))
.len();
self.0.truncate(trailing_whitespace_len);
if self.0.is_empty() {
return;
}
let first_non_whitespace = self.0.find(|ref c| !char::is_ascii_whitespace(c)).unwrap();
let _ = self.0.replace_range(0..first_non_whitespace, "");
}
/// Validates this `DOMString` is a time string according to
/// <https://html.spec.whatwg.org/multipage/#valid-time-string>.
pub fn is_valid_time_string(&self) -> bool {
enum State {
HourHigh,
HourLow09,
HourLow03,
MinuteColon,
MinuteHigh,
MinuteLow,
SecondColon,
SecondHigh,
SecondLow,
MilliStop,
MilliHigh,
MilliMiddle,
MilliLow,
Done,
Error,
}
let next_state = |valid: bool, next: State| -> State {
if valid {
next
} else {
State::Error
}
};
let state = self.chars().fold(State::HourHigh, |state, c| {
match state {
// Step 1 "HH"
State::HourHigh => match c {
'0' | '1' => State::HourLow09,
'2' => State::HourLow03,
_ => State::Error,
},
State::HourLow09 => next_state(c.is_digit(10), State::MinuteColon),
State::HourLow03 => next_state(c.is_digit(4), State::MinuteColon),
// Step 2 ":"
State::MinuteColon => next_state(c == ':', State::MinuteHigh),
// Step 3 "mm"
State::MinuteHigh => next_state(c.is_digit(6), State::MinuteLow),
State::MinuteLow => next_state(c.is_digit(10), State::SecondColon),
// Step 4.1 ":"
State::SecondColon => next_state(c == ':', State::SecondHigh),
// Step 4.2 "ss"
State::SecondHigh => next_state(c.is_digit(6), State::SecondLow),
State::SecondLow => next_state(c.is_digit(10), State::MilliStop),
// Step 4.3.1 "."
State::MilliStop => next_state(c == '.', State::MilliHigh),
// Step 4.3.2 "SSS"
State::MilliHigh => next_state(c.is_digit(10), State::MilliMiddle),
State::MilliMiddle => next_state(c.is_digit(10), State::MilliLow),
State::MilliLow => next_state(c.is_digit(10), State::Done),
_ => State::Error,
}
});
match state {
State::Done |
// Step 4 (optional)
State::SecondColon |
// Step 4.3 (optional)
State::MilliStop |
// Step 4.3.2 (only 1 digit required)
State::MilliMiddle | State::MilliLow => true,
_ => false
}
}
/// A valid date string should be "YYYY-MM-DD"
/// YYYY must be four or more digits, MM and DD both must be two digits
/// https://html.spec.whatwg.org/multipage/#valid-date-string
pub fn is_valid_date_string(&self) -> bool {
self.parse_date_string().is_ok()
}
/// https://html.spec.whatwg.org/multipage/#parse-a-date-string
pub fn parse_date_string(&self) -> Result<(i32, u32, u32), ()> {
let value = &self.0;
// Step 1, 2, 3
let (year_int, month_int, day_int) = parse_date_component(value)?;
// Step 4
if value.split('-').nth(3).is_some() {
return Err(());
}
// Step 5, 6
Ok((year_int, month_int, day_int))
}
/// https://html.spec.whatwg.org/multipage/#parse-a-time-string
pub fn parse_time_string(&self) -> Result<(u32, u32, f64), ()> {
let value = &self.0;
// Step 1, 2, 3
let (hour_int, minute_int, second_float) = parse_time_component(value)?;
// Step 4
if value.split(':').nth(3).is_some() {
return Err(());
}
// Step 5, 6
Ok((hour_int, minute_int, second_float))
}
/// A valid month string should be "YYYY-MM"
/// YYYY must be four or more digits, MM both must be two digits
/// https://html.spec.whatwg.org/multipage/#valid-month-string
pub fn is_valid_month_string(&self) -> bool {<|fim▁hole|> /// https://html.spec.whatwg.org/multipage/#parse-a-month-string
pub fn parse_month_string(&self) -> Result<(i32, u32), ()> {
let value = &self;
// Step 1, 2, 3
let (year_int, month_int) = parse_month_component(value)?;
// Step 4
if value.split("-").nth(2).is_some() {
return Err(());
}
// Step 5
Ok((year_int, month_int))
}
/// A valid week string should be like {YYYY}-W{WW}, such as "2017-W52"
/// YYYY must be four or more digits, WW both must be two digits
/// https://html.spec.whatwg.org/multipage/#valid-week-string
pub fn is_valid_week_string(&self) -> bool {
self.parse_week_string().is_ok()
}
/// https://html.spec.whatwg.org/multipage/#parse-a-week-string
pub fn parse_week_string(&self) -> Result<(i32, u32), ()> {
let value = &self.0;
// Step 1, 2, 3
let mut iterator = value.split('-');
let year = iterator.next().ok_or(())?;
// Step 4
let year_int = year.parse::<i32>().map_err(|_| ())?;
if year.len() < 4 || year_int == 0 {
return Err(());
}
// Step 5, 6
let week = iterator.next().ok_or(())?;
let (week_first, week_last) = week.split_at(1);
if week_first != "W" {
return Err(());
}
// Step 7
let week_int = week_last.parse::<u32>().map_err(|_| ())?;
if week_last.len() != 2 {
return Err(());
}
// Step 8
let max_week = max_week_in_year(year_int);
// Step 9
if week_int < 1 || week_int > max_week {
return Err(());
}
// Step 10
if iterator.next().is_some() {
return Err(());
}
// Step 11
Ok((year_int, week_int))
}
/// https://html.spec.whatwg.org/multipage/#valid-floating-point-number
pub fn is_valid_floating_point_number_string(&self) -> bool {
lazy_static! {
static ref RE: Regex =
Regex::new(r"^-?(?:\d+\.\d+|\d+|\.\d+)(?:(e|E)(\+|\-)?\d+)?$").unwrap();
}
RE.is_match(&self.0) && self.parse_floating_point_number().is_ok()
}
/// https://html.spec.whatwg.org/multipage/#rules-for-parsing-floating-point-number-values
pub fn parse_floating_point_number(&self) -> Result<f64, ()> {
// Steps 15-16 are telling us things about IEEE rounding modes
// for floating-point significands; this code assumes the Rust
// compiler already matches them in any cases where
// that actually matters. They are not
// related to f64::round(), which is for rounding to integers.
let input = &self.0;
match input.trim().parse::<f64>() {
Ok(val)
if !(
// A valid number is the same as what rust considers to be valid,
// except for +1., NaN, and Infinity.
val.is_infinite() ||
val.is_nan() ||
input.ends_with(".") ||
input.starts_with("+")
) =>
{
Ok(val)
},
_ => Err(()),
}
}
/// https://html.spec.whatwg.org/multipage/#best-representation-of-the-number-as-a-floating-point-number
pub fn set_best_representation_of_the_floating_point_number(&mut self) {
if let Ok(val) = self.parse_floating_point_number() {
self.0 = val.to_string();
}
}
/// A valid normalized local date and time string should be "{date}T{time}"
/// where date and time are both valid, and the time string must be as short as possible
/// https://html.spec.whatwg.org/multipage/#valid-normalised-local-date-and-time-string
pub fn convert_valid_normalized_local_date_and_time_string(&mut self) -> Result<(), ()> {
let ((year, month, day), (hour, minute, second)) =
self.parse_local_date_and_time_string()?;
if second == 0.0 {
self.0 = format!(
"{:04}-{:02}-{:02}T{:02}:{:02}",
year, month, day, hour, minute
);
} else {
self.0 = format!(
"{:04}-{:02}-{:02}T{:02}:{:02}:{}",
year, month, day, hour, minute, second
);
}
Ok(())
}
/// https://html.spec.whatwg.org/multipage/#parse-a-local-date-and-time-string
pub fn parse_local_date_and_time_string(
&self,
) -> Result<((i32, u32, u32), (u32, u32, f64)), ()> {
let value = &self;
// Step 1, 2, 4
let mut iterator = if value.contains('T') {
value.split('T')
} else {
value.split(' ')
};
// Step 3
let date = iterator.next().ok_or(())?;
let date_tuple = parse_date_component(date)?;
// Step 5
let time = iterator.next().ok_or(())?;
let time_tuple = parse_time_component(time)?;
// Step 6
if iterator.next().is_some() {
return Err(());
}
// Step 7, 8, 9
Ok((date_tuple, time_tuple))
}
}
impl Borrow<str> for DOMString {
#[inline]
fn borrow(&self) -> &str {
&self.0
}
}
impl Default for DOMString {
fn default() -> Self {
DOMString(String::new(), PhantomData)
}
}
impl Deref for DOMString {
type Target = str;
#[inline]
fn deref(&self) -> &str {
&self.0
}
}
impl DerefMut for DOMString {
#[inline]
fn deref_mut(&mut self) -> &mut str {
&mut self.0
}
}
impl AsRef<str> for DOMString {
fn as_ref(&self) -> &str {
&self.0
}
}
impl fmt::Display for DOMString {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
fmt::Display::fmt(&**self, f)
}
}
impl PartialEq<str> for DOMString {
fn eq(&self, other: &str) -> bool {
&**self == other
}
}
impl<'a> PartialEq<&'a str> for DOMString {
fn eq(&self, other: &&'a str) -> bool {
&**self == *other
}
}
impl From<String> for DOMString {
fn from(contents: String) -> DOMString {
DOMString(contents, PhantomData)
}
}
impl<'a> From<&'a str> for DOMString {
fn from(contents: &str) -> DOMString {
DOMString::from(String::from(contents))
}
}
impl<'a> From<Cow<'a, str>> for DOMString {
fn from(contents: Cow<'a, str>) -> DOMString {
match contents {
Cow::Owned(s) => DOMString::from(s),
Cow::Borrowed(s) => DOMString::from(s),
}
}
}
impl From<DOMString> for LocalName {
fn from(contents: DOMString) -> LocalName {
LocalName::from(contents.0)
}
}
impl From<DOMString> for Namespace {
fn from(contents: DOMString) -> Namespace {
Namespace::from(contents.0)
}
}
impl From<DOMString> for Atom {
fn from(contents: DOMString) -> Atom {
Atom::from(contents.0)
}
}
impl From<DOMString> for String {
fn from(contents: DOMString) -> String {
contents.0
}
}
impl Into<Vec<u8>> for DOMString {
fn into(self) -> Vec<u8> {
self.0.into()
}
}
impl<'a> Into<Cow<'a, str>> for DOMString {
fn into(self) -> Cow<'a, str> {
self.0.into()
}
}
impl<'a> Into<CowRcStr<'a>> for DOMString {
fn into(self) -> CowRcStr<'a> {
self.0.into()
}
}
impl Extend<char> for DOMString {
fn extend<I>(&mut self, iterable: I)
where
I: IntoIterator<Item = char>,
{
self.0.extend(iterable)
}
}
/// https://html.spec.whatwg.org/multipage/#parse-a-month-component
fn parse_month_component(value: &str) -> Result<(i32, u32), ()> {
// Step 3
let mut iterator = value.split('-');
let year = iterator.next().ok_or(())?;
let month = iterator.next().ok_or(())?;
// Step 1, 2
let year_int = year.parse::<i32>().map_err(|_| ())?;
if year.len() < 4 || year_int == 0 {
return Err(());
}
// Step 4, 5
let month_int = month.parse::<u32>().map_err(|_| ())?;
if month.len() != 2 || month_int > 12 || month_int < 1 {
return Err(());
}
// Step 6
Ok((year_int, month_int))
}
/// https://html.spec.whatwg.org/multipage/#parse-a-date-component
fn parse_date_component(value: &str) -> Result<(i32, u32, u32), ()> {
// Step 1
let (year_int, month_int) = parse_month_component(value)?;
// Step 3, 4
let day = value.split('-').nth(2).ok_or(())?;
let day_int = day.parse::<u32>().map_err(|_| ())?;
if day.len() != 2 {
return Err(());
}
// Step 2, 5
let max_day = max_day_in_month(year_int, month_int)?;
if day_int == 0 || day_int > max_day {
return Err(());
}
// Step 6
Ok((year_int, month_int, day_int))
}
/// https://html.spec.whatwg.org/multipage/#parse-a-time-component
fn parse_time_component(value: &str) -> Result<(u32, u32, f64), ()> {
// Step 1
let mut iterator = value.split(':');
let hour = iterator.next().ok_or(())?;
if hour.len() != 2 {
return Err(());
}
let hour_int = hour.parse::<u32>().map_err(|_| ())?;
// Step 2
if hour_int > 23 {
return Err(());
}
// Step 3, 4
let minute = iterator.next().ok_or(())?;
if minute.len() != 2 {
return Err(());
}
let minute_int = minute.parse::<u32>().map_err(|_| ())?;
// Step 5
if minute_int > 59 {
return Err(());
}
// Step 6, 7
let second_float = match iterator.next() {
Some(second) => {
let mut second_iterator = second.split('.');
if second_iterator.next().ok_or(())?.len() != 2 {
return Err(());
}
match second_iterator.next() {
Some(second_last) => {
if second_last.len() > 3 {
return Err(());
}
},
None => {},
}
second.parse::<f64>().map_err(|_| ())?
},
None => 0.0,
};
// Step 8
Ok((hour_int, minute_int, second_float))
}
fn max_day_in_month(year_num: i32, month_num: u32) -> Result<u32, ()> {
match month_num {
1 | 3 | 5 | 7 | 8 | 10 | 12 => Ok(31),
4 | 6 | 9 | 11 => Ok(30),
2 => {
if is_leap_year(year_num) {
Ok(29)
} else {
Ok(28)
}
},
_ => Err(()),
}
}
/// https://html.spec.whatwg.org/multipage/#week-number-of-the-last-day
fn max_week_in_year(year: i32) -> u32 {
match Utc.ymd(year as i32, 1, 1).weekday() {
Weekday::Thu => 53,
Weekday::Wed if is_leap_year(year) => 53,
_ => 52,
}
}
#[inline]
fn is_leap_year(year: i32) -> bool {
year % 400 == 0 || (year % 4 == 0 && year % 100 != 0)
}<|fim▁end|> | self.parse_month_string().is_ok()
}
|
<|file_name|>states.rs<|end_file_name|><|fim▁begin|>use v3::messages::a2a::MessageId;
use v3::messages::issuance::credential_request::CredentialRequest;
use v3::messages::issuance::credential_offer::CredentialOffer;
use v3::messages::issuance::credential::Credential;
use v3::messages::status::Status;
use v3::messages::error::ProblemReport;
// Possible Transitions:
// Initial -> OfferSent
// Initial -> Finished
// OfferSent -> CredentialSent
// OfferSent -> Finished
// CredentialSent -> Finished
#[derive(Serialize, Deserialize, Debug, Clone)]
pub enum IssuerState {
Initial(InitialState),
OfferSent(OfferSentState),
RequestReceived(RequestReceivedState),
CredentialSent(CredentialSentState),
Finished(FinishedState)
}
impl IssuerState {
pub fn get_connection_handle(&self) -> u32 {
match self {
IssuerState::Initial(_) => 0,
IssuerState::OfferSent(state) => state.connection_handle,
IssuerState::RequestReceived(state) => state.connection_handle,
IssuerState::CredentialSent(state) => state.connection_handle,
IssuerState::Finished(_) => 0
}
}
pub fn thread_id(&self) -> String {
match self {
IssuerState::Initial(_) => String::new(),
IssuerState::OfferSent(state) => state.thread_id.clone(),
IssuerState::RequestReceived(state) => state.thread_id.clone(),
IssuerState::CredentialSent(state) => state.thread_id.clone(),
IssuerState::Finished(state) => state.thread_id.clone(),
}
}
}
impl InitialState {
pub fn new(cred_def_id: &str, credential_json: &str, rev_reg_id: Option<String>, tails_file: Option<String>) -> Self {
InitialState {
cred_def_id: cred_def_id.to_string(),
credential_json: credential_json.to_string(),
rev_reg_id,
tails_file
}
}
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct InitialState {
pub cred_def_id: String,
pub credential_json: String,
pub rev_reg_id: Option<String>,
pub tails_file: Option<String>,
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct OfferSentState {
pub offer: String,
pub cred_data: String,
pub rev_reg_id: Option<String>,
pub tails_file: Option<String>,
pub connection_handle: u32,
pub thread_id: String
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct RequestReceivedState {
pub offer: String,
pub cred_data: String,
pub rev_reg_id: Option<String>,
pub tails_file: Option<String>,
pub connection_handle: u32,
pub request: CredentialRequest,
pub thread_id: String
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct RevocationInfoV1 {
pub cred_rev_id: Option<String>,
pub rev_reg_id: Option<String>,
pub tails_file: Option<String>
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct CredentialSentState {
pub connection_handle: u32,
pub revocation_info_v1: Option<RevocationInfoV1>,
pub thread_id: String
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct FinishedState {
pub cred_id: Option<String>,
pub thread_id: String,
pub revocation_info_v1: Option<RevocationInfoV1>,
pub status: Status
}
impl From<(InitialState, String, u32, MessageId)> for OfferSentState {
fn from((state, offer, connection_handle, sent_id): (InitialState, String, u32, MessageId)) -> Self {
trace!("SM is now in OfferSent state");
OfferSentState {
offer,
cred_data: state.credential_json,
rev_reg_id: state.rev_reg_id,
tails_file: state.tails_file,
connection_handle,
thread_id: sent_id.0,
}
}
}
impl From<InitialState> for FinishedState {
fn from(_state: InitialState) -> Self {
trace!("SM is now in Finished state");
FinishedState {
cred_id: None,
thread_id: String::new(),
revocation_info_v1: None,
status: Status::Undefined,
}
}
}
impl From<(OfferSentState, CredentialRequest)> for RequestReceivedState {
fn from((state, request): (OfferSentState, CredentialRequest)) -> Self {
trace!("SM is now in Request Received state");
RequestReceivedState {
offer: state.offer,
cred_data: state.cred_data,
rev_reg_id: state.rev_reg_id,
tails_file: state.tails_file,
connection_handle: state.connection_handle,
request,
thread_id: state.thread_id,
}
}
}
impl From<(RequestReceivedState, MessageId)> for CredentialSentState {
fn from((state, _sent_id): (RequestReceivedState, MessageId)) -> Self {
trace!("SM is now in CredentialSent state");
CredentialSentState {
connection_handle: state.connection_handle,
revocation_info_v1: Some(RevocationInfoV1 {
cred_rev_id: None,
rev_reg_id: state.rev_reg_id,
tails_file: state.tails_file,
}),
thread_id: state.thread_id,
}
}<|fim▁hole|>
impl From<OfferSentState> for FinishedState {
fn from(state: OfferSentState) -> Self {
trace!("SM is now in Finished state");
FinishedState {
cred_id: None,
thread_id: state.thread_id,
revocation_info_v1: Some(RevocationInfoV1 {
cred_rev_id: None,
rev_reg_id: state.rev_reg_id,
tails_file: state.tails_file,
}),
status: Status::Undefined,
}
}
}
impl From<(OfferSentState, ProblemReport)> for FinishedState {
fn from((state, err): (OfferSentState, ProblemReport)) -> Self {
trace!("SM is now in Finished state");
FinishedState {
cred_id: None,
thread_id: state.thread_id,
revocation_info_v1: Some(RevocationInfoV1 {
cred_rev_id: None,
rev_reg_id: state.rev_reg_id,
tails_file: state.tails_file,
}),
status: Status::Failed(err),
}
}
}
impl From<(RequestReceivedState, Option<String>)> for FinishedState {
fn from((state, cred_rev_id): (RequestReceivedState, Option<String>)) -> Self {
trace!("SM is now in Finished state");
FinishedState {
cred_id: None,
thread_id: state.thread_id,
revocation_info_v1: Some(RevocationInfoV1 {
cred_rev_id: cred_rev_id,
rev_reg_id: state.rev_reg_id,
tails_file: state.tails_file,
}),
status: Status::Success,
}
}
}
impl From<(RequestReceivedState, ProblemReport)> for FinishedState {
fn from((state, err): (RequestReceivedState, ProblemReport)) -> Self {
trace!("SM is now in Finished state");
FinishedState {
cred_id: None,
thread_id: state.thread_id,
revocation_info_v1: Some(RevocationInfoV1 {
cred_rev_id: None,
rev_reg_id: state.rev_reg_id,
tails_file: state.tails_file,
}),
status: Status::Failed(err),
}
}
}
impl From<CredentialSentState> for FinishedState {
fn from(state: CredentialSentState) -> Self {
trace!("SM is now in Finished state");
FinishedState {
cred_id: None,
thread_id: state.thread_id,
revocation_info_v1: state.revocation_info_v1,
status: Status::Success,
}
}
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub enum HolderState {
OfferReceived(OfferReceivedState),
RequestSent(RequestSentState),
Finished(FinishedHolderState)
}
impl HolderState {
pub fn get_connection_handle(&self) -> u32 {
match self {
HolderState::OfferReceived(_) => 0,
HolderState::RequestSent(state) => state.connection_handle,
HolderState::Finished(_) => 0
}
}
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct RequestSentState {
pub req_meta: String,
pub cred_def_json: String,
pub connection_handle: u32
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct OfferReceivedState {
pub offer: CredentialOffer
}
impl OfferReceivedState {
pub fn new(offer: CredentialOffer) -> Self {
OfferReceivedState {
offer,
}
}
}
#[derive(Serialize, Deserialize, Debug, Clone)]
pub struct FinishedHolderState {
pub cred_id: Option<String>,
pub credential: Option<Credential>,
pub status: Status,
pub rev_reg_def_json: Option<String>
}
impl From<(OfferReceivedState, String, String, u32)> for RequestSentState {
fn from((_state, req_meta, cred_def_json, connection_handle): (OfferReceivedState, String, String, u32)) -> Self {
trace!("SM is now in RequestSent state");
RequestSentState {
req_meta,
cred_def_json,
connection_handle,
}
}
}
impl From<(RequestSentState, String, Credential, Option<String>)> for FinishedHolderState {
fn from((_, cred_id, credential, rev_reg_def_json): (RequestSentState, String, Credential, Option<String>)) -> Self {
trace!("SM is now in Finished state");
FinishedHolderState {
cred_id: Some(cred_id),
credential: Some(credential),
status: Status::Success,
rev_reg_def_json: rev_reg_def_json
}
}
}
impl From<(RequestSentState, ProblemReport)> for FinishedHolderState {
fn from((_, problem_report): (RequestSentState, ProblemReport)) -> Self {
trace!("SM is now in Finished state");
FinishedHolderState {
cred_id: None,
credential: None,
status: Status::Failed(problem_report),
rev_reg_def_json: None
}
}
}
impl From<(OfferReceivedState, ProblemReport)> for FinishedHolderState {
fn from((_state, problem_report): (OfferReceivedState, ProblemReport)) -> Self {
trace!("SM is now in Finished state");
FinishedHolderState {
cred_id: None,
credential: None,
status: Status::Failed(problem_report),
rev_reg_def_json: None
}
}
}<|fim▁end|> | } |
<|file_name|>test_baseIterable.py<|end_file_name|><|fim▁begin|>import os
from unittest import TestCase
import mock
from marvel.iterables import BaseIterable
class FooIterable(BaseIterable):
def __init__(self):
self.total_pages = 20
super(FooIterable, self).__init__()
def get_items(self):
if self.total_pages == 0:
raise StopIteration
else:
self.total_pages = self.total_pages - 1<|fim▁hole|>
class TestBaseIterable(TestCase):
def test_limit_pages_not_defined(self):
count = 0
for _ in FooIterable():
count = count + 1
assert count == 20
@mock.patch.dict(os.environ, {'TC_LIMIT_PAGES': '3'})
def test_limit_pages_with_3(self):
count = 0
for _ in FooIterable():
count = count + 1
assert count == 3<|fim▁end|> | return [self.total_pages] |
<|file_name|>ldp.py<|end_file_name|><|fim▁begin|># kamene.contrib.description = Label Distribution Protocol (LDP)
# kamene.contrib.status = loads
# http://git.savannah.gnu.org/cgit/ldpscapy.git/snapshot/ldpscapy-5285b81d6e628043df2a83301b292f24a95f0ba1.tar.gz
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
# Copyright (C) 2010 Florian Duraffourg
import struct
from kamene.packet import *
from kamene.fields import *
from kamene.ansmachine import *
from kamene.layers.inet import UDP
from kamene.layers.inet import TCP
from kamene.base_classes import Net
# Guess payload
def guess_payload(p):
LDPTypes = {
0x0001: LDPNotification,
0x0100: LDPHello,
0x0200: LDPInit,
0x0201: LDPKeepAlive,
0x0300: LDPAddress,
0x0301: LDPAddressWM,
0x0400: LDPLabelMM,
0x0401: LDPLabelReqM,
0x0404: LDPLabelARM,
0x0402: LDPLabelWM,
0x0403: LDPLabelRelM,
}
type = struct.unpack("!H",p[0:2])[0]
type = type & 0x7fff
if type == 0x0001 and struct.unpack("!H",p[2:4])[0] > 20:
return LDP
if type in LDPTypes:
return LDPTypes[type]
else:
return conf.raw_layer
## Fields ##
# 3.4.1. FEC TLV
class FecTLVField(StrField):
islist=1
def m2i(self, pkt, x):
nbr = struct.unpack("!H",x[2:4])[0]
used = 0
x=x[4:]
list=[]
while x:
#if x[0] == 1:
# list.append('Wildcard')
#else:
#mask=ord(x[8*i+3])
#add=inet_ntoa(x[8*i+4:8*i+8])
mask=ord(x[3])
nbroctets = mask / 8
if mask % 8:
nbroctets += 1
add=inet_ntoa(x[4:4+nbroctets]+"\x00"*(4-nbroctets))
list.append( (add, mask) )
used += 4 + nbroctets
x=x[4+nbroctets:]
return list
def i2m(self, pkt, x):
if type(x) is str:
return x
s = "\x01\x00"
l = 0
fec = ""
for o in x:
fec += "\x02\x00\x01"
# mask length
fec += struct.pack("!B",o[1])
# Prefix
fec += inet_aton(o[0])
l += 8
s += struct.pack("!H",l)
s += fec
return s
def size(self, s):
"""Get the size of this field"""
l = 4 + struct.unpack("!H",s[2:4])[0]
return l
def getfield(self, pkt, s):
l = self.size(s)
return s[l:],self.m2i(pkt, s[:l])
# 3.4.2.1. Generic Label TLV
class LabelTLVField(StrField):
def m2i(self, pkt, x):
return struct.unpack("!I",x[4:8])[0]
def i2m(self, pkt, x):
if type(x) is str:
return x
s = "\x02\x00\x00\x04"
s += struct.pack("!I",x)
return s
def size(self, s):
"""Get the size of this field"""
l = 4 + struct.unpack("!H",s[2:4])[0]
return l
def getfield(self, pkt, s):
l = self.size(s)
return s[l:],self.m2i(pkt, s[:l])
# 3.4.3. Address List TLV
class AddressTLVField(StrField):
islist=1
def m2i(self, pkt, x):
nbr = struct.unpack("!H",x[2:4])[0] - 2
nbr /= 4
x=x[6:]
list=[]
for i in range(0,nbr):
add = x[4*i:4*i+4]
list.append(inet_ntoa(add))
return list
def i2m(self, pkt, x):
if type(x) is str:
return x
l=2+len(x)*4
s = "\x01\x01"+struct.pack("!H",l)+"\x00\x01"
for o in x:
s += inet_aton(o)
return s
def size(self, s):
"""Get the size of this field"""
l = 4 + struct.unpack("!H",s[2:4])[0]
return l
def getfield(self, pkt, s):
l = self.size(s)
return s[l:],self.m2i(pkt, s[:l])
# 3.4.6. Status TLV
class StatusTLVField(StrField):
islist=1
def m2i(self, pkt, x):
l = []
statuscode = struct.unpack("!I",x[4:8])[0]
l.append( (statuscode & 2**31) >> 31)
l.append( (statuscode & 2**30) >> 30)
l.append( statuscode & 0x3FFFFFFF )
l.append( struct.unpack("!I", x[8:12])[0] )
l.append( struct.unpack("!H", x[12:14])[0] )
return l
def i2m(self, pkt, x):
if type(x) is str:
return x
s = "\x03\x00" + struct.pack("!H",10)
statuscode = 0
if x[0] != 0:
statuscode += 2**31
if x[1] != 0:
statuscode += 2**30
statuscode += x[2]
s += struct.pack("!I",statuscode)
if len(x) > 3:
s += struct.pack("!I",x[3])
else:
s += "\x00\x00\x00\x00"
if len(x) > 4:
s += struct.pack("!H",x[4])
else:
s += "\x00\x00"
return s
def getfield(self, pkt, s):
l = 14
return s[l:],self.m2i(pkt, s[:l])
# 3.5.2 Common Hello Parameters TLV
class CommonHelloTLVField(StrField):
islist = 1
def m2i(self, pkt, x):
list = []
v = struct.unpack("!H",x[4:6])[0]
list.append(v)
flags = struct.unpack("B",x[6])[0]
v = ( flags & 0x80 ) >> 7
list.append(v)
v = ( flags & 0x40 ) >> 7
list.append(v)
return list
def i2m(self, pkt, x):
if type(x) is str:
return x
s = "\x04\x00\x00\x04"
s += struct.pack("!H",x[0])
byte = 0
if x[1] == 1:
byte += 0x80
if x[2] == 1:
byte += 0x40
s += struct.pack("!B",byte)
s += "\x00"
return s
def getfield(self, pkt, s):
l = 8
return s[l:],self.m2i(pkt, s[:l])
# 3.5.3 Common Session Parameters TLV
class CommonSessionTLVField(StrField):
islist = 1
def m2i(self, pkt, x):
l = []
l.append(struct.unpack("!H",x[6:8])[0])
octet = struct.unpack("B",x[8:9])[0]
l.append( (octet & 2**7 ) >> 7 )
l.append( (octet & 2**6 ) >> 6 )
l.append( struct.unpack("B",x[9:10])[0] )
l.append( struct.unpack("!H",x[10:12])[0] )
l.append( inet_ntoa(x[12:16]) )
l.append( struct.unpack("!H",x[16:18])[0] )
return l
def i2m(self, pkt, x):
if type(x) is str:
return x
s = "\x05\x00\x00\x0E\x00\x01"
s += struct.pack("!H",x[0])
octet = 0
if x[1] != 0:
octet += 2**7
if x[2] != 0:
octet += 2**6
s += struct.pack("!B",octet)
s += struct.pack("!B",x[3])
s += struct.pack("!H",x[4])
s += inet_aton(x[5])
s += struct.pack("!H",x[6])
return s
def getfield(self, pkt, s):
l = 18
return s[l:],self.m2i(pkt, s[:l])
## Messages ##
# 3.5.1. Notification Message
class LDPNotification(Packet):
name = "LDPNotification"
fields_desc = [ BitField("u",0,1),
BitField("type", 0x0001, 15),
ShortField("len", None),
IntField("id", 0) ,
StatusTLVField("status",(0,0,0,0,0)) ]
def post_build(self, p, pay):
if self.len is None:
l = len(p) - 4
p = p[:2]+struct.pack("!H", l)+p[4:]
return p+pay
def guess_payload_class(self, p):
return guess_payload(p)
# 3.5.2. Hello Message
class LDPHello(Packet):
name = "LDPHello"
fields_desc = [ BitField("u",0,1),
BitField("type", 0x0100, 15),
ShortField("len", None),
IntField("id", 0) ,
CommonHelloTLVField("params",[180,0,0]) ]
def post_build(self, p, pay):
if self.len is None:
l = len(p) - 4
p = p[:2]+struct.pack("!H", l)+p[4:]
return p+pay
def guess_payload_class(self, p):
return guess_payload(p)
# 3.5.3. Initialization Message
class LDPInit(Packet):
name = "LDPInit"
fields_desc = [ BitField("u",0,1),
XBitField("type", 0x0200, 15),
ShortField("len", None),
IntField("id", 0),
CommonSessionTLVField("params",None)]
def post_build(self, p, pay):
if self.len is None:
l = len(p) - 4
p = p[:2]+struct.pack("!H", l)+p[4:]
return p+pay
def guess_payload_class(self, p):
return guess_payload(p)
# 3.5.4. KeepAlive Message
class LDPKeepAlive(Packet):
name = "LDPKeepAlive"
fields_desc = [ BitField("u",0,1),
XBitField("type", 0x0201, 15),
ShortField("len", None),
IntField("id", 0)]
def post_build(self, p, pay):
if self.len is None:
l = len(p) - 4
p = p[:2]+struct.pack("!H", l)+p[4:]
return p+pay
def guess_payload_class(self, p):
return guess_payload(p)<|fim▁hole|>
class LDPAddress(Packet):
name = "LDPAddress"
fields_desc = [ BitField("u",0,1),
XBitField("type", 0x0300, 15),
ShortField("len", None),
IntField("id", 0),
AddressTLVField("address",None) ]
def post_build(self, p, pay):
if self.len is None:
l = len(p) - 4
p = p[:2]+struct.pack("!H", l)+p[4:]
return p+pay
def guess_payload_class(self, p):
return guess_payload(p)
# 3.5.6. Address Withdraw Message
class LDPAddressWM(Packet):
name = "LDPAddressWM"
fields_desc = [ BitField("u",0,1),
XBitField("type", 0x0301, 15),
ShortField("len", None),
IntField("id", 0),
AddressTLVField("address",None) ]
def post_build(self, p, pay):
if self.len is None:
l = len(p) - 4
p = p[:2]+struct.pack("!H", l)+p[4:]
return p+pay
def guess_payload_class(self, p):
return guess_payload(p)
# 3.5.7. Label Mapping Message
class LDPLabelMM(Packet):
name = "LDPLabelMM"
fields_desc = [ BitField("u",0,1),
XBitField("type", 0x0400, 15),
ShortField("len", None),
IntField("id", 0),
FecTLVField("fec",None),
LabelTLVField("label",0)]
def post_build(self, p, pay):
if self.len is None:
l = len(p) - 4
p = p[:2]+struct.pack("!H", l)+p[4:]
return p+pay
def guess_payload_class(self, p):
return guess_payload(p)
# 3.5.8. Label Request Message
class LDPLabelReqM(Packet):
name = "LDPLabelReqM"
fields_desc = [ BitField("u",0,1),
XBitField("type", 0x0401, 15),
ShortField("len", None),
IntField("id", 0),
FecTLVField("fec",None)]
def post_build(self, p, pay):
if self.len is None:
l = len(p) - 4
p = p[:2]+struct.pack("!H", l)+p[4:]
return p+pay
def guess_payload_class(self, p):
return guess_payload(p)
# 3.5.9. Label Abort Request Message
class LDPLabelARM(Packet):
name = "LDPLabelARM"
fields_desc = [ BitField("u",0,1),
XBitField("type", 0x0404, 15),
ShortField("len", None),
IntField("id", 0),
FecTLVField("fec",None),
IntField("labelRMid",0)]
def post_build(self, p, pay):
if self.len is None:
l = len(p) - 4
p = p[:2]+struct.pack("!H", l)+p[4:]
return p+pay
def guess_payload_class(self, p):
return guess_payload(p)
# 3.5.10. Label Withdraw Message
class LDPLabelWM(Packet):
name = "LDPLabelWM"
fields_desc = [ BitField("u",0,1),
XBitField("type", 0x0402, 15),
ShortField("len", None),
IntField("id", 0),
FecTLVField("fec",None),
LabelTLVField("label",0)]
def post_build(self, p, pay):
if self.len is None:
l = len(p) - 4
p = p[:2]+struct.pack("!H", l)+p[4:]
return p+pay
def guess_payload_class(self, p):
return guess_payload(p)
# 3.5.11. Label Release Message
class LDPLabelRelM(Packet):
name = "LDPLabelRelM"
fields_desc = [ BitField("u",0,1),
XBitField("type", 0x0403, 15),
ShortField("len", None),
IntField("id", 0),
FecTLVField("fec",None),
LabelTLVField("label",0)]
def post_build(self, p, pay):
if self.len is None:
l = len(p) - 4
p = p[:2]+struct.pack("!H", l)+p[4:]
return p+pay
def guess_payload_class(self, p):
return guess_payload(p)
# 3.1. LDP PDUs
class LDP(Packet):
name = "LDP"
fields_desc = [ ShortField("version",1),
ShortField("len", None),
IPField("id","127.0.0.1"),
ShortField("space",0) ]
def post_build(self, p, pay):
if self.len is None:
l = len(p)+len(pay)-4
p = p[:2]+struct.pack("!H", l)+p[4:]
return p+pay
def guess_payload_class(self, p):
return guess_payload(p)
bind_layers( TCP, LDP, sport=646, dport=646 )
bind_layers( UDP, LDP, sport=646, dport=646 )<|fim▁end|> |
# 3.5.5. Address Message |
<|file_name|>base.py<|end_file_name|><|fim▁begin|><|fim▁hole|># -*- coding: utf-8 -*-
from tornado.web import RequestHandler, HTTPError
from schema import Session, Feed
from jinja2.exceptions import TemplateNotFound
class Base(RequestHandler):
@property
def env(self):
return self.application.env
def get_error_html(self, status_code, **kwargs):
try:
self.render('error/%s.html' % status_code)
except TemplateNotFound:
try:
self.render('error/50x.html', status_code=status_code)
except TemplateNotFound:
self.write('epic fail')
Session.close()
def on_finish(self):
Session.remove()
def render(self, template, **kwds):
try:
template = self.env.get_template(template)
except TemplateNotFound:
raise HTTPError(404)
kwds['feeds'] = Session.query(Feed).order_by(Feed.title)
self.env.globals['request'] = self.request
self.env.globals['static_url'] = self.static_url
self.env.globals['xsrf_form_html'] = self.xsrf_form_html
self.write(template.render(kwds))
Session.close()
class NoDestinationHandler(Base):
def get(self):
raise HTTPError(404)<|fim▁end|> | #!/usr/bin/env python |
<|file_name|>trendfile.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# encoding: utf-8
"""
trend.datasource.trendfile.py
Handling and parsing of trendfiles (*.hdb)
Copyright (C) 2016/2017 Stefan Braun
This program is free software: you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation, either version 2 of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details.
You should have received a copy of the GNU General Public License along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
import ctypes
import os
import datetime
import calendar
from trend.datasource.dbdata import HighLevelDBData as DBData
from trend.datasource.dbdata import HighLevelDBData2 as DBData2
import configparser
import string
import re
import collections
import misc.timezone as timezone
import itertools
from operator import itemgetter
DEBUGGING = True
class DBData_Timestamp_Search_Result(object):
"""
contains lists of DBData elements after search for a specific point of time:
-exact: elements with equal timestamps
if "exact"-list is empty, then these lists help to calculate values in between:
-before: elements with same timestamps before point of time
-after: elements with same timestamps after point of time
"""
def __init__(self):
self.before_list = []
self.exact_list = []
self.after_list = []
def set_before(self, before_list):
self.before_list = before_list
def set_exact(self, exact_list):
self.exact_list = exact_list
def set_after(self, after_list):
self.after_list = after_list
def get_trendfile_structure_obj(file_fullpath):
"""
returns appropriate structure for accessing all DBData elements
(ctypes.Structure doesn't allow unknown amounts of elements)
"""
DMSDP_NOF_BYTES = 83 # based on observations made in class "PDBSData" (pdbsdata.py)
TRENDDATA_OFFSET = 1024 # based ob reverse engineering *.hdb file format
filesize = os.path.getsize(file_fullpath)
# DBData could be ProMoS NT(c) version 1.x or version 2 =>choosing right version
# trendfiles v1.x ends with ".hdb" , v2.x ends with ".hdbx"
file_ext = file_fullpath.split('.')[-1]
if file_ext.upper() == u'HDB':
# using ProMoS NT(c) version 1.x
curr_DBData_class = DBData
else:
# using ProMoS NT(c) version 2.x
curr_DBData_class = DBData2
nof_dbdata_elems = (filesize - TRENDDATA_OFFSET) / ctypes.sizeof(curr_DBData_class)
class Trendfile_structure(ctypes.LittleEndianStructure):
"""
Header contains DMS datapoint name,
data section contains all DBData elements, amount depends on filesize...
"""
# contains some hints from http://stackoverflow.com/questions/18536182/parsing-binary-data-into-ctypes-structure-object-via-readinto
_fields_ = [
("dmsDatapoint", ctypes.c_char * DMSDP_NOF_BYTES), # DMS datapoint name
("UNKNOWN_BYTES", ctypes.c_char * (TRENDDATA_OFFSET - DMSDP_NOF_BYTES)), # perhaps unused
("dbdata", curr_DBData_class * nof_dbdata_elems) # array of DBData elements
]
# return an instance to caller
return Trendfile_structure()
class RawTrendfile(object):
def __init__(self, fileFullpath):
self._fileFullpath = fileFullpath
self._trendstruct = get_trendfile_structure_obj(self._fileFullpath)
self._parseFile_()
def _parseFile_(self):
# reading binary trendfile into ctypes structure
# contains hints from http://stackoverflow.com/questions/18536182/parsing-binary-data-into-ctypes-structure-object-via-readinto
with open(self._fileFullpath, "rb") as f:
f.readinto(self._trendstruct)
def get_dms_Datapoint(self):
return self._trendstruct.dmsDatapoint
def get_nof_dbdata_elements(self):
return len(self._trendstruct.dbdata)
def get_first_timestamp(self):
return self._trendstruct.dbdata[0].get_datetime()
def get_last_timestamp(self):
return self._trendstruct.dbdata[-1].get_datetime()
def get_dbdata_elements_generator(self, start_datetime=None, end_datetime=None):
"""
a generator for memory efficient retrieving DBData elements
(caller can only loop once through generator,
read here: http://stackoverflow.com/questions/231767/what-does-the-yield-keyword-do-in-python )
=>optional arguments allows filtering of DBData elements
"""
# FIXME: implement some filtering (same as in "trendfile.py.old"?) Or is further filtering done in HighLevelTrendfile?
for elem in self._trendstruct.dbdata:
ignore = False
if start_datetime:
if elem.get_datetime() < start_datetime:
ignore = True
if end_datetime:
if elem.get_datetime() > end_datetime:
ignore = True
if not ignore:
yield elem
def get_dbdata_elements_as_set(self):
"""
returns DBData elements in a set()
"""
# FIXME: should we improve this code? How can we get good performance in Megabytes of trenddata?
# FIXME: Should we save the set() for next function execution, or does we allow altering of trenddata in-memory?
return set(self._trendstruct.dbdata)
def get_DBData_Timestamp_Search_Result(self, timestamp_datetime):
"""
returns an instance of DBData_Timestamp_Search_Result according to given timestamp
"""
# FIXME: method works as expected, but we should find a cleaner solution...
search_result = DBData_Timestamp_Search_Result()
# begin and end indeces of three lists don't overlap: [before_begin, ..., before_end] [exact_begin, ..., exact_end] [after_begin, ..., after_end]
# based on examples from https://docs.python.org/2/library/bisect.html
idx_bisect_left = self._get_bisect_left(timestamp_datetime)
# based on example: "Locate the leftmost value exactly equal to x"
# =>collecting all DBData elements with given timestamp
if idx_bisect_left == len(self._trendstruct.dbdata):
# special case: timestamp is higher than highest DBData-timestamp
# =>do workaround: taking last element and continue processing...
curr_elem = self._trendstruct.dbdata[-1]
else:
curr_elem = self._trendstruct.dbdata[idx_bisect_left]
if idx_bisect_left != len(self._trendstruct.dbdata) and curr_elem.get_datetime() == timestamp_datetime:
# we found "exact_begin"
# appending all elements with same timestamp
idx = idx_bisect_left
exact_timestamp = curr_elem.get_datetime()
while idx < len(self._trendstruct.dbdata):
curr_elem = self._trendstruct.dbdata[idx]
if curr_elem.get_datetime() == exact_timestamp:
search_result.exact_list.append(self._trendstruct.dbdata[idx])
idx = idx + 1
else:
break
else:
# no exact search hits found... =>populating list "before"
if idx_bisect_left > 0:
idx = idx_bisect_left - 1
before_timestamp = self._trendstruct.dbdata[idx].get_datetime()
while idx >= 0:
# collecting DBData elements with equal timestamps
curr_elem = self._trendstruct.dbdata[idx]
if curr_elem.get_datetime() == before_timestamp:
search_result.before_list.append(self._trendstruct.dbdata[idx])
idx = idx - 1
else:
break
# ... and populating list "after"
# based on example "Find leftmost value greater than x"
idx_bisect_right = self._get_bisect_right(timestamp_datetime)
if idx_bisect_right != len(self._trendstruct.dbdata):
idx = idx_bisect_right
after_timestamp = self._trendstruct.dbdata[idx].get_datetime()
while idx < len(self._trendstruct.dbdata):
# collecting DBData elements with equal timestamps
curr_elem = self._trendstruct.dbdata[idx]
if curr_elem.get_datetime() == after_timestamp:
search_result.after_list.append(self._trendstruct.dbdata[idx])
idx = idx + 1
else:
break
return search_result
def _get_bisect_left(self, timestamp_datetime):
"""
returns index of DBData element with exact timestamp or later
"""
# our DBData elements are sorted by timestamp
# =>we can use binary searching! There's already class "bisect" for this.
# =>problem: using "bisect" is impossible, it can't handle DBData directly...: https://docs.python.org/2/library/bisect.html
# =>now we adapt algorithm from it's source: https://hg.python.org/cpython/file/2.7/Lib/bisect.py
# Find DBData ("bisect.bisect_left()")
low = 0
high = len(self._trendstruct.dbdata)
while low < high:
mid = (low + high) // 2
if self._trendstruct.dbdata[mid].get_datetime() < timestamp_datetime:
low = mid + 1
else:
high = mid
return low
def _get_bisect_right(self, timestamp_datetime):
"""
returns index of DBData element at time point later as in given timestamp
"""
# our DBData elements are sorted by timestamp
# =>we can use binary searching! There's already class "bisect" for this.
# =>problem: using "bisect" is impossible, it can't handle DBData directly...: https://docs.python.org/2/library/bisect.html
# =>now we adapt algorithm from it's source: https://hg.python.org/cpython/file/2.7/Lib/bisect.py
# Find DBData ("bisect.bisect_right()")
low = 0
high = len(self._trendstruct.dbdata)
while low < high:
mid = (low + high) // 2
if timestamp_datetime < self._trendstruct.dbdata[mid].get_datetime():
high = mid
else:
low = mid + 1
return low
class IndexedTrendfile(RawTrendfile):
"""
enhances a trendfile with OrderedDict as index:
key: timestamp
value: list of DBData elements with same timestamp
second OrderedDict index allows retrieving of DBData-lists by its known position
==>both index dictionaries MUST have same size!!!
"""
def __init__(self, fileFullpath):
RawTrendfile.__init__(self, fileFullpath)
self._indexed_by_timestamp = collections.OrderedDict()
self._indexed_by_index = []
# some statistics over DBData items
# with help from http://stackoverflow.com/questions/10576548/python-usable-max-and-min-values
self.minValue = -float("inf")
self.maxValue = +float("inf")
self._create_index()
if DEBUGGING:
print('constructor of IndexedTrendfile(): file "' + fileFullpath + '" is ready.')
def _create_index(self):
curr_list = []
curr_timestamp = self.get_first_timestamp()
for item in self._trendstruct.dbdata:
# do some statistics, it's not much effort since we already process every item
curr_val = item.get_value_as_float
if curr_val < self.minValue:
self.minValue = curr_val
if curr_val > self.maxValue:
self.maxValue = curr_val
# append item to current list,
# when there's a new timestamp build a new list
if item.get_datetime() == curr_timestamp:
curr_list.append(item)
else:
# indexing old DBData elements
self._indexed_by_timestamp[curr_timestamp] = curr_list
self._indexed_by_index.append(curr_list)
# preparing new list
curr_list = [item]
curr_timestamp = item.get_datetime()
# indexing last element
if curr_timestamp not in self._indexed_by_timestamp:
self._indexed_by_timestamp[curr_timestamp] = curr_list
self._indexed_by_index.append(curr_list)
assert len(self._indexed_by_timestamp) == len(self._indexed_by_index), 'both indexes MUST have same size!'
def get_DBData_Timestamp_Search_Result(self, timestamp_datetime):
"""
returns an instance of DBData_Timestamp_Search_Result according to given timestamp
=>first we try to get it directly from dictionary,
alternative is binary searching.
"""
# DBData_Timestamp_Search_Result() has three lists of DBData elements:
# begin and end of three lists don't overlap because they represent three different points in time:
# [before_begin, ..., before_end] [exact_begin, ..., exact_end] [after_begin, ..., after_end]
# (based on examples from https://docs.python.org/2/library/bisect.html )
try:
# try to get it directly from dictionary
search_result = DBData_Timestamp_Search_Result()
search_result.before_list = []
search_result.exact_list = self._indexed_by_timestamp[timestamp_datetime]
search_result.after_list = []
except KeyError:
# we have to binary search...
search_result = DBData_Timestamp_Search_Result()
# =>we adapted algorithm from this source: https://hg.python.org/cpython/file/2.7/Lib/bisect.py
# Find list ("bisect.bisect_left()")
low = 0
high = len(self._indexed_by_index)
while low < high:
mid = (low + high) // 2
dbdata_list = self._indexed_by_index[mid]
if dbdata_list[0].get_datetime() < timestamp_datetime:
low = mid + 1
else:
high = mid
idx_after = low
# now we have to interpret the given index:
# FIXME: should we care for corrupted trendfiles? (e.g. an empty file would throw IndexError-exception...)
if idx_after == 0:
# timestamp_datetime is older than our trenddata
search_result.before_list = []
search_result.exact_list = []
search_result.after_list = self._indexed_by_index[0]
elif idx_after == len(self._indexed_by_index):
# timestamp_datetime is younger than our trenddata
search_result.before_list = self._indexed_by_index[-1]
search_result.exact_list = []
search_result.after_list = []
else:
# timestamp_datetime must be between timestamps in our trenddata
search_result.before_list = self._indexed_by_index[idx_after - 1]
search_result.exact_list = []
search_result.after_list = self._indexed_by_index[idx_after]
return search_result
def get_dbdata_lists_generator(self):
"""
generate lists with DBData-elements grouped by timestamp
(ProMoS NT(c) PDBS daemon stores them in sequence, so they should be sorted by timestamp)
"""
for curr_list in self._indexed_by_index:
yield curr_list
def get_dbdata_list_of_lists(self):
"""
return whole list containing lists with DBData-elements grouped by timestamp
(ProMoS NT(c) PDBS daemon stores them in sequence, so they should be sorted by timestamp)
"""
return self._indexed_by_index
def get_dbdata_timestamps_generator(self):
"""
return all contained timestamps
(they should be in ascending order, ProMoS NT(c) PDBS daemon stores them in sequence in HDB files,
and we put then into an OrderedDict)
"""
return self._indexed_by_timestamp.iterkeys()
class _Cached_Trendfile(object):
"""Metadata and reference to a trendfile object, used by Trendfile_Cache_Handler()"""
# code is adapted from "PSC_file_selector.py"
def __init__(self, fullpath):
self._fullpath = fullpath
self._whole_file = None
self._modification_time = 0
self._filesize = 0
self._last_readtime = -1
def _read_metadata(self):
stat = os.stat(self._fullpath)
self._filesize = stat.st_size
self._modification_time = stat.st_mtime
def get_whole_file(self):
self._read_metadata()
if self._last_readtime <> self._modification_time:
# first reading or file changed
self._whole_file = IndexedTrendfile(self._fullpath)
self._last_readtime = self._modification_time
return self._whole_file
def get_metadata(self):
# examples from http://stackoverflow.com/questions/39359245/from-stat-st-mtime-to-datetime
# and http://stackoverflow.com/questions/6591931/getting-file-size-in-python
# and https://docs.python.org/2/library/stat.html
# and http://stackoverflow.com/questions/455612/limiting-floats-to-two-decimal-points
# and http://stackoverflow.com/questions/311627/how-to-print-date-in-a-regular-format-in-python
self._read_metadata()
size = float("{0:.2f}".format(self._filesize / 1024.0))
mod_time = datetime.datetime.fromtimestamp(self._modification_time).strftime("%Y.%m.%d %H:%M:%S")
return size, mod_time
class Trendfile_Cache_Handler(object):
"""
Holds trendfile objects in a cache for more efficiency
=>currently it's one program-wide cache
"""
# class-variable with cache
# =>using OrderedDict() so it's simple to maintain FIFO-cache
# https://docs.python.org/2/library/collections.html#collections.OrderedDict
_trendfile_cache_dict = collections.OrderedDict()
used_cache_size = 0
# soft-limit of maximum cache size
CACHESIZE_KBYTES = 1024 * 50 # 50MBytes
def get_trendfile_obj(self, filename_fullpath, cached=True):
"""optional parameter 'cached': False means working on an isolated Trendfile without interfering other instance holders
(it's possible that these DBData-lists could get corrupted, but I'm not 100% shure...)"""
# maintain FIFO-cache: deleting oldest item if cache is too large
curr_size = 0
for trf in Trendfile_Cache_Handler._trendfile_cache_dict:
size, mod_time = Trendfile_Cache_Handler._trendfile_cache_dict[trf].get_metadata()
curr_size = curr_size + size
while curr_size > Trendfile_Cache_Handler.CACHESIZE_KBYTES:
# remove oldest item
dumped_obj = Trendfile_Cache_Handler._trendfile_cache_dict.popitem(last=False)
# handling request
if cached:
if not filename_fullpath in Trendfile_Cache_Handler._trendfile_cache_dict:
# first time handling of this file...
Trendfile_Cache_Handler._trendfile_cache_dict[filename_fullpath] = _Cached_Trendfile(filename_fullpath)
return Trendfile_Cache_Handler._trendfile_cache_dict[filename_fullpath].get_whole_file()
else:
# bypass whole caching
return IndexedTrendfile(filename_fullpath)
class MetaTrendfile(object):
"""
provides all trenddata of a specific DMS datapoint from HDB files in project directory and backup directory
"""
def __init__(self, projectpath_str, dms_dp_str):
self.projectpath_str = projectpath_str
self.dms_dp_str = dms_dp_str
self.dat_dir = os.path.join(projectpath_str, 'dat')
self.backup_dir = self._get_backup_dir()
self.backup_subdirs_dict = self._find_backup_subdirs() # stores subdir as string (key: tuple (year, month))
self.trend_filename_str = self._get_trend_filename()
self.trf_cache_handler = Trendfile_Cache_Handler()
# timezone awareness (FIXME: currently fixed to 'Europe/Zurich')
_tz = timezone.Timezone().get_tz()
def _get_backup_dir(self):
# we have to read INI-file <projectpath>\cfg\PDBSBACK.CFG
# and get this attribut:
# [Backup]
# Path=D:\Trend
cfg_parser = configparser.ConfigParser()
configfile_fullpath = os.path.join(self.projectpath_str, 'cfg', 'PDBSBACK.CFG')
cfg_parser.read(configfile_fullpath)
return cfg_parser["Backup"]["Path"]
def _get_trend_filename(self):
# FIXME: I assume that all illegal characters in a DMS-datapoint gets replaced by "_" for getting a valid filename....
# FIXME: It's a known problem that these datapoints stores trends in the SAME TRENDFILE (=>corrupted trend!!!)
# FIXME: should we abort processing file if we can't find a file with the right DMS-DP-string in trendfile-header?
# MSR_U02:Test:L01_02:foo:Input
# MSR_U02:Test:L01:02:foo:Input
# MSR_U02:Test:L01:02_foo:Input
# ===>trenddata of all three TRD-datapoints were combined into file "MSR_U02_Test_L01_02_foo_Input.hdb" !!!
# some help from http://stackoverflow.com/questions/295135/turn-a-string-into-a-valid-filename
# =>now we restrict filename and hope PDBS does it the same way...
valid_chars = set(string.ascii_letters) ^ set(string.digits)
char_list = []
for char in self.dms_dp_str:
if char in valid_chars:
char_list.append(char)
else:
char_list.append('_')
return ''.join(char_list) + '.hdb'
def _find_backup_subdirs(self):
"""
get a list of available backup subdirectories
"""
mydict = {}
regex_pattern = r'Month_(?P<month>\d\d)\.(?P<year>\d\d\d\d)'
for subdir in os.listdir(self.backup_dir):
# an example for backup subdirectory:
# february 2017: "Month_02.2017"
m = re.match(regex_pattern, subdir)
if m:
# key in our dictionary: tuple (year, month) => value is whole regex match
key = m.group('year'), m.group('month')
mydict[key] = m.group(0)
return mydict
def _get_backup_subdir(self, timestamp_datetime):
"""
locate trenddata by timestamp
"""
# an example for backup subdirectory:
# february 2017: "Month_02.2017"
month = timestamp_datetime.strftime('%m')
year = timestamp_datetime.strftime('%Y')
return ''.join(['Month_', month, '.', year])
def _get_endpoint_timestamp(self, position_str="first"):
"""
returns timestamp of our oldest or youngest DBData element,
combined from dat- and backup directory.
=>parameter position_str is either "first" or "last"
("first" is default, anything other means "last")
"""
endpoint_timestamp_list = []
try:
# searching in project directory
filename_fullpath = os.path.join(self.dat_dir, self.trend_filename_str)
dat_trendfile = self.trf_cache_handler.get_trendfile_obj(filename_fullpath, cached=True)
if os.path.exists(filename_fullpath):
# processing this trendfile
if position_str == "first":
# getting oldest DBData
found_timestamp = dat_trendfile.get_first_timestamp()
else:
# getting youngest DBData
found_timestamp = dat_trendfile.get_last_timestamp()
endpoint_timestamp_list.append(found_timestamp)
except Exception as ex:
print('WARNING: MetaTrendfile._get_endpoint_timestamp(): got exception "' + repr(ex) + '" while getting trend from "' + filename_fullpath + '"')
try:
# searching in backup subdirectory
if position_str == "first":
# searching oldest DBData =>ascending sorting
reversed = False
else:
# searching youngest DBData =>descending sorting
reversed = True
filename_fullpath = ''
for year, month in sorted(self.backup_subdirs_dict.keys(), reverse=reversed):
subdir_str = self.backup_subdirs_dict[year, month]
filename_fullpath = os.path.join(self.backup_dir, subdir_str, self.trend_filename_str)
if os.path.exists(filename_fullpath):
# we found a backup, it contains perhaps older trenddata than in project dir...
break
if filename_fullpath:
bak_trendfile = self.trf_cache_handler.get_trendfile_obj(filename_fullpath, cached=True)
if position_str == "first":
# getting oldest DBData
found_timestamp = bak_trendfile.get_first_timestamp()
else:
# getting youngest DBData
found_timestamp = bak_trendfile.get_last_timestamp()
endpoint_timestamp_list.append(found_timestamp)
except Exception as ex:
print('WARNING: MetaTrendfile._get_endpoint_timestamp(): got exception "' + repr(ex) + '" while getting trend from "' + filename_fullpath + '"')
try:
if position_str == "first":
# getting oldest DBData
return min(endpoint_timestamp_list)
else:
# getting youngest DBData
return max(endpoint_timestamp_list)
except ValueError:
# seems we didn't found trenddata (list is empty)
return None
def get_first_timestamp(self):
"""
returns timestamp of our oldest DBData element
"""
return self._get_endpoint_timestamp(position_str="first")
def get_last_timestamp(self):
"""
returns timestamp of our youngest DBData element
"""
return self._get_endpoint_timestamp(position_str="last")
def get_DBData_Timestamp_Search_Result(self, timestamp_datetime):
"""
returns an instance of DBData_Timestamp_Search_Result according to given timestamp
=>remember: every search must return either an exact match or the values just before and after it, except first or last DBData!
"""
# FIXME: this method is too heavy and should be optimized... =>rewrite it!!!
search_result_list = []
try:
# searching in project directory
filename_fullpath = os.path.join(self.dat_dir, self.trend_filename_str)
if os.path.exists(filename_fullpath):
dat_trendfile = self.trf_cache_handler.get_trendfile_obj(filename_fullpath, cached=True)
search_result = dat_trendfile.get_DBData_Timestamp_Search_Result(timestamp_datetime)
if search_result:
search_result_list.append(search_result)
except Exception as ex:
print('WARNING: MetaTrendfile.get_DBData_Timestamp_Search_Result(): got exception "' + repr(ex) + '" while getting trend from "' + filename_fullpath + '"')
try:
# searching in backup directory:
# first we try to get a "exact_list"-hit, then we
# walk in both directions through directories and choose best match
# for "file containing before_list" <= timestamp <= "file containing after_list"
# trying specific timestamp
# (following flags are preparation for further searching)
bak_searching_past = True
bak_searching_future = True
curr_subdir = self._get_backup_subdir(timestamp_datetime)
filename_fullpath = os.path.join(self.backup_dir, curr_subdir, self.trend_filename_str)
if os.path.exists(filename_fullpath):
bak_trendfile = self.trf_cache_handler.get_trendfile_obj(filename_fullpath, cached=True)
search_result = bak_trendfile.get_DBData_Timestamp_Search_Result(timestamp_datetime)
if search_result:
# got a match... we need to decide how to search further...
search_result_list.append(search_result)
if search_result.exact_list:
# no need to search further...
bak_searching_past = False
bak_searching_future = False
elif search_result.before_list and not search_result.after_list:
bak_searching_past = False
bak_searching_future = True
elif search_result.after_list and not search_result.before_list:
bak_searching_past = True
bak_searching_future = False
except Exception as ex:
print('WARNING: [1] MetaTrendfile.get_DBData_Timestamp_Search_Result(): got exception "' + repr(ex) + '" while getting trend from "' + filename_fullpath + '"')
try:
if bak_searching_past:
# walking backwards through available directories
for year, month in sorted(self.backup_subdirs_dict.keys(), reverse=True):
backupdir_timestamp = datetime.datetime(year=int(year), month=int(month), day=1, tzinfo=MetaTrendfile._tz)
if backupdir_timestamp < timestamp_datetime:
subdir_str = self.backup_subdirs_dict[year, month]
filename_fullpath = os.path.join(self.backup_dir, subdir_str, self.trend_filename_str)
if os.path.exists(filename_fullpath):
# we found a backup, it should contain DBData before timestamp...
bak_trendfile = self.trf_cache_handler.get_trendfile_obj(filename_fullpath, cached=True)
search_result = bak_trendfile.get_DBData_Timestamp_Search_Result(timestamp_datetime)
if search_result:
search_result_list.append(search_result)
break
except Exception as ex:
print('WARNING: [2] MetaTrendfile.get_DBData_Timestamp_Search_Result(): got exception "' + repr(ex) + '" while getting trend from "' + filename_fullpath + '"')
try:
if bak_searching_future:
# walking forward through available directories
for year, month in sorted(self.backup_subdirs_dict.keys(), reverse=False):
# with help from http://stackoverflow.com/questions/42950/get-last-day-of-the-month-in-python
last_day_of_month = calendar.monthrange(int(year), int(month))[1]
backupdir_timestamp = datetime.datetime(year=int(year), month=int(month), day=last_day_of_month, tzinfo=MetaTrendfile._tz)
if backupdir_timestamp > timestamp_datetime:
subdir_str = self.backup_subdirs_dict[year, month]
filename_fullpath = os.path.join(self.backup_dir, subdir_str, self.trend_filename_str)
if os.path.exists(filename_fullpath):
# we found a backup, it should contain DBData after timestamp...
bak_trendfile = self.trf_cache_handler.get_trendfile_obj(filename_fullpath, cached=True)
search_result = bak_trendfile.get_DBData_Timestamp_Search_Result(timestamp_datetime)
if search_result:
search_result_list.append(search_result)
break
except Exception as ex:
print('WARNING: [3] MetaTrendfile.get_DBData_Timestamp_Search_Result(): got exception "' + repr(ex) + '" while getting trend from "' + filename_fullpath + '"')
# getting closest match from all search results
# FIXME: should we care for mismatch between amount of stored DBData items for one timestamp in DAT and Backup?
combined_sr = DBData_Timestamp_Search_Result()
# first try: getting exact match
if search_result_list:
dbdata_set = set()
for sr in search_result_list:
if sr.exact_list:
# using all DBData elements of all exact search results
dbdata_set.update(sr.exact_list)
if dbdata_set:
# got exact search results... =>give a list back to caller
combined_sr.exact_list = list(dbdata_set)
assert combined_sr.exact_list and not combined_sr.before_list and not combined_sr.after_list, 'exact match for this timestamp expected!'
return combined_sr
# second try: getting match as close as possible from all available sources
if search_result_list:
# collecting closest timestamp-lists
past_timestamp = datetime.datetime(year=1900, month=1, day=1, tzinfo=MetaTrendfile._tz)
future_timestamp = datetime.datetime(year=2100, month=1, day=1, tzinfo=MetaTrendfile._tz)
for sr in search_result_list:
# nearest timestamp in the past ("before_list")
if sr.before_list:
curr_timestamp = sr.before_list[0].get_datetime()
if curr_timestamp > past_timestamp:
# found a closer match
combined_sr.before_list = sr.before_list
past_timestamp = curr_timestamp
elif curr_timestamp == past_timestamp:
# found result from other source => inserting DBData elements in case some were missing
combined_sr.before_list.extend(sr.before_list)
# nearest timestamp in the future ("after_list")
if sr.after_list:
curr_timestamp = sr.after_list[0].get_datetime()
if curr_timestamp < future_timestamp:
# found a closer match
combined_sr.after_list = sr.after_list
future_timestamp = curr_timestamp
elif curr_timestamp == past_timestamp:
# found result from other source => inserting DBData elements in case some were missing
combined_sr.after_list.extend(sr.after_list)
assert not combined_sr.exact_list, 'no exact match for this timestamp expected!'
# get unique DBData elements
dbdata_before_set = set(combined_sr.before_list)
combined_sr.before_list = list(dbdata_before_set)
dbdata_after_set = set(combined_sr.after_list)
combined_sr.after_list = list(dbdata_after_set)
return combined_sr
def get_dbdata_lists_generator(self, start_datetime=None, end_datetime=None):
"""
a generator over all available trenddata for (perhaps) memory efficient retrieving lists with DBData elements,
items with same timestamp are grouped
(caller can only loop once through generator,
read here: http://stackoverflow.com/questions/231767/what-does-the-yield-keyword-do-in-python )
=>optional arguments allows filtering of DBData elements
=>using something similar like "mergesort" algorithm: https://en.wikipedia.org/wiki/Merge_sort
=>using "deque" objects for efficient popleft: https://docs.python.org/2/library/collections.html#collections.deque
=>using uncached trendfile, since we MODIFY the internal DBData-lists
"""
# FIXME: do a cleaner implementation of this...
# trenddata in project directory:
# =>using one queue
dat_deque = collections.deque()
try:
# trendfile in project directory:
filename_fullpath = os.path.join(self.dat_dir, self.trend_filename_str)
if os.path.exists(filename_fullpath):
# disable cache because we alter DBData-list...!!
dat_trendfile = self.trf_cache_handler.get_trendfile_obj(filename_fullpath, cached=False)
dat_deque = collections.deque(dat_trendfile.get_dbdata_list_of_lists())
except Exception as ex:
print('WARNING: MetaTrendfile.get_dbdata_lists_generator(): got exception "' + repr(ex) + '" while getting trend from "' + filename_fullpath + '"')
# trenddata in backup subdirectories:
# =>interpretation as one long queue, combined from different trendfiles
# (no subclassing of deque since we don't want to implement all methods of deque()...)
class _deque_wrapper(object):
def __init__(self, backup_subdirs_dict, backup_dir, trend_filename_str, trf_cache_handler):
self._deque_obj = collections.deque()
self._backup_subdirs_dict = backup_subdirs_dict
self._backup_dir = backup_dir
self._trend_filename_str = trend_filename_str
self.trf_cache_handler = trf_cache_handler
self._subdir_iter = iter(sorted(backup_subdirs_dict.keys(), reverse=False))
self._load_next_trendfile()
def _load_next_trendfile(self):
# "deque" is getting empty... trying to append next trendfile
try:
subdir_str = self._backup_subdirs_dict[self._subdir_iter.next()]
filename_fullpath = os.path.join(self._backup_dir, subdir_str, self._trend_filename_str)
if os.path.exists(filename_fullpath):
# we found a backup file
# disable cache because we alter DBData-list...!!
bak_trendfile = self.trf_cache_handler.get_trendfile_obj(filename_fullpath, cached=False)
self._deque_obj.extend(bak_trendfile.get_dbdata_list_of_lists())
except StopIteration:
# there are no more backup subdirs to check...
pass
def popleft(self):
# make shure this class contains enough trenddata, then return next element
# (if we let deque ran out of elements then statement "if bak_deque" will fail)
if len(self._deque_obj) <= 1:
# "deque" is empty... trying to append next trendfile
self._load_next_trendfile()
return self._deque_obj.popleft()
def __len__(self):
# overriding this hook method for allowing getting current size of deque object
# (with help from http://stackoverflow.com/questions/15114023/using-len-and-def-len-self-to-build-a-class
# and http://stackoverflow.com/questions/7816363/if-a-vs-if-a-is-not-none
# )
return len(self._deque_obj)
bak_deque = _deque_wrapper(self.backup_subdirs_dict, self.backup_dir, self.trend_filename_str, self.trf_cache_handler)
# checking tail of both deques and return list with unique DBData elements at oldest timestamp
# =>do until we returned all available trenddata
dat_list = []
bak_list = []
while True:
# get DBData-list from each tail
curr_list = []
if dat_deque and bak_deque:
# both trenddata source available...
# =>only get new items when there's nothing left from earlier round
if not dat_list:
dat_list = dat_deque.popleft()
if not bak_list:
bak_list = bak_deque.popleft()
# return older items to caller
# if we have same timestamp then we collect all unique DBData element
dat_timestamp = dat_list[0].get_datetime()
bak_timestamp = bak_list[0].get_datetime()
if bak_timestamp < dat_timestamp:
curr_list = bak_list
bak_list = []
elif dat_timestamp < bak_timestamp:
curr_list = dat_list
dat_list = []
else:
my_set = set(dat_list + bak_list)
curr_list = list(my_set)
dat_list = []
bak_list = []
elif dat_deque:
# only trenddata in project directory available...
curr_list = dat_deque.popleft()
elif bak_deque:
# only trenddata in backup directory available...
curr_list = bak_deque.popleft()
else:
# no more trenddata left...
curr_list = []
if curr_list:
# check filter
ignore = False
if start_datetime:
if curr_list[0].get_datetime() < start_datetime:
ignore = True
if end_datetime:
if curr_list[0].get_datetime() > end_datetime:
ignore = True
# nothing to do, stop iteration
break
if not ignore:
yield curr_list
else:
# nothing to do, stop iteration
break
def get_search_result_generator(self, start_datetime=None, stop_datetime=None):
"""
a generator creating DBData_Timestamp_Search_Result objects with all available trenddata as exact-list
(reusing all DBData lists from get_dbdata_lists_generator()
"""<|fim▁hole|> sr.exact_list.extend(curr_list)
yield sr
def get_dbdata_timestamps_generator(self, start_datetime=None, stop_datetime=None):
"""
a generator creating objects with timestamps and time difference to last timestamp of all available trenddata
(contains some copied code from "self.get_DBData_Timestamp_Search_Result(self, timestamp_datetime()" )
"""
# getting generators of all timestamp sources,
# then always yield the oldest timestamp of all active timestamp sources
# helper class for combining timestamp and time difference
class Tstamp(object):
"""
tstamp: timestamp as datetime.datetime object
diff: difference to last timestamp in seconds
"""
old_tstamp_dt = None
def __init__(self, curr_tstamp_dt):
self.tstamp_dt = curr_tstamp_dt
self.is_interpolated = False
if not Tstamp.old_tstamp_dt:
# first run =>first timestamp is always okay and should have timediff = 0
self.timediff = 0.0
else:
self.timediff = (curr_tstamp_dt - Tstamp.old_tstamp_dt).total_seconds()
Tstamp.old_tstamp_dt = curr_tstamp_dt
if not start_datetime:
start_datetime = datetime.datetime.fromtimestamp(0, tz=MetaTrendfile._tz)
if not stop_datetime:
stop_datetime = datetime.datetime(year=3000, month=1, day=1).replace(tzinfo=MetaTrendfile._tz)
prj_iter = iter([])
# trenddata in project directory
filename_fullpath = os.path.join(self.dat_dir, self.trend_filename_str)
if os.path.exists(filename_fullpath):
dat_trendfile = self.trf_cache_handler.get_trendfile_obj(filename_fullpath, cached=True)
usable = True
if dat_trendfile.get_last_timestamp() < start_datetime:
# trenddata is too old
usable = False
if dat_trendfile.get_first_timestamp() > stop_datetime:
# trenddata is too new
usable = False
if usable:
prj_iter = dat_trendfile.get_dbdata_timestamps_generator()
# lazily generating timestamp iterators from backup
# (idea from http://stackoverflow.com/questions/15004772/what-is-the-difference-between-chain-and-chain-from-iterable-in-itertools )
def generate_backup_iterators():
# walking forward through available directories
for year, month in sorted(self.backup_subdirs_dict.keys(), reverse=False):
if int(year) >= start_datetime.year and int(month) >= start_datetime.month and \
int(year) <= stop_datetime.year and int(month) <= stop_datetime.month:
# current backup directory should contain trenddata in requested timerange
subdir_str = self.backup_subdirs_dict[year, month]
filename_fullpath = os.path.join(self.backup_dir, subdir_str, self.trend_filename_str)
if os.path.exists(filename_fullpath):
# we found a backup, it should contain trenddata...
bak_trendfile = self.trf_cache_handler.get_trendfile_obj(filename_fullpath, cached=True)
yield bak_trendfile.get_dbdata_timestamps_generator()
# combine this generator of generators with trenddata from project
bak_iter = itertools.chain.from_iterable(generate_backup_iterators())
tstamp_generator_list = []
for source in [prj_iter, bak_iter]:
try:
# this list always contains head element from iterator, and iterator itself
new_source = [source.next(), source]
tstamp_generator_list.append(new_source)
except StopIteration:
pass
# request items from both generators, always returning smaller value
while tstamp_generator_list:
# consuming timestamps, returning always oldest one, updating first element
# sorting list of tuples: http://stackoverflow.com/questions/10695139/sort-a-list-of-tuples-by-2nd-item-integer-value
# =>getting source list with oldest timestamp
tstamp_generator_list = sorted(tstamp_generator_list, key=itemgetter(0))
oldest_source_list = tstamp_generator_list[0]
curr_tstamp, curr_iter = oldest_source_list[0], oldest_source_list[1]
if curr_tstamp >= start_datetime and curr_tstamp <= stop_datetime:
yield Tstamp(curr_tstamp)
try:
# update head-element of current timestamp source
oldest_source_list[0] = curr_iter.next()
except StopIteration:
# iterator is empty... =>removing this timestamp-source
tstamp_generator_list = tstamp_generator_list[1:]
def main(argv=None):
# for filename in ['C:\Promos15\proj\Winterthur_MFH_Schaffhauserstrasse\dat\MSR01_Allg_Aussentemp_Istwert.hdb']:
# #trf = RawTrendfile(filename)
# trf = IndexedTrendfile(filename)
# print('IndexedTrendfile "' + filename + '" contains trenddata of DMS datapoint ' + trf.get_dms_Datapoint())
# print('number of DBData elements: ' + str(trf.get_nof_dbdata_elements()))
# print('number of unique timestamps: ' + str(len(trf._indexed_by_timestamp)))
# print('timestamp of first DBData element: ' + trf.get_first_timestamp().strftime('%Y-%m-%d %H:%M:%S'))
# print('timestamp of last DBData element: ' + trf.get_last_timestamp().strftime('%Y-%m-%d %H:%M:%S'))
# print('(timespan is ' + str((trf.get_last_timestamp() - trf.get_first_timestamp()).days) + ' days)')
#
# # getting some values...
# # hint from http://stackoverflow.com/questions/4741243/how-to-pick-just-one-item-from-a-generator-in-python
# # =>we need to get another generator object when we want to get the same interation!
# for x in range(2):
# print('interpretation of values of some DBData elements: (run number ' + str(x) + ')')
# my_generator = trf.get_dbdata_elements_generator()
# for x in range(10):
# elem = my_generator.next()
# print('as boolean: ' + str(elem.get_value_as_boolean()) + '\tas int: ' + str(elem.get_value_as_int())+ '\tas float: ' + str(elem.get_value_as_float()))
#
# # getting trenddata by timestamp:
# timestamps_list = [datetime.datetime(year=2016, month=1, day=6, hour=4, minute=27, second=23),
# datetime.datetime(year=2016, month=1, day=6, hour=4, minute=27, second=24),
# datetime.datetime(year=2016, month=1, day=6, hour=4, minute=27, second=25),
# datetime.datetime(year=2017, month=2, day=6, hour=20, minute=15, second=13),
# datetime.datetime(year=2017, month=2, day=6, hour=20, minute=15, second=14),
# datetime.datetime(year=2017, month=2, day=6, hour=20, minute=15, second=15)]
# for timestamp in timestamps_list:
# print('getting DBData elements with timestamp "' + timestamp.strftime('%Y-%m-%d %H:%M:%S') + '"')
# result = trf.get_DBData_Timestamp_Search_Result(timestamp)
# print('\t"before_list" contains:')
# for item in result.before_list:
# print('\t\t' + item.get_datetime().strftime('%Y-%m-%d %H:%M:%S') + ' / ' + str(item.get_value_as_float()))
# print('\t"exact_list" contains:')
# for item in result.exact_list:
# print('\t\t' + item.get_datetime().strftime('%Y-%m-%d %H:%M:%S') + ' / ' + str(item.get_value_as_float()))
# print('\t"after_list" contains:')
# for item in result.after_list:
# print('\t\t' + item.get_datetime().strftime('%Y-%m-%d %H:%M:%S') + ' / ' + str(item.get_value_as_float()))
# trying backup and projekt directory:
print('######################################################################')
print('\nTEST: MetaTrendfile() ')
mytrf = MetaTrendfile('C:\Promos15\proj\Winterthur_MFH_Schaffhauserstrasse', 'MSR01:Allg:Aussentemp:Istwert')
print('get_first_timestamp(): ' + repr(mytrf.get_first_timestamp()))
print('get_last_timestamp(): ' + repr(mytrf.get_last_timestamp()))
# getting trenddata by timestamp:
timestamps_list = [datetime.datetime(year=2016, month=1, day=6, hour=4, minute=27, second=23, tzinfo=MetaTrendfile._tz),
datetime.datetime(year=2016, month=1, day=6, hour=4, minute=27, second=24, tzinfo=MetaTrendfile._tz),
datetime.datetime(year=2016, month=1, day=6, hour=4, minute=27, second=25, tzinfo=MetaTrendfile._tz),
datetime.datetime(year=2017, month=2, day=6, hour=20, minute=15, second=13, tzinfo=MetaTrendfile._tz),
datetime.datetime(year=2017, month=2, day=6, hour=20, minute=15, second=14, tzinfo=MetaTrendfile._tz),
datetime.datetime(year=2017, month=2, day=6, hour=20, minute=15, second=15, tzinfo=MetaTrendfile._tz),
datetime.datetime(year=1950, month=1, day=1, hour=0, minute=0, second=0, tzinfo=MetaTrendfile._tz),
datetime.datetime(year=2999, month=1, day=1, hour=0, minute=0, second=0, tzinfo=MetaTrendfile._tz)
]
for timestamp in timestamps_list:
print('getting DBData elements with timestamp "' + timestamp.strftime('%Y-%m-%d %H:%M:%S') + '"')
result = mytrf.get_DBData_Timestamp_Search_Result(timestamp)
print('\t"before_list" contains:')
for item in result.before_list:
print('\t\t' + item.get_datetime().strftime('%Y-%m-%d %H:%M:%S') + ' / ' + str(item.get_value_as_float()) + ' / ' + item.getStatusBitsString())
print('\t"exact_list" contains:')
for item in result.exact_list:
print('\t\t' + item.get_datetime().strftime('%Y-%m-%d %H:%M:%S') + ' / ' + str(item.get_value_as_float()) + ' / ' + item.getStatusBitsString())
print('\t"after_list" contains:')
for item in result.after_list:
print('\t\t' + item.get_datetime().strftime('%Y-%m-%d %H:%M:%S') + ' / ' + str(item.get_value_as_float()) + ' / ' + item.getStatusBitsString())
# test filtering identical timestamps
print('\n\ntest filtering identical timestamps')
print('######################################')
filename_fullpath = r'C:\Promos15\proj\Winterthur_MFH_Schaffhauserstrasse\dat\MSR01_Allg_Aussentemp_Istwert_LAST_VALUE.hdb'
#trf_test = IndexedTrendfile()
# TESTING cache:
trf_test = Trendfile_Cache_Handler().get_trendfile_obj(filename_fullpath, cached=True)
print('DMS-datapoint= ' + trf_test.get_dms_Datapoint())
print('\tcontained DBData-elements:')
for curr_dbdata in trf_test.get_dbdata_elements_generator():
print('\ttimestamp: ' + repr(curr_dbdata.get_datetime()))
print('\tvalue: ' + str(curr_dbdata.get_value_as_float()))
print('\thash()= ' + str(hash(curr_dbdata)))
print('\n\tDBData-elements retrieved as set():')
for curr_dbdata in trf_test.get_dbdata_elements_as_set():
print('\ttimestamp: ' + repr(curr_dbdata.get_datetime()))
print('\tvalue: ' + str(curr_dbdata.get_value_as_float()))
print('\thash()= ' + str(hash(curr_dbdata)))
# test number of unique timestamps
print('\n\ntest number of unique timestamps')
print('#####################################')
timespans = [#(None, None),
(datetime.datetime(year=2013, month=1, day=6, hour=0, minute=0, second=0, tzinfo=MetaTrendfile._tz), datetime.datetime(year=2014, month=1, day=6, hour=0, minute=0, second=0, tzinfo=MetaTrendfile._tz)),
(datetime.datetime(year=2014, month=1, day=6, hour=0, minute=0, second=0, tzinfo=MetaTrendfile._tz), datetime.datetime(year=2015, month=1, day=6, hour=0, minute=0, second=0, tzinfo=MetaTrendfile._tz)),
(datetime.datetime(year=2015, month=1, day=6, hour=0, minute=0, second=0, tzinfo=MetaTrendfile._tz), datetime.datetime(year=2016, month=1, day=6, hour=0, minute=0, second=0, tzinfo=MetaTrendfile._tz)),
(datetime.datetime(year=2016, month=1, day=6, hour=0, minute=0, second=0, tzinfo=MetaTrendfile._tz), datetime.datetime(year=2017, month=1, day=6, hour=0, minute=0, second=0, tzinfo=MetaTrendfile._tz)),
(datetime.datetime(year=2017, month=1, day=6, hour=0, minute=0, second=0, tzinfo=MetaTrendfile._tz), datetime.datetime(year=2018, month=1, day=6, hour=0, minute=0, second=0, tzinfo=MetaTrendfile._tz)),
(datetime.datetime(year=2013, month=1, day=6, hour=0, minute=0, second=0, tzinfo=MetaTrendfile._tz), datetime.datetime(year=2020, month=1, day=6, hour=0, minute=0, second=0, tzinfo=MetaTrendfile._tz)),
(datetime.datetime(year=2016, month=1, day=6, hour=4, minute=27, second=24, tzinfo=MetaTrendfile._tz), datetime.datetime(year=2017, month=2, day=6, hour=20, minute=15, second=14, tzinfo=MetaTrendfile._tz))]
for start, end in timespans:
try:
print('\tbetween ' + start.strftime('%Y-%m-%d %H:%M:%S') + ' and ' + end.strftime('%Y-%m-%d %H:%M:%S') + ':')
except AttributeError:
# this is testcase with (None, None)
print('\tin all available trenddata:')
x = 0
for item in mytrf.get_dbdata_lists_generator(start, end):
x = x + 1
print('\t\t=>' + str(x) + ' unique timestamps.')
# testing MetaTrendfile.get_dbdata_timestamps_generator()
print('\n\ntesting MetaTrendfile.get_dbdata_timestamps_generator()')
print('**********************************************************')
curr_trf = MetaTrendfile(r'C:\Promos15\proj\Foo', 'NS_MSR01a:H01:AussenTemp:Istwert')
with open(r'd:\foo_Aussentemp.csv', "w") as f:
for tstamp in curr_trf.get_dbdata_timestamps_generator(
start_datetime=datetime.datetime(year=2017, month=2, day=1, hour=0, minute=0, tzinfo=MetaTrendfile._tz),
stop_datetime=datetime.datetime(year=2017, month=2, day=6, hour=0, minute=0, tzinfo=MetaTrendfile._tz)
):
tstamp_str = str(tstamp.tstamp_dt)
timediff_str = str(tstamp.timediff)
f.write(';'.join([tstamp_str, timediff_str]) + '\n')
return 0 # success
if __name__ == '__main__':
status = main()
# disable closing of Notepad++
# sys.exit(status)<|fim▁end|> | for curr_list in self.get_dbdata_lists_generator(start_datetime, stop_datetime):
sr = DBData_Timestamp_Search_Result()
# returning this list of DBData elements as exact search hit |
<|file_name|>tut2.go<|end_file_name|><|fim▁begin|>package main
import (
_ "github.com/go-sql-driver/mysql"
"github.com/jinzhu/gorm"
)
type User struct {
gorm.Model
Name string
}
// `Profile` belongs to `User`, `UserID` is the foreign key
type Profile struct {
gorm.Model
UserID int // Cho nay quan trong => Tao ra column tuong ung trong table<|fim▁hole|> Name string
}
func main() {
db, err := gorm.Open("mysql", "root:root@/gorm?charset=utf8&parseTime=True&loc=Local")
if err != nil {
panic("failed to connect database")
}
defer db.Close()
// Enable Logger, show detailed log
db.LogMode(true)
// Migrate the schema
db.AutoMigrate(&User{}, &Profile{})
user := User{Name: "Phu"}
profile := Profile{User: user, Name: "Phu Profile"}
db.Create(&user)
db.Create(&profile)
}<|fim▁end|> | User User |
<|file_name|>issue-78549-ref-pat-and-str.rs<|end_file_name|><|fim▁begin|>// check-pass
// From https://github.com/rust-lang/rust/issues/78549
fn main() {
match "foo" {
"foo" => {},
&_ => {},
}
match "foo" {<|fim▁hole|> }
match ("foo", 0, "bar") {
(&_, 0, &_) => {},
("foo", _, "bar") => {},
(&_, _, &_) => {},
}
match (&"foo", "bar") {
(&"foo", &_) => {},
(&&_, &_) => {},
}
}<|fim▁end|> | &_ => {},
"foo" => {}, |
<|file_name|>TargetQueryRenderer.java<|end_file_name|><|fim▁begin|>package it.unibz.inf.ontop.renderer;
/*
* #%L
* ontop-obdalib-core
* %%
* Copyright (C) 2009 - 2014 Free University of Bozen-Bolzano
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
import it.unibz.inf.ontop.io.PrefixManager;
import it.unibz.inf.ontop.io.SimplePrefixManager;
import it.unibz.inf.ontop.model.Constant;
import it.unibz.inf.ontop.model.DatatypeFactory;
import it.unibz.inf.ontop.model.ExpressionOperation;
import it.unibz.inf.ontop.model.Function;
import it.unibz.inf.ontop.model.Predicate;
import it.unibz.inf.ontop.model.Term;
import it.unibz.inf.ontop.model.URIConstant;
import it.unibz.inf.ontop.model.URITemplatePredicate;
import it.unibz.inf.ontop.model.ValueConstant;
import it.unibz.inf.ontop.model.Variable;
import it.unibz.inf.ontop.model.impl.OBDADataFactoryImpl;
import it.unibz.inf.ontop.model.impl.OBDAVocabulary;
import java.util.ArrayList;
import java.util.List;
import java.util.Map;
/**
* A utility class to render a Target Query object into its representational
* string.
*/
public class TargetQueryRenderer {
private static final DatatypeFactory dtfac = OBDADataFactoryImpl.getInstance().getDatatypeFactory();
/**
* Transforms the given <code>OBDAQuery</code> into a string. The method requires
* a prefix manager to shorten full IRI name.
*/
public static String encode(List<Function> input, PrefixManager prefixManager) {
TurtleWriter turtleWriter = new TurtleWriter();
List<Function> body = input;
for (Function atom : body) {
String subject, predicate, object = "";
String originalString = atom.getFunctionSymbol().toString();
if (isUnary(atom)) {
Term subjectTerm = atom.getTerm(0);
subject = getDisplayName(subjectTerm, prefixManager);
predicate = "a";
object = getAbbreviatedName(originalString, prefixManager, false);
if (originalString.equals(object)) {
object = "<" + object + ">";
}
}
else if (originalString.equals("triple")) {
Term subjectTerm = atom.getTerm(0);
subject = getDisplayName(subjectTerm, prefixManager);
Term predicateTerm = atom.getTerm(1);
predicate = getDisplayName(predicateTerm, prefixManager);
Term objectTerm = atom.getTerm(2);
object = getDisplayName(objectTerm, prefixManager);
}
else {
Term subjectTerm = atom.getTerm(0);
subject = getDisplayName(subjectTerm, prefixManager);
predicate = getAbbreviatedName(originalString, prefixManager, false);
if (originalString.equals(predicate)) {
predicate = "<" + predicate + ">";
}
Term objectTerm = atom.getTerm(1);
object = getDisplayName(objectTerm, prefixManager);
}
turtleWriter.put(subject, predicate, object);
}
return turtleWriter.print();
}
/**
* Checks if the atom is unary or not.
*/
private static boolean isUnary(Function atom) {
return atom.getArity() == 1 ? true : false;
}
/**
* Prints the short form of the predicate (by omitting the complete URI and
* replacing it by a prefix name).
*
* Note that by default this method will consider a set of predefined
* prefixes, i.e., rdf:, rdfs:, owl:, xsd: and quest: To support this
* prefixes the method will temporally add the prefixes if they dont exist
* already, taken care to remove them if they didn't exist.
*
* The implementation requires at the moment, the implementation requires
* cloning the existing prefix manager, and hence this is highly inefficient
* method. *
*/
private static String getAbbreviatedName(String uri, PrefixManager pm, boolean insideQuotes) {
// Cloning the existing manager
PrefixManager prefManClone = new SimplePrefixManager();
Map<String,String> currentMap = pm.getPrefixMap();
for (String prefix: currentMap.keySet()) {
prefManClone.addPrefix(prefix, pm.getURIDefinition(prefix));
}
return prefManClone.getShortForm(uri, insideQuotes);
}
private static String appendTerms(Term term){
if (term instanceof Constant){
String st = ((Constant) term).getValue();
if (st.contains("{")){
st = st.replace("{", "\\{");
st = st.replace("}", "\\}");
}
return st;
}else{
return "{"+((Variable) term).getName()+"}";
}
}
//Appends nested concats
public static void getNestedConcats(StringBuilder stb, Term term1, Term term2){
if (term1 instanceof Function){
Function f = (Function) term1;
getNestedConcats(stb, f.getTerms().get(0), f.getTerms().get(1));
}else{
stb.append(appendTerms(term1));
}
if (term2 instanceof Function){
Function f = (Function) term2;
getNestedConcats(stb, f.getTerms().get(0), f.getTerms().get(1));
}else{
stb.append(appendTerms(term2));
}
}
/**
* Prints the text representation of different terms.
*/
private static String getDisplayName(Term term, PrefixManager prefixManager) {
StringBuilder sb = new StringBuilder();
if (term instanceof Function) {
Function function = (Function) term;
Predicate functionSymbol = function.getFunctionSymbol();
String fname = getAbbreviatedName(functionSymbol.toString(), prefixManager, false);
if (function.isDataTypeFunction()) {
// if the function symbol is a data type predicate
if (dtfac.isLiteral(functionSymbol)) {
// if it is rdfs:Literal
int arity = function.getArity();
if (arity == 1) {
// without the language tag
Term var = function.getTerms().get(0);
sb.append(getDisplayName(var, prefixManager));
sb.append("^^rdfs:Literal");
} else if (arity == 2) {
// with the language tag
Term var = function.getTerms().get(0);
Term lang = function.getTerms().get(1);
sb.append(getDisplayName(var, prefixManager));
sb.append("@");
if (lang instanceof ValueConstant) {
// Don't pass this to getDisplayName() because
// language constant is not written as @"lang-tag"
sb.append(((ValueConstant) lang).getValue());
} else {
sb.append(getDisplayName(lang, prefixManager));
}
}
} else { // for the other data types
Term var = function.getTerms().get(0);
sb.append(getDisplayName(var, prefixManager));<|fim▁hole|> Term firstTerm = function.getTerms().get(0);
if(firstTerm instanceof Variable)
{
sb.append("<{");
sb.append(((Variable) firstTerm).getName());
sb.append("}>");
}
else {
String template = ((ValueConstant) firstTerm).getValue();
// Utilize the String.format() method so we replaced placeholders '{}' with '%s'
String templateFormat = template.replace("{}", "%s");
List<String> varNames = new ArrayList<String>();
for (Term innerTerm : function.getTerms()) {
if (innerTerm instanceof Variable) {
varNames.add(getDisplayName(innerTerm, prefixManager));
}
}
String originalUri = String.format(templateFormat, varNames.toArray());
if (originalUri.equals(OBDAVocabulary.RDF_TYPE)) {
sb.append("a");
} else {
String shortenUri = getAbbreviatedName(originalUri, prefixManager, false); // shorten the URI if possible
if (!shortenUri.equals(originalUri)) {
sb.append(shortenUri);
} else {
// If the URI can't be shorten then use the full URI within brackets
sb.append("<");
sb.append(originalUri);
sb.append(">");
}
}
}
}
else if (functionSymbol == ExpressionOperation.CONCAT) { //Concat
List<Term> terms = function.getTerms();
sb.append("\"");
getNestedConcats(sb, terms.get(0),terms.get(1));
sb.append("\"");
//sb.append("^^rdfs:Literal");
}
else { // for any ordinary function symbol
sb.append(fname);
sb.append("(");
boolean separator = false;
for (Term innerTerm : function.getTerms()) {
if (separator) {
sb.append(", ");
}
sb.append(getDisplayName(innerTerm, prefixManager));
separator = true;
}
sb.append(")");
}
} else if (term instanceof Variable) {
sb.append("{");
sb.append(((Variable) term).getName());
sb.append("}");
} else if (term instanceof URIConstant) {
String originalUri = term.toString();
String shortenUri = getAbbreviatedName(originalUri, prefixManager, false); // shorten the URI if possible
if (!shortenUri.equals(originalUri)) {
sb.append(shortenUri);
} else {
// If the URI can't be shorten then use the full URI within brackets
sb.append("<");
sb.append(originalUri);
sb.append(">");
}
} else if (term instanceof ValueConstant) {
sb.append("\"");
sb.append(((ValueConstant) term).getValue());
sb.append("\"");
}
return sb.toString();
}
private TargetQueryRenderer() {
// Prevent initialization
}
}<|fim▁end|> | sb.append("^^");
sb.append(fname);
}
} else if (functionSymbol instanceof URITemplatePredicate) { |
<|file_name|>package_test.go<|end_file_name|><|fim▁begin|>package packages
import (
"testing"
"github.com/docker/docker/api/types"
"github.com/docker/docker/api/types/container"
"github.com/stretchr/testify/assert"
)
func TestNewPackageFromImage(t *testing.T) {
// with tag
pkg, err := NewPackageFromImage("whalebrew/foo:bar", types.ImageInspect{})
assert.Nil(t, err)
assert.Equal(t, pkg.Name, "foo")
assert.Equal(t, pkg.Image, "whalebrew/foo:bar")
// test labels
pkg, err = NewPackageFromImage("whalebrew/whalesay", types.ImageInspect{
ContainerConfig: &container.Config{
Labels: map[string]string{
"io.whalebrew.name": "ws",
"io.whalebrew.config.environment": "[\"SOME_CONFIG_OPTION\"]",<|fim▁hole|> },
})
assert.Nil(t, err)
assert.Equal(t, pkg.Name, "ws")
assert.Equal(t, pkg.Image, "whalebrew/whalesay")
assert.Equal(t, pkg.Environment, []string{"SOME_CONFIG_OPTION"})
assert.Equal(t, pkg.Volumes, []string{"/somesource:/somedest"})
assert.Equal(t, pkg.Ports, []string{"8100:8100"})
assert.Equal(t, pkg.Networks, []string{"host"})
}
func TestPreinstallMessage(t *testing.T) {
pkg := &Package{}
assert.Equal(t, pkg.PreinstallMessage(), "")
pkg = &Package{
Environment: []string{"AWS_ACCESS_KEY"},
Ports: []string{
"80:80",
"81:81:udp",
},
Volumes: []string{
"/etc/passwd:/passwdtosteal",
"/etc/readonly:/readonly:ro",
},
}
assert.Equal(t, pkg.PreinstallMessage(),
"This package needs additional access to your system. It wants to:\n"+
"\n"+
"* Read the environment variable AWS_ACCESS_KEY\n"+
"* Listen on TCP port 80\n"+
"* Listen on UDP port 81\n"+
"* Read and write to the file or directory \"/etc/passwd\"\n"+
"* Read the file or directory \"/etc/readonly\"\n",
)
}<|fim▁end|> | "io.whalebrew.config.volumes": "[\"/somesource:/somedest\"]",
"io.whalebrew.config.ports": "[\"8100:8100\"]",
"io.whalebrew.config.networks": "[\"host\"]",
}, |
<|file_name|>gui_charts.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
# -*- coding: utf-8 -*-
import tkinter as tk
from tkinter import ttk
from multiprocessing import pool
import numpy as np
import stat_functions as stat
from scipy import stats as scstats
from gui import gui_functions as guif
import os
import functions as fu
class process_charts(ttk.Frame):
def __init__(self,window,master,main_tabs,tabs):
style = ttk.Style()
style.configure("TFrame", background='white')
ttk.Frame.__init__(self,master,style='new.TFrame')
self.window=window
self.ll=None
self.initialized=False
self.subplot=tabs.subplot
self.print_subplot=tabs.print_subplot
self.img_tmp=tabs.img_tmp
def get_images_for_storage(self):
charts=[]
for i in self.charts:
charts.append((i.path,i.name))
return charts
def charts_from_stored(self,charts):
self.add_content()
if charts is None:
return
for i in range(len(charts)):
path,name=charts[i]
guif.display_from_img(self.charts[i],path,name,i)
def add_content(self):
self.n_charts=3
self.columnconfigure(0,weight=1)
for i in range(self.n_charts+1):
self.rowconfigure(i,weight=1)
tk.Label(self,text='Charts on normalized residuals:',bg='white',font='Tahoma 10 bold').grid(row=0,column=0)
self.charts=[]
for i in range(self.n_charts):
frm=tk.Frame(self,background='white')
frm.rowconfigure(0,weight=1)
frm.rowconfigure(1)
frm.columnconfigure(0,weight=1)
self.charts.append(tk.Label(frm,background='white'))
self.charts[i].grid(row=0,column=0)
chart_path=os.path.join(os.getcwd(),'img',f'chart{i}.png')
self.charts[i].path=fu.obtain_fname(chart_path)# self.img_tmp.TemporaryFile()
guif.setbutton(frm, 'Save image', lambda: self.save(self.n_charts-i-1),bg='white').grid(row=1,column=0)
frm.grid(row=i+1)
def save(self,i):
if not hasattr(self.charts[i],'graph_file') or not hasattr(self,'panel'):
print('No graphics displayed yet')
return
name=self.charts[i].name
f = tk.filedialog.asksaveasfile(mode='bw', defaultextension=".jpg",initialfile=f"{name}.jpg")
if f is None:
return
flst=[
self.histogram,
self.correlogram,
self.correlogram_variance,
]
flst[i](self.ll,self.print_subplot,f)
f.close()
def initialize(self,panel):
if not self.initialized:
self.panel=panel
self.add_content()
self.initialized=True
def plot(self,ll):
self.initialize(ll.panel)
self.ll=ll
self.histogram(ll,self.subplot)
self.correlogram(ll,self.subplot)
self.correlogram_variance(ll,self.subplot)
def histogram(self,ll,subplot,f=None):
N,T,k=ll.panel.X.shape
fgr,axs=subplot
n=ll.e_norm_centered.shape[2]
e=ll.e_norm_centered[self.panel.included[2]].flatten()
N=e.shape[0]
e=e.reshape((N,1))
grid_range=4
grid_step=0.05
h,grid=histogram(e,grid_range,grid_step)
norm=scstats.norm.pdf(grid)*grid_step
axs.bar(grid,h,color='grey', width=0.025,label='histogram')
axs.plot(grid,norm,'green',label='normal distribution')
axs.legend(prop={'size': 6})
name='Histogram - frequency'
axs.set_title(name)
if f is None:
guif.display(self.charts[0],name,0,subplot)
else:<|fim▁hole|> fgr,axs=subplot
lags=20
rho=stat.correlogram(self.panel, ll.e_norm_centered,lags)
x=np.arange(lags+1)
axs.bar(x,rho,color='grey', width=0.5,label='correlogram')
name='Correlogram - residuals'
axs.set_title(name)
if f is None:
guif.display(self.charts[1],name,1,subplot)
else:
guif.save(subplot,f)
def correlogram_variance(self,ll,subplot,f=None):
N,T,k=ll.panel.X.shape
fgr,axs=subplot
lags=20
e2=ll.e_norm_centered**2
e2=(e2-self.panel.mean(e2))*self.panel.included[3]
rho=stat.correlogram(self.panel, e2,lags)
x=np.arange(lags+1)
axs.bar(x,rho,color='grey', width=0.5,label='correlogram')
name='Correlogram - squared residuals'
axs.set_title(name)
if f is None:
guif.display(self.charts[2],name,2,subplot)
else:
guif.save(subplot,f)
def histogram(x,grid_range,grid_step):
N,k=x.shape
grid_n=int(2*grid_range/grid_step)
grid=np.array([i*grid_step-grid_range for i in range(grid_n)]).reshape((1,grid_n))
ones=np.ones((N,1))
x_u=np.concatenate((ones,x>=grid),1)
x_l=np.concatenate((x<grid,ones),1)
grid=np.concatenate((grid.flatten(),[grid[0,-1]+grid_step]))
histogram=np.sum((x_u*x_l),0)
if int(np.sum(histogram))!=N:
raise RuntimeError('Error in histogram calculation')
return histogram/N,grid<|fim▁end|> | guif.save(subplot,f)
def correlogram(self,ll,subplot,f=None): |
<|file_name|>migrations.go<|end_file_name|><|fim▁begin|>package accesscontrol
import "github.com/grafana/grafana/pkg/services/sqlstore/migrator"
func AddMigration(mg *migrator.Migrator) {
permissionV1 := migrator.Table{
Name: "permission",
Columns: []*migrator.Column{
{Name: "id", Type: migrator.DB_BigInt, IsPrimaryKey: true, IsAutoIncrement: true},
{Name: "role_id", Type: migrator.DB_BigInt},
{Name: "action", Type: migrator.DB_Varchar, Length: 190, Nullable: false},
{Name: "scope", Type: migrator.DB_Varchar, Length: 190, Nullable: false},
{Name: "created", Type: migrator.DB_DateTime, Nullable: false},
{Name: "updated", Type: migrator.DB_DateTime, Nullable: false},
},
Indices: []*migrator.Index{
{Cols: []string{"role_id"}},
{Cols: []string{"role_id", "action", "scope"}, Type: migrator.UniqueIndex},
},
}
mg.AddMigration("create permission table", migrator.NewAddTableMigration(permissionV1))
//------- indexes ------------------
mg.AddMigration("add unique index permission.role_id", migrator.NewAddIndexMigration(permissionV1, permissionV1.Indices[0]))
mg.AddMigration("add unique index role_id_action_scope", migrator.NewAddIndexMigration(permissionV1, permissionV1.Indices[1]))
roleV1 := migrator.Table{
Name: "role",
Columns: []*migrator.Column{
{Name: "id", Type: migrator.DB_BigInt, IsPrimaryKey: true, IsAutoIncrement: true},
{Name: "name", Type: migrator.DB_NVarchar, Length: 190, Nullable: false},
{Name: "description", Type: migrator.DB_Text, Nullable: true},
{Name: "version", Type: migrator.DB_BigInt, Nullable: false},
{Name: "org_id", Type: migrator.DB_BigInt},
{Name: "uid", Type: migrator.DB_NVarchar, Length: 40, Nullable: false},
{Name: "created", Type: migrator.DB_DateTime, Nullable: false},
{Name: "updated", Type: migrator.DB_DateTime, Nullable: false},
},
Indices: []*migrator.Index{
{Cols: []string{"org_id"}},
{Cols: []string{"org_id", "name"}, Type: migrator.UniqueIndex},
{Cols: []string{"org_id", "uid"}, Type: migrator.UniqueIndex},
},
}
mg.AddMigration("create role table", migrator.NewAddTableMigration(roleV1))
mg.AddMigration("add column display_name", migrator.NewAddColumnMigration(roleV1, &migrator.Column{
Name: "display_name", Type: migrator.DB_NVarchar, Length: 190, Nullable: true,
}))
mg.AddMigration("add column group_name", migrator.NewAddColumnMigration(roleV1, &migrator.Column{
Name: "group_name", Type: migrator.DB_NVarchar, Length: 190, Nullable: true,
}))
//------- indexes ------------------
mg.AddMigration("add index role.org_id", migrator.NewAddIndexMigration(roleV1, roleV1.Indices[0]))
mg.AddMigration("add unique index role_org_id_name", migrator.NewAddIndexMigration(roleV1, roleV1.Indices[1]))
mg.AddMigration("add index role_org_id_uid", migrator.NewAddIndexMigration(roleV1, roleV1.Indices[2]))
teamRoleV1 := migrator.Table{
Name: "team_role",
Columns: []*migrator.Column{
{Name: "id", Type: migrator.DB_BigInt, IsPrimaryKey: true, IsAutoIncrement: true},
{Name: "org_id", Type: migrator.DB_BigInt},
{Name: "team_id", Type: migrator.DB_BigInt},
{Name: "role_id", Type: migrator.DB_BigInt},
{Name: "created", Type: migrator.DB_DateTime, Nullable: false},
},
Indices: []*migrator.Index{
{Cols: []string{"org_id"}},
{Cols: []string{"org_id", "team_id", "role_id"}, Type: migrator.UniqueIndex},
{Cols: []string{"team_id"}},
},
}
mg.AddMigration("create team role table", migrator.NewAddTableMigration(teamRoleV1))
//------- indexes ------------------
mg.AddMigration("add index team_role.org_id", migrator.NewAddIndexMigration(teamRoleV1, teamRoleV1.Indices[0]))
mg.AddMigration("add unique index team_role_org_id_team_id_role_id", migrator.NewAddIndexMigration(teamRoleV1, teamRoleV1.Indices[1]))
mg.AddMigration("add index team_role.team_id", migrator.NewAddIndexMigration(teamRoleV1, teamRoleV1.Indices[2]))
userRoleV1 := migrator.Table{
Name: "user_role",
Columns: []*migrator.Column{
{Name: "id", Type: migrator.DB_BigInt, IsPrimaryKey: true, IsAutoIncrement: true},
{Name: "org_id", Type: migrator.DB_BigInt},
{Name: "user_id", Type: migrator.DB_BigInt},
{Name: "role_id", Type: migrator.DB_BigInt},
{Name: "created", Type: migrator.DB_DateTime, Nullable: false},
},
Indices: []*migrator.Index{
{Cols: []string{"org_id"}},
{Cols: []string{"org_id", "user_id", "role_id"}, Type: migrator.UniqueIndex},
{Cols: []string{"user_id"}},
},
}
mg.AddMigration("create user role table", migrator.NewAddTableMigration(userRoleV1))
//------- indexes ------------------
mg.AddMigration("add index user_role.org_id", migrator.NewAddIndexMigration(userRoleV1, userRoleV1.Indices[0]))
mg.AddMigration("add unique index user_role_org_id_user_id_role_id", migrator.NewAddIndexMigration(userRoleV1, userRoleV1.Indices[1]))
mg.AddMigration("add index user_role.user_id", migrator.NewAddIndexMigration(userRoleV1, userRoleV1.Indices[2]))
builtinRoleV1 := migrator.Table{
Name: "builtin_role",
Columns: []*migrator.Column{
{Name: "id", Type: migrator.DB_BigInt, IsPrimaryKey: true, IsAutoIncrement: true},
{Name: "role", Type: migrator.DB_NVarchar, Length: 190, Nullable: false},
{Name: "role_id", Type: migrator.DB_BigInt},<|fim▁hole|> },
Indices: []*migrator.Index{
{Cols: []string{"role_id"}},
{Cols: []string{"role"}},
},
}
mg.AddMigration("create builtin role table", migrator.NewAddTableMigration(builtinRoleV1))
//------- indexes ------------------
mg.AddMigration("add index builtin_role.role_id", migrator.NewAddIndexMigration(builtinRoleV1, builtinRoleV1.Indices[0]))
mg.AddMigration("add index builtin_role.name", migrator.NewAddIndexMigration(builtinRoleV1, builtinRoleV1.Indices[1]))
// Add org_id column to the builtin_role table
mg.AddMigration("Add column org_id to builtin_role table", migrator.NewAddColumnMigration(builtinRoleV1, &migrator.Column{
Name: "org_id", Type: migrator.DB_BigInt, Default: "0",
}))
mg.AddMigration("add index builtin_role.org_id", migrator.NewAddIndexMigration(builtinRoleV1, &migrator.Index{
Cols: []string{"org_id"},
}))
mg.AddMigration("add unique index builtin_role_org_id_role_id_role", migrator.NewAddIndexMigration(builtinRoleV1, &migrator.Index{
Cols: []string{"org_id", "role_id", "role"}, Type: migrator.UniqueIndex,
}))
// Make role.uid unique across Grafana instance
mg.AddMigration("Remove unique index role_org_id_uid", migrator.NewDropIndexMigration(roleV1, &migrator.Index{
Cols: []string{"org_id", "uid"}, Type: migrator.UniqueIndex,
}))
mg.AddMigration("add unique index role.uid", migrator.NewAddIndexMigration(roleV1, &migrator.Index{
Cols: []string{"uid"}, Type: migrator.UniqueIndex,
}))
seedAssignmentV1 := migrator.Table{
Name: "seed_assignment",
Columns: []*migrator.Column{
{Name: "builtin_role", Type: migrator.DB_NVarchar, Length: 190, Nullable: false},
{Name: "role_name", Type: migrator.DB_NVarchar, Length: 190, Nullable: false},
},
Indices: []*migrator.Index{
{Cols: []string{"builtin_role", "role_name"}, Type: migrator.UniqueIndex},
},
}
mg.AddMigration("create seed assignment table", migrator.NewAddTableMigration(seedAssignmentV1))
//------- indexes ------------------
mg.AddMigration("add unique index builtin_role_role_name", migrator.NewAddIndexMigration(seedAssignmentV1, seedAssignmentV1.Indices[0]))
}<|fim▁end|> | {Name: "created", Type: migrator.DB_DateTime, Nullable: false},
{Name: "updated", Type: migrator.DB_DateTime, Nullable: false}, |
<|file_name|>conftest.py<|end_file_name|><|fim▁begin|>""" pytest fixtures for test suite """
import pytest
import sqlalchemy as sa
import sqlalchemy.orm as orm
import testing.postgresql
import temporal_sqlalchemy as temporal
from . import models
@pytest.yield_fixture(scope='session')
def engine():
"""Creates a postgres database for testing, returns a sqlalchemy engine"""
db = testing.postgresql.Postgresql()
engine_ = sa.create_engine(db.url())
yield engine_
engine_.dispose()<|fim▁hole|> db.stop()
@pytest.yield_fixture(scope='session')
def connection(engine): # pylint: disable=redefined-outer-name
"""Session-wide test database."""
conn = engine.connect()
for extension in ['uuid-ossp', 'btree_gist']:
conn.execute("""\
CREATE EXTENSION IF NOT EXISTS "%s"
WITH SCHEMA pg_catalog
""" % extension)
for schema in [models.SCHEMA, models.TEMPORAL_SCHEMA]:
conn.execute('CREATE SCHEMA IF NOT EXISTS ' + schema)
models.basic_metadata.create_all(conn)
yield conn
conn.close()
@pytest.yield_fixture(scope="session")
def sessionmaker():
""" yields a temporalized sessionmaker -- per test session """
Session = orm.sessionmaker()
yield temporal.temporal_session(Session)
Session.close_all()
@pytest.yield_fixture()
def session(connection: sa.engine.Connection, sessionmaker: orm.sessionmaker): # pylint: disable=redefined-outer-name
""" yields temporalized session -- per test """
transaction = connection.begin()
sess = sessionmaker(bind=connection)
yield sess
transaction.rollback()
sess.close()<|fim▁end|> | |
<|file_name|>GuestMutations.test.ts<|end_file_name|><|fim▁begin|>import { expect } from 'chai';
import config from 'config';
import gqlV2 from 'fake-tag';
import sinon from 'sinon';
import { verifyJwt } from '../../../../../server/lib/auth';
import emailLib from '../../../../../server/lib/email';
import { randEmail } from '../../../../stores';
import { fakeUser, randStr } from '../../../../test-helpers/fake-data';
import { graphqlQueryV2, resetTestDB, waitForCondition } from '../../../../utils';
const sendConfirmationMutation = gqlV2/* GraphQL */ `
mutation SendGuestConfirmation($email: EmailAddress!) {
sendGuestConfirmationEmail(email: $email)
}
`;
const confirmGuestAccountMutation = gqlV2/* GraphQL */ `
mutation ConfirmGuestAccount($email: EmailAddress!, $emailConfirmationToken: String!) {
confirmGuestAccount(email: $email, emailConfirmationToken: $emailConfirmationToken) {
accessToken
account {
id
legacyId
slug
}
}
}
`;
const callSendConfirmation = (email, remoteUser = null) => {
return graphqlQueryV2(sendConfirmationMutation, { email }, remoteUser);
};
const callConfirmGuestAccount = (email, emailConfirmationToken, remoteUser = null) => {
return graphqlQueryV2(confirmGuestAccountMutation, { email, emailConfirmationToken }, remoteUser);
};
describe('server/graphql/v2/mutation/GuestMutations', () => {
let sandbox, emailSendMessageSpy;
before(async () => {
await resetTestDB();
sandbox = sinon.createSandbox();
emailSendMessageSpy = sandbox.spy(emailLib, 'sendMessage');
sandbox.stub(config, 'limits').value({
sendGuestConfirmPerMinutePerIp: 1000000,
sendGuestConfirmPerMinutePerEmail: 1000000,
confirmGuestAccountPerMinutePerIp: 1000000,
});
});
after(() => {
sandbox.restore();
});
describe('sendGuestConfirmationEmail', () => {
it('rejects if the user is signed in', async () => {
const user = await fakeUser();<|fim▁hole|>
it('rejects if the user is already verified', async () => {
const user = await fakeUser();
const result = await callSendConfirmation(user.email);
expect(result.errors).to.exist;
expect(result.errors[0].message).to.include('This account has already been confirmed');
});
it('rejects if the user does not exist', async () => {
const result = await callSendConfirmation(randEmail());
expect(result.errors).to.exist;
expect(result.errors[0].message).to.include('No user found for this email address');
});
it('sends the confirmation email', async () => {
const user = await fakeUser({ confirmedAt: null, emailConfirmationToken: randStr() });
const result = await callSendConfirmation(user.email);
result.errors && console.error(result.errors);
expect(result.errors).to.not.exist;
expect(result.data.sendGuestConfirmationEmail).to.be.true;
console.log(result);
await waitForCondition(() => emailSendMessageSpy.callCount === 1);
expect(emailSendMessageSpy.callCount).to.equal(1);
const [recipient, subject, body] = emailSendMessageSpy.args[0];
expect(recipient).to.eq(user.email);
expect(subject).to.eq('Open Collective: Verify your email');
expect(body).to.include(`/confirm/guest/${user.emailConfirmationToken}?email=${encodeURIComponent(user.email)}`);
});
});
describe('confirmGuestAccount', () => {
it('fails if account is already confirmed', async () => {
const user = await fakeUser({ confirmedAt: new Date(), emailConfirmationToken: randStr() });
const response = await callConfirmGuestAccount(user.email, user.emailConfirmationToken);
expect(response.errors).to.exist;
expect(response.errors[0].message).to.include('This account has already been verified');
});
it('fails if email is invalid', async () => {
const user = await fakeUser({ confirmedAt: null, emailConfirmationToken: randStr() });
const response = await callConfirmGuestAccount(randEmail(), user.emailConfirmationToken);
expect(response.errors).to.exist;
expect(response.errors[0].message).to.include('No account found for');
});
it('fails if confirmation token is invalid', async () => {
const user = await fakeUser({ confirmedAt: null, emailConfirmationToken: randStr() });
const response = await callConfirmGuestAccount(user.email, 'INVALID TOKEN');
expect(response.errors).to.exist;
expect(response.errors[0].message).to.include('Invalid email confirmation token');
});
it('returns a valid login token', async () => {
const user = await fakeUser({ confirmedAt: null, emailConfirmationToken: randStr() });
const response = await callConfirmGuestAccount(user.email, user.emailConfirmationToken);
response.errors && console.error(response.errors);
expect(response.errors).to.not.exist;
const { account, accessToken } = response.data.confirmGuestAccount;
expect(account.legacyId).to.eq(user.CollectiveId);
const decodedJwt = verifyJwt(accessToken);
expect(decodedJwt.sub).to.eq(user.id.toString());
});
});
describe('rate limiting', () => {
afterEach(() => {
sandbox.restore();
});
it('sendGuestConfirmationEmail is rate limited on IP', async () => {
sandbox.stub(config, 'limits').value({
sendGuestConfirmPerMinutePerIp: 0,
sendGuestConfirmPerMinutePerEmail: 1000000,
});
const user = await fakeUser({ confirmedAt: null });
const result = await callSendConfirmation(user.email);
expect(result.errors).to.exist;
expect(result.errors[0].message).to.include(
'An email has already been sent recently. Please try again in a few minutes.',
);
});
it('sendGuestConfirmationEmailis rate limited on email', async () => {
sandbox.stub(config, 'limits').value({
sendGuestConfirmPerMinutePerIp: 1000000,
sendGuestConfirmPerMinutePerEmail: 0,
});
const user = await fakeUser({ confirmedAt: null });
const result = await callSendConfirmation(user.email);
expect(result.errors).to.exist;
expect(result.errors[0].message).to.include(
'An email has already been sent for this address recently. Please check your SPAM folder, or try again in a few minutes.',
);
});
it('confirmGuestAccount rate limited on IP', async () => {
sandbox.stub(config, 'limits').value({ confirmGuestAccountPerMinutePerIp: 0 });
const user = await fakeUser({ confirmedAt: null, emailConfirmationToken: randStr() });
const response = await callConfirmGuestAccount(user.email, user.emailConfirmationToken);
expect(response.errors).to.exist;
expect(response.errors[0].message).to.include('Rate limit exceeded');
});
});
});<|fim▁end|> | const result = await callSendConfirmation(randEmail(), user);
expect(result.errors).to.exist;
expect(result.errors[0].message).to.include("You're signed in");
}); |
<|file_name|>PromiseObservable.d.ts<|end_file_name|><|fim▁begin|>import { Scheduler } from '../Scheduler';
import { Observable } from '../Observable';
import { Subscriber } from '../Subscriber';
import { TeardownLogic } from '../Subscription';
/**
* We need this JSDoc comment for affecting ESDoc.
* @extends {Ignored}
* @hide true
*/
export declare class PromiseObservable<T> extends Observable<T> {
private promise;
scheduler: Scheduler;
value: T;
/**
* Converts a Promise to an Observable.
*
* <span class="informal">Returns an Observable that just emits the Promise's
* resolved value, then completes.</span>
*
* Converts an ES2015 Promise or a Promises/A+ spec compliant Promise to an
* Observable. If the Promise resolves with a value, the output Observable
* emits that resolved value as a `next`, and then completes. If the Promise
* is rejected, then the output Observable emits the corresponding Error.<|fim▁hole|> *
* @example <caption>Convert the Promise returned by Fetch to an Observable</caption>
* var result = Rx.Observable.fromPromise(fetch('http://myserver.com/'));
* result.subscribe(x => console.log(x), e => console.error(e));
*
* @see {@link bindCallback}
* @see {@link from}
*
* @param {Promise<T>} promise The promise to be converted.
* @param {Scheduler} [scheduler] An optional Scheduler to use for scheduling
* the delivery of the resolved value (or the rejection).
* @return {Observable<T>} An Observable which wraps the Promise.
* @static true
* @name fromPromise
* @owner Observable
*/
static create<T>(promise: Promise<T>, scheduler?: Scheduler): Observable<T>;
constructor(promise: Promise<T>, scheduler?: Scheduler);
protected _subscribe(subscriber: Subscriber<T>): TeardownLogic;
}<|fim▁end|> | |
<|file_name|>files.py<|end_file_name|><|fim▁begin|>"""The image module provides basic functions for working with images in nipy.
Functions are provided to load, save and create image objects, along with
iterators to easily slice through volumes.
load : load an image from a file
save : save an image to a file
fromarray : create an image from a numpy array
Examples
--------
See documentation for load and save functions for 'working' examples.
"""
import os
import numpy as np
import nipy.io.imageformats as formats
from nipy.core.api import Image, is_image
from nifti_ref import (coordmap_from_affine, coerce_coordmap,
ijk_from_fps, fps_from_ijk)
def load(filename):
"""Load an image from the given filename.
Parameters
----------
filename : string
Should resolve to a complete filename path.
Returns
-------
image : An `Image` object
If successful, a new `Image` object is returned.
See Also
--------
save_image : function for saving images
fromarray : function for creating images from numpy arrays
Examples
--------
>>> from nipy.io.api import load_image
>>> from nipy.testing import anatfile
>>> img = load_image(anatfile)
>>> img.shape
(33, 41, 25)
"""
img = formats.load(filename)
aff = img.get_affine()
shape = img.get_shape()
hdr = img.get_header()
# Get info from NIFTI header, if present, to tell which axes are
# which. This is a NIFTI-specific kludge, that might be abstracted
# out into the image backend in a general way. Similarly for
# getting zooms
try:
fps = hdr.get_dim_info()
except (TypeError, AttributeError):
fps = (None, None, None)
ijk = ijk_from_fps(fps)
try:<|fim▁hole|> zooms = np.ones(len(shape))
aff = _match_affine(aff, len(shape), zooms)
coordmap = coordmap_from_affine(aff, ijk)
img = Image(img.get_data(), coordmap)
img.header = hdr
return img
def _match_affine(aff, ndim, zooms=None):
''' Fill or prune affine to given number of dimensions
>>> aff = np.arange(16).reshape(4,4)
>>> _match_affine(aff, 3)
array([[ 0, 1, 2, 3],
[ 4, 5, 6, 7],
[ 8, 9, 10, 11],
[12, 13, 14, 15]])
>>> _match_affine(aff, 2)
array([[ 0., 1., 3.],
[ 4., 5., 7.],
[ 0., 0., 1.]])
>>> _match_affine(aff, 4)
array([[ 0., 1., 2., 0., 3.],
[ 4., 5., 6., 0., 7.],
[ 8., 9., 10., 0., 11.],
[ 0., 0., 0., 1., 0.],
[ 0., 0., 0., 0., 1.]])
>>> aff = np.arange(9).reshape(3,3)
>>> _match_affine(aff, 2)
array([[0, 1, 2],
[3, 4, 5],
[6, 7, 8]])
'''
if aff.shape[0] != aff.shape[1]:
raise ValueError('Need square affine')
aff_dim = aff.shape[0] - 1
if ndim == aff_dim:
return aff
aff_diag = np.ones(ndim+1)
if not zooms is None:
n = min(len(zooms), ndim)
aff_diag[:n] = zooms[:n]
mod_aff = np.diag(aff_diag)
n = min(ndim, aff_dim)
# rotations zooms shears
mod_aff[:n,:n] = aff[:n,:n]
# translations
mod_aff[:n,-1] = aff[:n,-1]
return mod_aff
def save(img, filename, dtype=None):
"""Write the image to a file.
Parameters
----------
img : An `Image` object
filename : string
Should be a valid filename.
Returns
-------
image : An `Image` object
See Also
--------
load_image : function for loading images
fromarray : function for creating images from numpy arrays
Examples
--------
>>> import os
>>> import numpy as np
>>> from tempfile import mkstemp
>>> from nipy.core.api import fromarray
>>> from nipy.io.api import save_image
>>> data = np.zeros((91,109,91), dtype=np.uint8)
>>> img = fromarray(data, 'kji', 'zxy')
>>> fd, fname = mkstemp(suffix='.nii.gz')
>>> saved_img = save_image(img, fname)
>>> saved_img.shape
(91, 109, 91)
>>> os.unlink(fname)
>>> fd, fname = mkstemp(suffix='.img.gz')
>>> saved_img = save_image(img, fname)
>>> saved_img.shape
(91, 109, 91)
>>> os.unlink(fname)
>>> fname = 'test.mnc'
>>> saved_image = save_image(img, fname)
Traceback (most recent call last):
...
ValueError: Cannot save file type "minc"
Notes
-----
Filetype is determined by the file extension in 'filename'. Currently the
following filetypes are supported:
* Nifti single file : ['.nii', '.nii.gz']
* Nifti file pair : ['.hdr', '.hdr.gz']
* Analyze file pair : ['.img', 'img.gz']
"""
# Get header from image
try:
original_hdr = img.header
except AttributeError:
original_hdr = None
# Make NIFTI compatible version of image
newcmap, order = coerce_coordmap(img.coordmap)
Fimg = Image(np.transpose(np.asarray(img), order), newcmap)
# Expand or contract affine to 4x4 (3 dimensions)
rzs = Fimg.affine[:-1,:-1]
zooms = np.sqrt(np.sum(rzs * rzs, axis=0))
aff = _match_affine(Fimg.affine, 3, zooms)
ftype = _type_from_filename(filename)
if ftype.startswith('nifti1'):
klass = formats.Nifti1Image
elif ftype == 'analyze':
klass = formats.Spm2AnalyzeImage
else:
raise ValueError('Cannot save file type "%s"' % ftype)
# make new image
out_img = klass(data=np.asarray(Fimg),
affine=aff,
header=original_hdr)
hdr = out_img.get_header()
# work out phase, freqency, slice from coordmap names
ijk = newcmap.input_coords.coord_names
fps = fps_from_ijk(ijk)
# put fps into header if possible
try:
hdr.set_dim_info(*fps)
except AttributeError:
pass
# Set zooms
hdr.set_zooms(zooms)
# save to disk
out_img.to_filename(filename)
return Fimg
def _type_from_filename(filename):
''' Return image type determined from filename
Filetype is determined by the file extension in 'filename'.
Currently the following filetypes are supported:
* Nifti single file : ['.nii', '.nii.gz']
* Nifti file pair : ['.hdr', '.hdr.gz']
* Analyze file pair : ['.img', '.img.gz']
>>> _type_from_filename('test.nii')
'nifti1single'
>>> _type_from_filename('test')
'nifti1single'
>>> _type_from_filename('test.hdr')
'nifti1pair'
>>> _type_from_filename('test.hdr.gz')
'nifti1pair'
>>> _type_from_filename('test.img.gz')
'analyze'
>>> _type_from_filename('test.mnc')
'minc'
'''
if filename.endswith('.gz'):
filename = filename[:-3]
elif filename.endswith('.bz2'):
filename = filename[:-4]
_, ext = os.path.splitext(filename)
if ext in ('', '.nii'):
return 'nifti1single'
if ext == '.hdr':
return 'nifti1pair'
if ext == '.img':
return 'analyze'
if ext == '.mnc':
return 'minc'
raise ValueError('Strange file extension "%s"' % ext)
def as_image(image_input):
''' Load image from filename or pass through image instance
Parameters
----------
image_input : str or Image instance
image or string filename of image. If a string, load image and
return. If an image, pass through without modification
Returns
-------
img : Image or Image-like instance
Input object if `image_input` seemed to be an image, loaded Image
object if `image_input` was a string.
Raises
------
TypeError : if neither string nor image-like passed
Examples
--------
>>> from nipy.testing import anatfile
>>> from nipy.io.api import load_image
>>> img = as_image(anatfile)
>>> img2 = as_image(img)
>>> img2 is img
True
'''
if is_image(image_input):
return image_input
if isinstance(image_input, basestring):
return load(image_input)
raise TypeError('Expecting an image-like object or filename string')<|fim▁end|> | zooms = hdr.get_zooms()
except AttributeError: |
<|file_name|>multilevel-path-1.rs<|end_file_name|><|fim▁begin|>// edition:2021
#![feature(rustc_attrs)]
#![allow(unused)]
struct Point {
x: i32,
y: i32,
}
struct Wrapper {
p: Point,
}
fn main() {
let mut w = Wrapper { p: Point { x: 10, y: 10 } };
// Only paths that appears within the closure that directly start off
// a variable defined outside the closure are captured.
//
// Therefore `w.p` is captured
// Note that `wp.x` doesn't start off a variable defined outside the closure.
let c = #[rustc_capture_analysis]
//~^ ERROR: attributes on expressions are experimental
//~| NOTE: see issue #15701 <https://github.com/rust-lang/rust/issues/15701>
|| {
//~^ ERROR: First Pass analysis includes:
//~| ERROR: Min Capture analysis includes:
let wp = &w.p;
//~^ NOTE: Capturing w[(0, 0)] -> ImmBorrow
//~| NOTE: Min Capture w[(0, 0)] -> ImmBorrow
println!("{}", wp.x);
};
// Since `c` captures `w.p` by an ImmBorrow, `w.p.y` can't be mutated.<|fim▁hole|> c();
*py = 20
}<|fim▁end|> | let py = &mut w.p.y; |
<|file_name|>SimulateP2PNetwork.py<|end_file_name|><|fim▁begin|># Author:
# Ross Sbriscia, April 2016
import random
import sys
import traceback
import os
import math
import argparse
import time
import random_connected_graph
# Parses Arguments
parser = argparse.ArgumentParser(formatter_class=argparse.RawDescriptionHelpFormatter,
description='Simulates a P2P network with random dynamic connectivity in order to examines runtime \
and space complexity of search algorithms.',
epilog="Examples:\n\
\n\
python SimulateP2PNetwork.py 30 randomwalk -o output\n\
This will simulate a network of 30 vertices and use the random walk algorithm, outputs in output.csv\n\
\n\
python SimulateP2PNetwork.py 500 bfs -e 20\n\
This will simulate a network of 500 verticies, using the BFS algorithm, and run a \
new experiment (assign new start and end nodes) on each graph 20 times.\n\
\n\
python SimulateP2PNetwork.py 350 randomwalk -e 30 -t 200\n\
This will simulate a network of 500 verticies, using the randomwalk algorithm, run a \
new trial (assign new start and end nodes) on each graph 30 times and re-build (assign new edges) the graph 200 times.\n\
\n\
Output: a csv in the following form (one line per experiment);\n\
num vertices, num edges, algorithm used, average length of path found, if file NEVER found, average data per hop (bytes), runningtime (seconds)\n\
Ex:\n\
250,10898,randomwalk,32373,False,32,3.237650\n\
250,10898,randomwalk,25520,False,32,2.553203\n\
250,10898,randomwalk,28501,False,32,2.851121\n\
.\n\
.\n\
.")
parser.add_argument('vertices', type=int,
help='Number of vertices in the simulated network (Recommend <= 1000)')
parser.add_argument('algorithm', choices=['randomwalk', 'bfs', 'lazyrandomwalk', 'rw', 'lrw'],
help='Choose an algorithm to use in the simulation')
parser.add_argument('-r', type=int,
help='(Default 10) Number of RUNS per EXPERIMENTS (exact same start and end nodes, on network with same edges)')
parser.add_argument('-e', type=int,
help='(Default 50) Number of EXPERIMENTS per TRIAL (new start and end nodes, on network with same edges)')
parser.add_argument('-t', type=int,
help='(Default 100) Number of TRIALS (times graph will be re-built with new edges)')
parser.add_argument('-o',
help='Specify output filename')
args = parser.parse_args()
numberOfVertices = args.vertices
algorithm = args.algorithm
numberOfFailiures = 0
pofEdgeFail = 0.001
maxPathLength = 4 * (math.pow(numberOfVertices, 3))
if args.t:
numberOfTrails = args.t
else:
numberOfTrails = 100
if (algorithm == "rw"):
algorithm = "randomwalk";
if (algorithm == "lrw"):
algorithm = "lazyrandomwalk";
if args.o:
outfileName = "./Data/" + args.o<|fim▁hole|>else:
if (algorithm == "randomwalk"):
outfileName = "./Data/RandomWalkSimulation.csv"
if (algorithm == "bfs"):
outfileName = "./Data/BFSSimulation.csv"
if (algorithm == "lazyrandomwalk"):
outfileName = "./Data/LazyRandomWalkSimulation.csv"
if args.e:
numberOfExperiments = args.e
else:
numberOfExperiments = 50
if args.r:
numberOfRuns = args.r
else:
numberOfRuns = 10
# Code Starts Here!
# Returns the maximum possible number of edges of an undirected graph with
# n verticies
def maxEdges(n):
return (n * (n - 1)) / 2
# Runs the algorithm and collects data
def runAlgorithm(graph, startHost, endHost):
# Algorithm sends a constant ammount of data per hop. 8 bytes of data.
if (algorithm == "randomwalk"):
hops = []
currHost = random.choice(graph.neighborSet[startHost])
start_Per_Host_Computation_Time = time.time()*1000
while (len(hops) <= maxPathLength and currHost != endHost):
deadset = []
for neighbor in graph.neighborSet[currHost]: # Calculates random edge failiure
if (pofEdgeFail > random.random()):
deadset.append(neighbor)
activeneighbors = list(set(graph.neighborSet[currHost])-set(deadset));
if not activeneighbors: # if every edge dies
currHost = random.choice(graph.neighborSet[currHost]);
else:
currHost = random.choice(activeneighbors)
hops.append(currHost)
finish_Per_Host_Computation_Time = time.time()*1000
return hops, (finish_Per_Host_Computation_Time - start_Per_Host_Computation_Time)
if (algorithm == "bfs"):
# maintain a queue of paths
queue = []
# push the first path into the queue
queue.append([startHost])
starttime = time.time()*1000
while queue:
# get the first path from the queue
path = queue.pop(0)
# get the last node from the path
currHost = path[-1]
# path found
if currHost == endHost:
finishtime = time.time()*1000
return path, (finishtime - starttime)
# enumerate all adjacent nodes, construct a new path and push it
# into the queue
for adjacent in graph.neighborSet[currHost]:
if (pofEdgeFail > random.random()):
continue;
new_path = list(path)
new_path.append(adjacent)
queue.append(new_path)
finishtime = time.time()*1000
return path, (finishtime - starttime);
if (algorithm == "lazyrandomwalk"):
hops = []
currHost = random.choice(graph.neighborSet[startHost])
start_Per_Host_Computation_Time = time.time()*1000
while (len(hops) <= maxPathLength and currHost != endHost):
takeSelfLoop = random.random();
deadset = []
for neighbor in graph.neighborSet[currHost]: # Calculates random edge failiure
if (pofEdgeFail > random.random()):
deadset.append(neighbor)
activeneighbors = list(set(graph.neighborSet[currHost])-set(deadset));
if not activeneighbors:
currHost = random.choice(graph.neighborSet[currHost]);
else:
if (takeSelfLoop < .5): # If we do NOT take a self-loop
currHost = random.choice(activeneighbors)
hops.append(currHost)
finish_Per_Host_Computation_Time = time.time()*1000
return hops, (finish_Per_Host_Computation_Time - start_Per_Host_Computation_Time)
# Returns a connected graph with randomized edges.
# This simulates the reality of real p2p networks,
# as hosts very often come online and go offline.
def shuffleConnections():
edges = random.randrange(numberOfVertices - 1, maxEdges(numberOfVertices))
verts = [x for x in xrange(int(numberOfVertices))]
network = random_connected_graph.random_walk(verts, edges)
network.sort_edges()
# print "Generated network containing:\n\
#%d hosts (vertices)\n\
#%d connections (edges)" % (len(network.nodes), len(network.edges));
return network
# Shuffles node looking for the file, and node which has the file
def shuffleHostsOfInterest():
startNode = random.randrange(0, numberOfVertices - 1)
endNode = random.randrange(0, numberOfVertices - 1)
if (startNode == endNode):
return shuffleHostsOfInterest()
else:
return startNode, endNode
# setup loading bar
print "\n\nRunning Simulations..."
trialRatio = math.ceil(numberOfTrails * 2 / 100)
# sys.stdout.write("[%s]" % (" " * 50))
# sys.stdout.flush()
# sys.stdout.write("\b" * (50 + 1)) # return to start of line, after '['
# Run the expirement
outputCSV = open(outfileName, 'w')
sys.stdout.write("\033[92m0\033[0m")
for currentTrial in range(numberOfTrails):
network = shuffleConnections()
for currentExeriment in range(numberOfExperiments):
startHost, endHost = shuffleHostsOfInterest()
hops = []
runtime = []
spacePerHost = 32 # Estimated 32 bytes of data for the base request.
for currentRun in range(numberOfRuns):
numhops, searchtime = runAlgorithm(network, startHost, endHost)
runtime.append(searchtime)
hops.append(sum(numhops))
averageRunTime = sum(runtime) / len(runtime)
averageHopLength = sum(hops) / len(hops)
# Adds link latency into computation, estimating 0.0001 second
# transmission delay/hop
averageRunTime += (averageHopLength * 0.1)
if algorithm == "bfs":
spacePerHost += averageHopLength * 32 # Each new host IP needs to be enqueued into the datastructure
includedFailiure = False
# Allows for a 10Mbs (average) upload speed bottleneck on all hosts
averageRunTime += (spacePerHost / 1250)
# Processing Time For Algorithm
averageRunTime += (spacePerHost / 100)
if maxPathLength in hops:
includedFailiure = True
outputCSV.write("%d,%d,%s,%d,%r,%d,%.6f\n" % (numberOfVertices, len(
network.edges), algorithm, averageHopLength, includedFailiure, spacePerHost, averageRunTime))
# Progress
number_of_chars_to_erase = len(str(currentTrial)) + 11 + len(str(numberOfTrails))
print("\033[92mTrial:\t%d/%d\033[0m " % (currentTrial+1,numberOfTrails))
sys.stdout.write('\n')<|fim▁end|> | |
<|file_name|>app.routing.ts<|end_file_name|><|fim▁begin|>import { ModuleWithProviders } from '@angular/core'
import { Routes, RouterModule } from '@angular/router'
//anything not maching a registered URL will go to the login page
const routes: Routes = [
{ path: '**', redirectTo: '/login', pathMatch: 'full' }
]
<|fim▁hole|><|fim▁end|> | export const routing: ModuleWithProviders = RouterModule.forRoot(routes) |
<|file_name|>ClangFunctionDifferenceBear.py<|end_file_name|><|fim▁begin|>import functools
from itertools import combinations
from bears.c_languages.ClangBear import clang_available, ClangBear
from bears.c_languages.codeclone_detection.ClangCountingConditions import (
condition_dict)
from bears.c_languages.codeclone_detection.ClangCountVectorCreator import (
ClangCountVectorCreator)<|fim▁hole|>from bears.c_languages.codeclone_detection.CloneDetectionRoutines import (
compare_functions, get_count_matrices)
from coala_utils.string_processing.StringConverter import StringConverter
from coalib.bears.GlobalBear import GlobalBear
from dependency_management.requirements.PipRequirement import PipRequirement
from coalib.collecting.Collectors import collect_dirs
from coalib.results.HiddenResult import HiddenResult
from coalib.settings.Setting import path_list, typed_ordered_dict
from coala_utils.decorators import (enforce_signature, generate_ordering,
generate_repr)
# counting_condition_dict is a function object generated by typed_dict. This
# function takes a setting and creates a dictionary out of it while it
# converts all keys to counting condition function objects (via the
# condition_dict) and all values to floats while unset values default to 1.
counting_condition_dict = typed_ordered_dict(
lambda setting: condition_dict[str(setting).lower()],
float,
1)
default_cc_dict = counting_condition_dict(StringConverter(
"""
used: 0,
returned: 1.4,
is_condition: 0,
in_condition: 1.4,
in_second_level_condition: 1.4,
in_third_level_condition: 1.0,
is_assignee: 0,
is_assigner: 0.6,
loop_content: 0,
second_level_loop_content,
third_level_loop_content,
is_param: 2,
is_called: 1.4,
is_call_param: 0.0,
in_sum: 2.0,
in_product: 0,
in_binary_operation,
member_accessed"""))
@generate_repr(('id', hex),
'origin',
'differences',
'count_matrices',
'message')
@generate_ordering('origin',
'differences',
'count_matrices',
'message')
class ClangFunctionDifferenceResult(HiddenResult):
@enforce_signature
def __init__(self, origin,
differences: list,
count_matrices: dict):
super().__init__(origin,
[differences, count_matrices])
self.differences = differences
self.count_matrices = count_matrices
def get_difference(function_pair,
count_matrices,
average_calculation,
poly_postprocessing,
exp_postprocessing):
"""
Retrieves the difference between two functions using the munkres algorithm.
:param function_pair: A tuple containing both indices for the
count_matrices dictionary.
:param count_matrices: A dictionary holding CMs.
:param average_calculation: If set to true the difference calculation
function will take the average of all variable
differences as the difference, else it will
normalize the function as a whole and thus
weighting in variables dependent on their size.
:param poly_postprocessing: If set to true, the difference value of big
function pairs will be reduced using a
polynomial approach.
:param exp_postprocessing: If set to true, the difference value of big
function pairs will be reduced using an
exponential approach.
:return: A tuple containing both function ids and their
difference.
"""
function_1, function_2 = function_pair
return (function_1,
function_2,
compare_functions(count_matrices[function_1],
count_matrices[function_2],
average_calculation,
poly_postprocessing,
exp_postprocessing))
class ClangFunctionDifferenceBear(GlobalBear):
check_prerequisites = classmethod(clang_available)
LANGUAGES = ClangBear.LANGUAGES
REQUIREMENTS = ClangBear.REQUIREMENTS | {PipRequirement('munkres3', '1.0')}
def run(self,
counting_conditions: counting_condition_dict = default_cc_dict,
average_calculation: bool = False,
poly_postprocessing: bool = True,
exp_postprocessing: bool = False,
extra_include_paths: path_list = (),
):
"""
Retrieves similarities for code clone detection. Those can be reused in
another bear to produce results.
Postprocessing may be done because small functions are less likely to
be clones at the same difference value than big functions which may
provide a better refactoring opportunity for the user.
:param counting_conditions: A comma seperated list of counting
conditions. Possible values are: used,
returned, is_condition, in_condition,
in_second_level_condition,
in_third_level_condition, is_assignee,
is_assigner, loop_content,
second_level_loop_content,
third_level_loop_content, is_param,
in_sum, in_product, in_binary_operation,
member_accessed.
Weightings can be assigned to each
condition due to providing a dict
value, i.e. having used weighted in
half as much as other conditions would
simply be: "used: 0.5, is_assignee".
Weightings default to 1 if unset.
:param average_calculation: If set to true the difference calculation
function will take the average of all
variable differences as the difference,
else it will normalize the function as a
whole and thus weighting in variables
dependent on their size.
:param poly_postprocessing: If set to true, the difference value of big
function pairs will be reduced using a
polynomial approach.
:param extra_include_paths: A list containing additional include paths.
:param exp_postprocessing: If set to true, the difference value of big
function pairs will be reduced using an
exponential approach.
"""
self.debug('Using the following counting conditions:')
for key, val in counting_conditions.items():
self.debug(' *', key.__name__, '(weighting: {})'.format(val))
self.debug('Creating count matrices...')
count_matrices = get_count_matrices(
ClangCountVectorCreator(list(counting_conditions.keys()),
list(counting_conditions.values())),
list(self.file_dict.keys()),
lambda prog: self.debug('{:2.4f}%...'.format(prog)),
self.section['files'].origin,
collect_dirs(extra_include_paths))
self.debug('Calculating differences...')
differences = []
function_count = len(count_matrices)
# Thats n over 2, hardcoded to simplify calculation
combination_length = function_count * (function_count-1) / 2
partial_get_difference = functools.partial(
get_difference,
count_matrices=count_matrices,
average_calculation=average_calculation,
poly_postprocessing=poly_postprocessing,
exp_postprocessing=exp_postprocessing)
for i, elem in enumerate(
map(partial_get_difference,
[(f1, f2) for f1, f2 in combinations(count_matrices, 2)])):
if i % 50 == 0:
self.debug('{:2.4f}%...'.format(100*i/combination_length))
differences.append(elem)
yield ClangFunctionDifferenceResult(self, differences, count_matrices)<|fim▁end|> | |
<|file_name|>SetUserPictureCmd.java<|end_file_name|><|fim▁begin|>/* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.flowable.idm.engine.impl.cmd;
import java.io.Serializable;
import org.flowable.engine.common.api.FlowableIllegalArgumentException;
import org.flowable.engine.common.api.FlowableObjectNotFoundException;
import org.flowable.engine.common.impl.interceptor.Command;
import org.flowable.engine.common.impl.interceptor.CommandContext;
import org.flowable.idm.api.Picture;
import org.flowable.idm.api.User;
import org.flowable.idm.engine.impl.util.CommandContextUtil;
/**
* @author Tom Baeyens
*/
public class SetUserPictureCmd implements Command<Object>, Serializable {
private static final long serialVersionUID = 1L;
protected String userId;
protected Picture picture;
public SetUserPictureCmd(String userId, Picture picture) {
this.userId = userId;
this.picture = picture;
}
@Override
public Object execute(CommandContext commandContext) {
if (userId == null) {
throw new FlowableIllegalArgumentException("userId is null");
}
User user = CommandContextUtil.getIdmEngineConfiguration().getIdmIdentityService()
.createUserQuery().userId(userId)
.singleResult();
if (user == null) {
throw new FlowableObjectNotFoundException("user " + userId + " doesn't exist", User.class);
}
CommandContextUtil.getUserEntityManager(commandContext).setUserPicture(user, picture);
return null;
}
<|fim▁hole|>}<|fim▁end|> | |
<|file_name|>gen.rs<|end_file_name|><|fim▁begin|><|fim▁hole|>
include!(concat!(env!("OUT_DIR"), "/glk.rs"));<|fim▁end|> | #![allow(non_upper_case_globals)]
#![allow(non_camel_case_types)] |
<|file_name|>json.py<|end_file_name|><|fim▁begin|># -*- coding:utf-8 -*-
import json
import sys
from importlib import import_module
from importlib.util import find_spec
from owlmixin import OwlMixin, TOption
from owlmixin.util import load_json
from jumeaux.addons.res2res import Res2ResExecutor
from jumeaux.logger import Logger
from jumeaux.models import Res2ResAddOnPayload, Response, Request
from jumeaux.utils import when_filter
logger: Logger = Logger(__name__)
LOG_PREFIX = "[res2res/json]"
def wrap(anything: bytes, encoding: str) -> str:
"""Use for example of Transformer.function
"""
return json.dumps({"wrap": load_json(anything.decode(encoding))}, ensure_ascii=False)
class Transformer(OwlMixin):
module: str
function: str = "transform"
class Config(OwlMixin):
transformer: Transformer
default_encoding: str = "utf8"
when: TOption[str]
class Executor(Res2ResExecutor):
def __init__(self, config: dict) -> None:
self.config: Config = Config.from_dict(config or {})
t: Transformer = self.config.transformer
try:
if not find_spec(t.module):
raise ModuleNotFoundError
except ModuleNotFoundError as e:
logger.error(f"{LOG_PREFIX} Module {t.module} is not existed.")
sys.exit(1)
try:
self.module = getattr(import_module(t.module), t.function)
except AttributeError as e:
logger.error(f"{LOG_PREFIX} {t.function} is not existed in {t.module} module")<|fim▁hole|> def exec(self, payload: Res2ResAddOnPayload) -> Res2ResAddOnPayload:
req: Request = payload.req
res: Response = payload.response
if not self.config.when.map(lambda x: when_filter(x, {"req": req, "res": res})).get_or(
True
):
return payload
json_str: str = self.module(res.body, res.encoding.get())
new_encoding: str = res.encoding.get_or(self.config.default_encoding)
return Res2ResAddOnPayload.from_dict(
{
"response": {
"body": json_str.encode(new_encoding, errors="replace"),
"type": "json",
"encoding": new_encoding,
"headers": res.headers,
"url": res.url,
"status_code": res.status_code,
"elapsed": res.elapsed,
"elapsed_sec": res.elapsed_sec,
},
"req": req,
"tags": payload.tags,
}
)<|fim▁end|> | sys.exit(1)
|
<|file_name|>setupSampledLOSO_writeIndices.py<|end_file_name|><|fim▁begin|># @Author
# Chloe-Agathe Azencott
# [email protected]
# April 2016
import argparse
import h5py
import numpy as np
import os
import sys
def main():
""" Create train/test indices for one repeat of a 10-fold sampled leave-one-study-out
experiment on the RFS data.
The indices will be stored under
<data_dir>/outputs/U133A_combat_RFS/sampled_loso/repeat<repeat idx><|fim▁hole|> <k>/train.indices
List of indices of the training set (one per line).
<k>/train.labels
List of (0/1) labels of the training set (one per line).
<k>/test.indices
List of indices of the test set (one per line).
<k>/test.labels
List of (0/1) labels of the test set (one per line).
Parameters
----------
data_dir: path
Path to the data folder.
ACES, GSE_RFS, and the outputs directory must be under <data_dir>.
repeat: int
Repeat index.
Example
-------
$ python setUpSampledLOSO_writeIndices.py $SHAREDAT/SamSpecCoEN 0
Reference
---------
Allahyar, A., and Ridder, J. de (2015).
FERAL: network-based classifier with application to breast cancer outcome prediction.
Bioinformatics 31, i311--i319.
"""
parser = argparse.ArgumentParser(description="Build sample-specific co-expression networks" + \
"for a sampled LOSO on the RFS data",
add_help=True)
parser.add_argument("data_dir", help="Path to the data")
parser.add_argument("repeat", help="Index of the repeat", type=int)
args = parser.parse_args()
outDir = '%s/outputs/U133A_combat_RFS/sampled_loso/repeat%d' % (args.data_dir, args.repeat)
# Create outDir if it does not exist
if not os.path.isdir(outDir):
sys.stdout.write("Creating %s\n" % outDir)
try:
os.makedirs(outDir)
except OSError:
if not os.path.isdir(outDir):
raise
# Get expression data, sample labels.
# Do not normalize the data while loading it (so as not to use test data for normalization).
f = h5py.File("%s/ACES/experiments/data/U133A_combat.h5" % args.data_dir)
expressionData = np.array(f['U133A_combat_RFS']['ExpressionData'])
sampleLabels = np.array(f['U133A_combat_RFS']['PatientClassLabels'])
sampleAccess = np.array(f['U133A_combat_RFS']['PatientLabels']).tolist()
f.close()
# Map the indices to the studies
studyDict = {} # studyId:[sampleIdx]
gse_rfs_dir = '%s/GSE_RFS/' % args.data_dir
for studyFile in os.listdir(gse_rfs_dir):
studyPath = '%s/%s' % (gse_rfs_dir, studyFile)
print studyPath
with open(studyPath, 'r') as f:
gsmNames = set([x.split()[0] for x in f.readlines()])
f.close()
gsmNames = gsmNames.intersection(set(sampleAccess))
studyDict[studyFile.split(".")[0]] = [sampleAccess.index(gsm) for gsm in gsmNames]
studyList = studyDict.keys()
numStudies = len(studyList)
print "Found %d studies" % numStudies
np.random.seed(seed=args.repeat)
for foldNr in range(numStudies):
# Training data:
# randomly sample 50% of each study that is not foldNr
trIndices = []
for studyId in [x for x in studyList if x!=foldNr]:
studyIndices = np.random.choice(studyDict[studyId],
size=len(studyDict[studyId])/2,
replace=False)
trIndices.extend(studyIndices)
# studyIndices = studyDict[studyId]
# random.shuffle(studyIndices)
# n = len(studyIndices)
# trIndices.extend(studyIndices[:(n/2)])
# Test data:
# the data from foldNr
teIndices = studyDict[studyList[foldNr]]
# Create output directory
foldDir = "%s/fold%d" % (outDir, foldNr)
try:
os.makedirs(foldDir)
except OSError:
if not os.path.isdir(foldDir):
raise
# Save train indices to file
trIndicesF = '%s/train.indices' % foldDir
np.savetxt(trIndicesF, trIndices, fmt='%d')
sys.stdout.write("Wrote training indices for fold %d to %s\n" % (foldNr, trIndicesF))
# Save test indices to file
teIndicesF = '%s/test.indices' % foldDir
np.savetxt(teIndicesF, teIndices, fmt='%d')
sys.stdout.write("Wrote test indices for fold %d to %s\n" % (foldNr, teIndicesF))
# Save train labels to file
trLabelsF = '%s/train.labels' % foldDir
np.savetxt(trLabelsF, np.array(sampleLabels[trIndices], dtype='int'),
fmt='%d')
sys.stdout.write("Wrote training labels for fold %d to %s\n" % (foldNr, trLabelsF))
# Save test labels to file
teLabelsF = '%s/test.labels' % foldDir
np.savetxt(teLabelsF, np.array(sampleLabels[teIndices], dtype='int'),
fmt='%d')
sys.stdout.write("Wrote test labels for fold %d to %s\n" % (foldNr, teLabelsF))
if __name__ == "__main__":
main()<|fim▁end|> | with the following structure:
For k=1..numFolds: |
<|file_name|>transactions.rs<|end_file_name|><|fim▁begin|>use crypto::digest::Digest;
use crypto::sha2::Sha256;
use preamble::*;
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
pub struct Transactions<'a> {
pub count: u64,
pub slice: &'a [u8],
}
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
pub struct Transaction<'a> {
pub version: u32,
pub txid: Hash,
pub txins_count: u64,
pub txouts_count: u64,
pub lock_time: u32,
pub slice: &'a [u8],
}
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
pub struct TransactionInput<'a> {
pub prev_hash: &'a Hash,
pub prev_index: u32,
pub script: Script<'a>,
pub sequence_no: u32,
pub slice: &'a [u8],
}
#[derive(PartialEq, Eq, Debug, Clone, Copy)]
pub struct TransactionOutput<'a> {
pub value: u64,
pub script: Script<'a>,
pub slice: &'a [u8],
}
impl<'a> Transactions<'a> {
pub fn new(mut slice: &[u8]) -> Result<Transactions> {
let count = read_var_int(&mut slice)?;
Ok(Transactions { count, slice })
}
pub fn walk<V: BlockChainVisitor<'a>>(
self,
visitor: &mut V,
timestamp: u32,
height: u64,
block_item: &mut V::BlockItem,
output_items: &mut HashMap<Hash, VecMap<V::OutputItem>>,
) -> ParseResult<()> {
let mut slice = self.slice;
for _ in 0..self.count {
Transaction::read_and_walk(
&mut slice,
visitor,
timestamp,
height,
block_item,
output_items,
)?;
}
assert_eq!(slice.len(), 0);
Ok(())
}
}
impl<'a> Transaction<'a> {
pub fn read_and_walk<V: BlockChainVisitor<'a>>(
slice: &mut &'a [u8],
visitor: &mut V,
timestamp: u32,
height: u64,
block_item: &mut V::BlockItem,
output_items: &mut HashMap<Hash, VecMap<V::OutputItem>>,
) -> ParseResult<Transaction<'a>> {
// Visit the raw transaction before parsing
let mut transaction_item = visitor.visit_transaction_begin(block_item);
let mut tx_hash = [0u8; 32];
let mut sha256_hasher1 = Sha256::new();
let mut sha256_hasher2 = sha256_hasher1;
// Save the initial position in two slices
let mut init_slice = *slice;
sha256_hasher1.input(&slice[..4]);
let version = read_u32(slice)?;
let marker = slice[0];
let txins_count: u64;
let mut slice_inputs_and_outputs = *slice;
if marker == 0x00 {
// Consume marker
*slice = &slice[1..];
let flag = read_u8(slice)?;
slice_inputs_and_outputs = *slice;
if flag == 0x01 {
txins_count = read_var_int(slice)?;
} else {
return Err(ParseError::Invalid);
}
} else {
txins_count = read_var_int(slice)?;
}
// Read the inputs
for _ in 0..txins_count {
let i = TransactionInput::read(slice, timestamp, height)?;
let mut output_item = None;
if let HashEntry::Occupied(mut occupied) = output_items.entry(*i.prev_hash) {
output_item = occupied.get_mut().remove(i.prev_index as usize);
if occupied.get().len() == 0 {
occupied.remove();
}
}
visitor.visit_transaction_input(i, block_item, &mut transaction_item, output_item);
}
// Read the outputs
let txouts_count = read_var_int(slice)?;
let mut cur_output_items = VecMap::with_capacity(txouts_count as usize);
for n in 0..txouts_count {
let o = TransactionOutput::read(slice, timestamp, height)?;
let output_item =
visitor.visit_transaction_output(o, block_item, &mut transaction_item);
if let Some(output_item) = output_item {
cur_output_items.insert(n as usize, output_item);
}
}
// Hash the transaction data before the witnesses
let slice_inputs_and_outputs_len = slice_inputs_and_outputs.len();
sha256_hasher1.input(read_slice(
&mut slice_inputs_and_outputs,
slice_inputs_and_outputs_len - slice.len(),
)?);
// Read the witnesses
if marker == 0x00 {
for _ in 0..txins_count {
let item_count = read_var_int(slice)?;
for _ in 0..item_count {
let witness_len = read_var_int(slice)? as usize;
let _witness = read_slice(slice, witness_len);
}
}
}
sha256_hasher1.input(&slice[..4]);
let lock_time = read_u32(slice)?;
sha256_hasher1.result(&mut tx_hash);
sha256_hasher2.input(&tx_hash);
sha256_hasher2.result(&mut tx_hash);
let init_slice_len = init_slice.len();
let tx = Transaction {
version,
txid: *Hash::from_slice(&tx_hash),<|fim▁hole|> txins_count,
txouts_count,
lock_time,
slice: read_slice(&mut init_slice, init_slice_len - slice.len())?,
};
if cur_output_items.len() > 0 {
let len = cur_output_items.len();
cur_output_items.reserve_len_exact(len);
output_items.insert(*Hash::from_slice(&tx_hash), cur_output_items);
}
visitor.visit_transaction_end(tx, block_item, transaction_item);
Ok(tx)
}
}
impl<'a> TransactionInput<'a> {
pub fn read(slice: &mut &'a [u8], timestamp: u32, height: u64) -> Result<TransactionInput<'a>> {
// Save the initial position
let mut init_slice = *slice;
// Read the prev_hash
let prev_hash = Hash::from_slice(read_array!(slice, 32)?);
// Read the prev_index
let prev_index = read_u32(slice)?;
// Read the script
let nbytes = read_var_int(slice)? as usize;
let script = read_slice(slice, nbytes)?;
// Read the sequence_no
let sequence_no = read_u32(slice)?;
let init_slice_len = init_slice.len();
Ok(TransactionInput {
prev_hash,
prev_index,
script: Script::new(script, timestamp, height),
sequence_no,
slice: read_slice(&mut init_slice, init_slice_len - slice.len())?,
})
}
}
impl<'a> TransactionOutput<'a> {
pub fn read(
slice: &mut &'a [u8],
timestamp: u32,
height: u64,
) -> Result<TransactionOutput<'a>> {
// Save the initial position
let mut init_slice = *slice;
// Read the value
let value = read_u64(slice)?;
// Read the script
let nbytes = read_var_int(slice)? as usize;
let script = read_slice(slice, nbytes)?;
// Return the transaction output
let init_slice_len = init_slice.len();
Ok(TransactionOutput {
value,
script: Script::new(script, timestamp, height),
slice: read_slice(&mut init_slice, init_slice_len - slice.len())?,
})
}
}<|fim▁end|> | |
<|file_name|>DisabledFacetForPropertyAnnotation.java<|end_file_name|><|fim▁begin|>/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.isis.core.metamodel.facets.properties.property.disabled;
import org.apache.isis.applib.annotation.Editing;
import org.apache.isis.applib.annotation.Property;
import org.apache.isis.applib.annotation.When;
import org.apache.isis.applib.annotation.Where;
import org.apache.isis.core.metamodel.facetapi.FacetHolder;
import org.apache.isis.core.metamodel.facets.members.disabled.DisabledFacet;
import org.apache.isis.core.metamodel.facets.members.disabled.DisabledFacetAbstractImpl;
public class DisabledFacetForPropertyAnnotation extends DisabledFacetAbstractImpl {
public static DisabledFacet create(final Property property, final FacetHolder holder) {
if (property == null) {<|fim▁hole|> final Editing editing = property.editing();
final String disabledReason = property.editingDisabledReason();
switch (editing) {
case AS_CONFIGURED:
// nothing needs to be done here; the DomainObjectFactory (processing @DomainObject annotation)
// will install an ImmutableFacetForDomainObjectAnnotation on the domain object and then a
// DisabledFacetOnPropertyDerivedFromImmutable facet will be installed.
return null;
case DISABLED:
return new DisabledFacetForPropertyAnnotation(disabledReason, holder);
case ENABLED:
return null;
}
return null;
}
private DisabledFacetForPropertyAnnotation(final String reason, final FacetHolder holder) {
super(When.ALWAYS, Where.EVERYWHERE, reason, holder);
}
}<|fim▁end|> | return null;
}
|
<|file_name|>_core.py<|end_file_name|><|fim▁begin|>"""
websocket - WebSocket client library for Python
Copyright (C) 2010 Hiroki Ohtani(liris)
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor,
Boston, MA 02110-1335 USA
"""
from __future__ import print_function
import six
import socket
if six.PY3:
from base64 import encodebytes as base64encode
else:
from base64 import encodestring as base64encode
import struct
import threading
# websocket modules
from ._exceptions import *
from ._abnf import *
from ._socket import *
from ._utils import *
from ._url import *
from ._logging import *
from ._http import *
from ._handshake import *
from ._ssl_compat import *
"""
websocket python client.
=========================
This version support only hybi-13.
Please see http://tools.ietf.org/html/rfc6455 for protocol.
"""
def create_connection(url, timeout=None, **options):
"""
connect to url and return websocket object.
Connect to url and return the WebSocket object.
Passing optional timeout parameter will set the timeout on the socket.
If no timeout is supplied,
the global default timeout setting returned by getdefauttimeout() is used.
You can customize using 'options'.
If you set "header" list object, you can set your own custom header.
>>> conn = create_connection("ws://echo.websocket.org/",
... header=["User-Agent: MyProgram",
... "x-custom: header"])
timeout: socket timeout time. This value is integer.
if you set None for this value,
it means "use default_timeout value"
options: "header" -> custom http header list or dict.
"cookie" -> cookie value.
"origin" -> custom origin url.
"host" -> custom host header string.
"http_proxy_host" - http proxy host name.
"http_proxy_port" - http proxy port. If not set, set to 80.
"http_no_proxy" - host names, which doesn't use proxy.
"http_proxy_auth" - http proxy auth infomation.
tuple of username and password.
default is None
"enable_multithread" -> enable lock for multithread.
"sockopt" -> socket options
"sslopt" -> ssl option
"subprotocols" - array of available sub protocols.
default is None.
"skip_utf8_validation" - skip utf8 validation.
"""
sockopt = options.get("sockopt", [])
sslopt = options.get("sslopt", {})
fire_cont_frame = options.get("fire_cont_frame", False)
enable_multithread = options.get("enable_multithread", False)
skip_utf8_validation = options.get("skip_utf8_validation", False)
websock = WebSocket(sockopt=sockopt, sslopt=sslopt,
fire_cont_frame=fire_cont_frame,
enable_multithread=enable_multithread,
skip_utf8_validation=skip_utf8_validation)
websock.settimeout(timeout if timeout is not None else getdefaulttimeout())
websock.connect(url, **options)
return websock
class WebSocket(object):
"""
Low level WebSocket interface.
This class is based on
The WebSocket protocol draft-hixie-thewebsocketprotocol-76
http://tools.ietf.org/html/draft-hixie-thewebsocketprotocol-76
We can connect to the websocket server and send/recieve data.
The following example is a echo client.
>>> import websocket
>>> ws = websocket.WebSocket()
>>> ws.connect("ws://echo.websocket.org")
>>> ws.send("Hello, Server")
>>> ws.recv()
'Hello, Server'
>>> ws.close()
get_mask_key: a callable to produce new mask keys, see the set_mask_key
function's docstring for more details
sockopt: values for socket.setsockopt.
sockopt must be tuple and each element is argument of sock.setscokopt.
sslopt: dict object for ssl socket option.
fire_cont_frame: fire recv event for each cont frame. default is False
enable_multithread: if set to True, lock send method.
skip_utf8_validation: skip utf8 validation.
"""
def __init__(self, get_mask_key=None, sockopt=None, sslopt=None,
fire_cont_frame=False, enable_multithread=False,
skip_utf8_validation=False):
"""
Initalize WebSocket object.
"""
self.sock_opt = sock_opt(sockopt, sslopt)
self.handshake_response = None
self.sock = None
self.connected = False
self.get_mask_key = get_mask_key
# These buffer over the build-up of a single frame.
self.frame_buffer = frame_buffer(self._recv, skip_utf8_validation)
self.cont_frame = continuous_frame(fire_cont_frame, skip_utf8_validation)
if enable_multithread:
self.lock = threading.Lock()
else:
self.lock = NoLock()
def __iter__(self):
"""
Allow iteration over websocket, implying sequential `recv` executions.
"""
while True:
yield self.recv()
def __next__(self):
return self.recv()
def next(self):
return self.__next__()
def fileno(self):
return self.sock.fileno()
def set_mask_key(self, func):
"""
set function to create musk key. You can custumize mask key generator.
Mainly, this is for testing purpose.
func: callable object. the fuct must 1 argument as integer.
The argument means length of mask key.
This func must be return string(byte array),
which length is argument specified.
"""
self.get_mask_key = func
def gettimeout(self):
"""
Get the websocket timeout(second).
"""
return self.sock_opt.timeout
def settimeout(self, timeout):
"""
Set the timeout to the websocket.
timeout: timeout time(second).
"""
self.sock_opt.timeout = timeout
if self.sock:
self.sock.settimeout(timeout)
timeout = property(gettimeout, settimeout)
def getsubprotocol(self):
"""
get subprotocol
"""
if self.handshake_response:
return self.handshake_response.subprotocol
else:
return None
subprotocol = property(getsubprotocol)
def getstatus(self):
"""
get handshake status
"""
if self.handshake_response:
return self.handshake_response.status
else:
return None
status = property(getstatus)
def getheaders(self):
"""
get handshake response header
"""
if self.handshake_response:
return self.handshake_response.headers
else:
return None
headers = property(getheaders)
def connect(self, url, **options):
"""
Connect to url. url is websocket url scheme.
ie. ws://host:port/resource
You can customize using 'options'.
If you set "header" list object, you can set your own custom header.
>>> ws = WebSocket()
>>> ws.connect("ws://echo.websocket.org/",
... header=["User-Agent: MyProgram",
... "x-custom: header"])
timeout: socket timeout time. This value is integer.
if you set None for this value,
it means "use default_timeout value"
options: "header" -> custom http header list or dict.
"cookie" -> cookie value.
"origin" -> custom origin url.
"host" -> custom host header string.
"http_proxy_host" - http proxy host name.
"http_proxy_port" - http proxy port. If not set, set to 80.
"http_no_proxy" - host names, which doesn't use proxy.
"http_proxy_auth" - http proxy auth infomation.
tuple of username and password.
defualt is None
"subprotocols" - array of available sub protocols.
default is None.
"""
self.sock, addrs = connect(url, self.sock_opt, proxy_info(**options))
try:
self.handshake_response = handshake(self.sock, *addrs, **options)
self.connected = True
except:
if self.sock:
self.sock.close()
self.sock = None
raise
def send(self, payload, opcode=ABNF.OPCODE_TEXT):
"""
Send the data as string.
payload: Payload must be utf-8 string or unicode,
if the opcode is OPCODE_TEXT.
Otherwise, it must be string(byte array)
opcode: operation code to send. Please see OPCODE_XXX.
"""
frame = ABNF.create_frame(payload, opcode)
return self.send_frame(frame)
def send_frame(self, frame):
"""
Send the data frame.
frame: frame data created by ABNF.create_frame
>>> ws = create_connection("ws://echo.websocket.org/")
>>> frame = ABNF.create_frame("Hello", ABNF.OPCODE_TEXT)
>>> ws.send_frame(frame)
>>> cont_frame = ABNF.create_frame("My name is ", ABNF.OPCODE_CONT, 0)
>>> ws.send_frame(frame)
>>> cont_frame = ABNF.create_frame("Foo Bar", ABNF.OPCODE_CONT, 1)
>>> ws.send_frame(frame)
"""
if self.get_mask_key:
frame.get_mask_key = self.get_mask_key
data = frame.format()
length = len(data)
trace("send: " + repr(data))
with self.lock:
while data:
l = self._send(data)
data = data[l:]
return length
def send_binary(self, payload):
return self.send(payload, ABNF.OPCODE_BINARY)
def ping(self, payload=""):
"""
send ping data.
payload: data payload to send server.
"""
if isinstance(payload, six.text_type):
payload = payload.encode("utf-8")
self.send(payload, ABNF.OPCODE_PING)
def pong(self, payload):
"""
send pong data.
payload: data payload to send server.
"""
if isinstance(payload, six.text_type):
payload = payload.encode("utf-8")
self.send(payload, ABNF.OPCODE_PONG)
def recv(self):
"""
Receive string data(byte array) from the server.
<|fim▁hole|> return value: string(byte array) value.
"""
opcode, data = self.recv_data()
if six.PY3 and opcode == ABNF.OPCODE_TEXT:
return data.decode("utf-8")
elif opcode == ABNF.OPCODE_TEXT or opcode == ABNF.OPCODE_BINARY:
return data
else:
return ''
def recv_data(self, control_frame=False):
"""
Recieve data with operation code.
control_frame: a boolean flag indicating whether to return control frame
data, defaults to False
return value: tuple of operation code and string(byte array) value.
"""
opcode, frame = self.recv_data_frame(control_frame)
return opcode, frame.data
def recv_data_frame(self, control_frame=False):
"""
Recieve data with operation code.
control_frame: a boolean flag indicating whether to return control frame
data, defaults to False
return value: tuple of operation code and string(byte array) value.
"""
while True:
frame = self.recv_frame()
if not frame:
# handle error:
# 'NoneType' object has no attribute 'opcode'
raise WebSocketProtocolException("Not a valid frame %s" % frame)
elif frame.opcode in (ABNF.OPCODE_TEXT, ABNF.OPCODE_BINARY, ABNF.OPCODE_CONT):
self.cont_frame.validate(frame)
self.cont_frame.add(frame)
if self.cont_frame.is_fire(frame):
return self.cont_frame.extract(frame)
elif frame.opcode == ABNF.OPCODE_CLOSE:
self.send_close()
return (frame.opcode, frame)
elif frame.opcode == ABNF.OPCODE_PING:
if len(frame.data) < 126:
self.pong(frame.data)
else:
raise WebSocketProtocolException("Ping message is too long")
if control_frame:
return (frame.opcode, frame)
elif frame.opcode == ABNF.OPCODE_PONG:
if control_frame:
return (frame.opcode, frame)
def recv_frame(self):
"""
recieve data as frame from server.
return value: ABNF frame object.
"""
return self.frame_buffer.recv_frame()
def send_close(self, status=STATUS_NORMAL, reason=six.b("")):
"""
send close data to the server.
status: status code to send. see STATUS_XXX.
reason: the reason to close. This must be string or bytes.
"""
if status < 0 or status >= ABNF.LENGTH_16:
raise ValueError("code is invalid range")
self.connected = False
self.send(struct.pack('!H', status) + reason, ABNF.OPCODE_CLOSE)
def close(self, status=STATUS_NORMAL, reason=six.b(""), timeout=3):
"""
Close Websocket object
status: status code to send. see STATUS_XXX.
reason: the reason to close. This must be string.
timeout: timeout until recieve a close frame.
If None, it will wait forever until recieve a close frame.
"""
if self.connected:
if status < 0 or status >= ABNF.LENGTH_16:
raise ValueError("code is invalid range")
try:
self.connected = False
self.send(struct.pack('!H', status) + reason, ABNF.OPCODE_CLOSE)
sock_timeout = self.sock.gettimeout()
self.sock.settimeout(timeout)
try:
frame = self.recv_frame()
if isEnabledForError():
recv_status = struct.unpack("!H", frame.data)[0]
if recv_status != STATUS_NORMAL:
error("close status: " + repr(recv_status))
except:
pass
self.sock.settimeout(sock_timeout)
self.sock.shutdown(socket.SHUT_RDWR)
except:
pass
self.shutdown()
def abort(self):
"""
Low-level asynchonous abort, wakes up other threads that are waiting in recv_*
"""
if self.connected:
self.sock.shutdown(socket.SHUT_RDWR)
def shutdown(self):
"close socket, immediately."
if self.sock:
self.sock.close()
self.sock = None
self.connected = False
def _send(self, data):
return send(self.sock, data)
def _recv(self, bufsize):
try:
return recv(self.sock, bufsize)
except WebSocketConnectionClosedException:
if self.sock:
self.sock.close()
self.sock = None
self.connected = False
raise<|fim▁end|> | |
<|file_name|>app.js<|end_file_name|><|fim▁begin|>$(function() {
$(".navbar-expand-toggle").click(function() {
$(".app-container").toggleClass("expanded");
return $(".navbar-expand-toggle").toggleClass("fa-rotate-90");<|fim▁hole|> $(".navbar-right").toggleClass("expanded");
return $(".navbar-right-expand-toggle").toggleClass("fa-rotate-90");
});
});
$(function() {
return $('select').select2();
});
$(function() {
return $('.toggle-checkbox').bootstrapSwitch({
size: "small"
});
});
$(function() {
return $('.match-height').matchHeight();
});
$(function() {
return $('.datatable').DataTable({
"dom": '<"top"fl<"clear">>rt<"bottom"ip<"clear">>'
});
});
$(function() {
return $(".side-menu .nav .dropdown").on('show.bs.collapse', function() {
return $(".side-menu .nav .dropdown .collapse").collapse('hide');
});
});<|fim▁end|> | });
return $(".navbar-right-expand-toggle").click(function() { |
<|file_name|>index.ts<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2007-2022 Crafter Software Corporation. All Rights Reserved.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License version 3 as published by
* the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*<|fim▁hole|>export { default } from './FolderBrowserTreeView';
export * from './FolderBrowserTreeView';
export * from './FolderBrowserTreeViewUI';
export * from './FolderBrowserTreeViewSkeleton';
export * from './PathSelected';<|fim▁end|> | * You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
|
<|file_name|>test_swe_adjoint.py<|end_file_name|><|fim▁begin|>"""
Tests whether we can compute a consistent gradient of some functional
based on the forward model with respect to the bottom friction
via firedrake_adjoint.
Stephan Kramer 25-05-16
"""
import pytest
from thetis import *
from firedrake_adjoint import *
op2.init(log_level=INFO)
velocity_u = 2.0
def basic_setup():
lx = 100.0
ly = 50.0
nx = 20
ny = 10
mesh2d = RectangleMesh(nx, ny, lx, ly)
# export interval in seconds
t_export = 0.5
timestep = 0.5
# bathymetry
P1_2d = get_functionspace(mesh2d, 'CG', 1)
bathymetry_2d = Function(P1_2d, name='Bathymetry')
depth = 50.0
bathymetry_2d.assign(depth)
# --- create solver ---
solver_obj = solver2d.FlowSolver2d(mesh2d, bathymetry_2d)
options = solver_obj.options
options.simulation_export_time = t_export
options.check_volume_conservation_2d = True
options.fields_to_export = ['uv_2d', 'elev_2d']
options.timestepper_type = 'CrankNicolson'
options.timestep = timestep
options.horizontal_viscosity = Constant(2.0)
# create function spaces
solver_obj.create_function_spaces()
# create drag function and set it with a bump function representing a turbine
drag_func = Function(solver_obj.function_spaces.P1_2d, name='bottomdrag')
x = SpatialCoordinate(mesh2d)
drag_center = 12.0
drag_bg = 0.0025
x0 = lx/2
y0 = ly/2
sigma = 20.0
drag_func.project(drag_center*exp(-((x[0]-x0)**2 + (x[1]-y0)**2)/sigma**2) + drag_bg)
# assign fiction field
options.quadratic_drag_coefficient = drag_func
# assign boundary conditions
inflow_tag = 1
outflow_tag = 2
inflow_bc = {'un': Constant(-velocity_u)} # NOTE negative into domain
outflow_bc = {'elev': Constant(0.0)}
solver_obj.bnd_functions['shallow_water'] = {inflow_tag: inflow_bc,
outflow_tag: outflow_bc}
return solver_obj
def setup_steady():
solver_obj = basic_setup()
solver_obj.options.timestepper_type = 'SteadyState'
solver_obj.options.simulation_end_time = 0.499
solver_obj.options.timestepper_options.solver_parameters = {
'mat_type': 'aij',
'ksp_type': 'preonly',
'pc_type': 'lu',
'pc_factor_mat_solver_type': 'mumps',
'snes_type': 'newtonls',
}
solver_obj.create_equations()
return solver_obj
def setup_unsteady():
solver_obj = basic_setup()
solver_obj.options.timestepper_type = 'CrankNicolson'
solver_obj.options.simulation_end_time = 2.0
solver_obj.options.timestepper_options.implicitness_theta = 1.0
solver_obj.options.timestepper_options.solver_parameters = {
'mat_type': 'aij',
'ksp_type': 'preonly',
'pc_type': 'lu',
'pc_factor_mat_solver_type': 'mumps',
'snes_type': 'newtonls',
}
solver_obj.create_equations()
return solver_obj
@pytest.fixture(params=[setup_steady, setup_unsteady])
def setup(request):
return request.param
def test_gradient_from_adjoint(setup):
solver_obj = setup()<|fim▁hole|>
drag_func = Control(solver_obj.options.quadratic_drag_coefficient)
Jhat = ReducedFunctional(J0, drag_func)
c = Function(solver_obj.options.quadratic_drag_coefficient)
dc = Function(c)
from numpy.random import rand
c.vector()[:] = rand(*c.dat.shape)
dc.vector()[:] = rand(*dc.dat.shape)
minconv = taylor_test(Jhat, c, dc)
assert minconv > 1.90<|fim▁end|> | solver_obj.assign_initial_conditions(uv=as_vector((velocity_u, 0.0)), elev=Constant(0.0))
solver_obj.iterate()
J0 = assemble(solver_obj.fields.solution_2d[0]*dx) |
<|file_name|>NewDTDTemplatesWizardPage.java<|end_file_name|><|fim▁begin|>/*******************************************************************************
* Copyright (c) 2005, 2009 IBM Corporation and others.
* All rights reserved. This program and the accompanying materials
* are made available under the terms of the Eclipse Public License v1.0
* which accompanies this distribution, and is available at
* http://www.eclipse.org/legal/epl-v10.html
*
* Contributors:
* IBM Corporation - initial API and implementation
*
*******************************************************************************/
package org.eclipse.wst.dtd.ui.internal.wizard;
import org.eclipse.jface.dialogs.Dialog;
import org.eclipse.jface.preference.PreferenceDialog;
import org.eclipse.jface.resource.JFaceResources;
import org.eclipse.jface.text.Document;
import org.eclipse.jface.text.IDocument;
import org.eclipse.jface.text.source.ISourceViewer;
import org.eclipse.jface.text.source.SourceViewer;
import org.eclipse.jface.text.source.SourceViewerConfiguration;
import org.eclipse.jface.text.templates.DocumentTemplateContext;
import org.eclipse.jface.text.templates.Template;
import org.eclipse.jface.text.templates.TemplateBuffer;
import org.eclipse.jface.text.templates.TemplateContext;
import org.eclipse.jface.text.templates.TemplateContextType;
import org.eclipse.jface.text.templates.persistence.TemplateStore;
import org.eclipse.jface.viewers.ISelectionChangedListener;
import org.eclipse.jface.viewers.IStructuredContentProvider;
import org.eclipse.jface.viewers.IStructuredSelection;
import org.eclipse.jface.viewers.ITableLabelProvider;
import org.eclipse.jface.viewers.LabelProvider;
import org.eclipse.jface.viewers.SelectionChangedEvent;
import org.eclipse.jface.viewers.StructuredSelection;
import org.eclipse.jface.viewers.TableLayout;
import org.eclipse.jface.viewers.TableViewer;
import org.eclipse.jface.viewers.Viewer;
import org.eclipse.jface.viewers.ViewerSorter;
import org.eclipse.jface.wizard.WizardPage;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.ControlAdapter;
import org.eclipse.swt.events.ControlEvent;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.graphics.Image;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.graphics.Rectangle;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Control;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Link;
import org.eclipse.swt.widgets.Table;
import org.eclipse.swt.widgets.TableColumn;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.dialogs.PreferencesUtil;
import org.eclipse.wst.dtd.core.internal.provisional.contenttype.ContentTypeIdForDTD;
import org.eclipse.wst.dtd.ui.StructuredTextViewerConfigurationDTD;
import org.eclipse.wst.dtd.ui.internal.DTDUIMessages;
import org.eclipse.wst.dtd.ui.internal.DTDUIPlugin;
import org.eclipse.wst.dtd.ui.internal.Logger;
import org.eclipse.wst.dtd.ui.internal.editor.IHelpContextIds;
import org.eclipse.wst.dtd.ui.internal.preferences.DTDUIPreferenceNames;
import org.eclipse.wst.dtd.ui.internal.templates.TemplateContextTypeIdsDTD;
import org.eclipse.wst.sse.core.StructuredModelManager;
import org.eclipse.wst.sse.core.internal.provisional.IStructuredModel;
import org.eclipse.wst.sse.ui.StructuredTextViewerConfiguration;
import org.eclipse.wst.sse.ui.internal.StructuredTextViewer;
import org.eclipse.wst.sse.ui.internal.provisional.style.LineStyleProvider;
/**
* Templates page in new file wizard. Allows users to select a new file
* template to be applied in new file.
*
*/
public class NewDTDTemplatesWizardPage extends WizardPage {
/**
* Content provider for templates
*/
private class TemplateContentProvider implements IStructuredContentProvider {
/** The template store. */
private TemplateStore fStore;
/*
* @see IContentProvider#dispose()
*/
public void dispose() {
fStore = null;
}
/*
* @see IStructuredContentProvider#getElements(Object)
*/
public Object[] getElements(Object input) {
return fStore.getTemplates(TemplateContextTypeIdsDTD.NEW);
}
/*
* @see IContentProvider#inputChanged(Viewer, Object, Object)
*/
public void inputChanged(Viewer viewer, Object oldInput, Object newInput) {
fStore = (TemplateStore) newInput;
}
}
/**
* Label provider for templates.
*/
private class TemplateLabelProvider extends LabelProvider implements ITableLabelProvider {
/*
* @see org.eclipse.jface.viewers.ITableLabelProvider#getColumnImage(java.lang.Object,
* int)
*/
public Image getColumnImage(Object element, int columnIndex) {
return null;
}
/*
* @see org.eclipse.jface.viewers.ITableLabelProvider#getColumnText(java.lang.Object,
* int)
*/
public String getColumnText(Object element, int columnIndex) {
Template template = (Template) element;
switch (columnIndex) {
case 0 :
return template.getName();
case 1 :
return template.getDescription();
default :
return ""; //$NON-NLS-1$
}
}
}
/** Last selected template name */
private String fLastSelectedTemplateName;
/** The viewer displays the pattern of selected template. */
private SourceViewer fPatternViewer;
/** The table presenting the templates. */
private TableViewer fTableViewer;
/** Template store used by this wizard page */
private TemplateStore fTemplateStore;
/** Checkbox for using templates. */
private Button fUseTemplateButton;
public NewDTDTemplatesWizardPage() {
super("NewDTDTemplatesWizardPage", DTDUIMessages.NewDTDTemplatesWizardPage_0, null); //$NON-NLS-1$
setDescription(DTDUIMessages.NewDTDTemplatesWizardPage_1);
}
/**
* Correctly resizes the table so no phantom columns appear
*
* @param parent
* the parent control
* @param buttons
* the buttons
* @param table
* the table
* @param column1
* the first column
* @param column2
* the second column
* @param column3
* the third column
*/
private void configureTableResizing(final Composite parent, final Table table, final TableColumn column1, final TableColumn column2) {
parent.addControlListener(new ControlAdapter() {
public void controlResized(ControlEvent e) {
Rectangle area = parent.getClientArea();
Point preferredSize = table.computeSize(SWT.DEFAULT, SWT.DEFAULT);
int width = area.width - 2 * table.getBorderWidth();
if (preferredSize.y > area.height) {
// Subtract the scrollbar width from the total column
// width
// if a vertical scrollbar will be required
Point vBarSize = table.getVerticalBar().getSize();
width -= vBarSize.x;
}
Point oldSize = table.getSize();
if (oldSize.x > width) {
// table is getting smaller so make the columns
// smaller first and then resize the table to
// match the client area width
column1.setWidth(width / 2);
column2.setWidth(width / 2);
table.setSize(width, area.height);
}
else {
// table is getting bigger so make the table
// bigger first and then make the columns wider
// to match the client area width
table.setSize(width, area.height);
column1.setWidth(width / 2);
column2.setWidth(width / 2);
}
}
});
}
public void createControl(Composite ancestor) {
Composite parent = new Composite(ancestor, SWT.NONE);
GridLayout layout = new GridLayout();
layout.numColumns = 2;
parent.setLayout(layout);
// create checkbox for user to use DTD Template
fUseTemplateButton = new Button(parent, SWT.CHECK);
fUseTemplateButton.setText(DTDUIMessages.NewDTDTemplatesWizardPage_4);
GridData data = new GridData(SWT.FILL, SWT.FILL, true, false, 2, 1);
fUseTemplateButton.setLayoutData(data);
fUseTemplateButton.addSelectionListener(new SelectionAdapter() {
public void widgetSelected(SelectionEvent e) {
enableTemplates();
}
});
// create composite for Templates table
Composite innerParent = new Composite(parent, SWT.NONE);
GridLayout innerLayout = new GridLayout();
innerLayout.numColumns = 2;
innerLayout.marginHeight = 0;
innerLayout.marginWidth = 0;
innerParent.setLayout(innerLayout);
GridData gd = new GridData(SWT.FILL, SWT.FILL, true, true, 2, 1);
innerParent.setLayoutData(gd);
Label label = new Label(innerParent, SWT.NONE);
label.setText(DTDUIMessages.NewDTDTemplatesWizardPage_7);
data = new GridData(SWT.FILL, SWT.FILL, true, false, 2, 1);
label.setLayoutData(data);
// create table that displays templates
Table table = new Table(innerParent, SWT.BORDER | SWT.FULL_SELECTION);
data = new GridData(GridData.FILL_BOTH);
data.widthHint = convertWidthInCharsToPixels(2);
data.heightHint = convertHeightInCharsToPixels(10);
data.horizontalSpan = 2;
table.setLayoutData(data);
table.setHeaderVisible(true);
table.setLinesVisible(true);
TableLayout tableLayout = new TableLayout();
table.setLayout(tableLayout);
TableColumn column1 = new TableColumn(table, SWT.NONE);
column1.setText(DTDUIMessages.NewDTDTemplatesWizardPage_2);
TableColumn column2 = new TableColumn(table, SWT.NONE);
column2.setText(DTDUIMessages.NewDTDTemplatesWizardPage_3);
fTableViewer = new TableViewer(table);
fTableViewer.setLabelProvider(new TemplateLabelProvider());
fTableViewer.setContentProvider(new TemplateContentProvider());
fTableViewer.setSorter(new ViewerSorter() {
public int compare(Viewer viewer, Object object1, Object object2) {
if ((object1 instanceof Template) && (object2 instanceof Template)) {
Template left = (Template) object1;
Template right = (Template) object2;
int result = left.getName().compareToIgnoreCase(right.getName());
if (result != 0)
return result;
return left.getDescription().compareToIgnoreCase(right.getDescription());
}
return super.compare(viewer, object1, object2);
}
public boolean isSorterProperty(Object element, String property) {
return true;
}
});
fTableViewer.addSelectionChangedListener(new ISelectionChangedListener() {
public void selectionChanged(SelectionChangedEvent e) {
updateViewerInput();
}
});
// create viewer that displays currently selected template's contents
fPatternViewer = doCreateViewer(parent);
fTemplateStore = DTDUIPlugin.getDefault().getTemplateStore();
fTableViewer.setInput(fTemplateStore);
// Create linked text to just to templates preference page
Link link = new Link(parent, SWT.NONE);
link.setText(DTDUIMessages.NewDTDTemplatesWizardPage_6);
data = new GridData(SWT.END, SWT.FILL, true, false, 2, 1);
link.setLayoutData(data);
link.addSelectionListener(new SelectionAdapter() {
public void widgetSelected(SelectionEvent e) {
linkClicked();
}
});
configureTableResizing(innerParent, table, column1, column2);
loadLastSavedPreferences();
PlatformUI.getWorkbench().getHelpSystem().setHelp(parent, IHelpContextIds.DTD_NEWWIZARD_TEMPLATE_HELPID);
Dialog.applyDialogFont(parent);
setControl(parent);
}
/**
* Creates, configures and returns a source viewer to present the template
* pattern on the preference page. Clients may override to provide a
* custom source viewer featuring e.g. syntax coloring.
*
* @param parent
* the parent control
* @return a configured source viewer
*/
private SourceViewer createViewer(Composite parent) {
SourceViewerConfiguration sourceViewerConfiguration = new StructuredTextViewerConfiguration() {
StructuredTextViewerConfiguration baseConfiguration = new StructuredTextViewerConfigurationDTD();
public String[] getConfiguredContentTypes(ISourceViewer sourceViewer) {
return baseConfiguration.getConfiguredContentTypes(sourceViewer);
}
public LineStyleProvider[] getLineStyleProviders(ISourceViewer sourceViewer, String partitionType) {
return baseConfiguration.getLineStyleProviders(sourceViewer, partitionType);
}
};
SourceViewer viewer = new StructuredTextViewer(parent, null, null, false, SWT.BORDER | SWT.V_SCROLL | SWT.H_SCROLL);
viewer.getTextWidget().setFont(JFaceResources.getFont("org.eclipse.wst.sse.ui.textfont")); //$NON-NLS-1$
IStructuredModel scratchModel = StructuredModelManager.getModelManager().createUnManagedStructuredModelFor(ContentTypeIdForDTD.ContentTypeID_DTD);
IDocument document = scratchModel.getStructuredDocument();
viewer.configure(sourceViewerConfiguration);
viewer.setDocument(document);
return viewer;
}
private SourceViewer doCreateViewer(Composite parent) {
Label label = new Label(parent, SWT.NONE);
label.setText(DTDUIMessages.NewDTDTemplatesWizardPage_5);
GridData data = new GridData();
data.horizontalSpan = 2;
label.setLayoutData(data);
SourceViewer viewer = createViewer(parent);
viewer.setEditable(false);
Control control = viewer.getControl();
data = new GridData(GridData.FILL_BOTH);
data.horizontalSpan = 2;
data.heightHint = convertHeightInCharsToPixels(5);
// [261274] - source viewer was growing to fit the max line width of the template
data.widthHint = convertWidthInCharsToPixels(2);
control.setLayoutData(data);
return viewer;
}
/**
* Enable/disable controls in page based on fUseTemplateButton's current
* state.
*/
void enableTemplates() {
boolean enabled = fUseTemplateButton.getSelection();
if (!enabled) {
// save last selected template
Template template = getSelectedTemplate();
if (template != null)
fLastSelectedTemplateName = template.getName();
else
fLastSelectedTemplateName = ""; //$NON-NLS-1$
fTableViewer.setSelection(null);
}
else {
setSelectedTemplate(fLastSelectedTemplateName);
}
fTableViewer.getControl().setEnabled(enabled);
fPatternViewer.getControl().setEnabled(enabled);
}
/**
* Return the template preference page id
*
* @return
*/
private String getPreferencePageId() {
return "org.eclipse.wst.sse.ui.preferences.dtd.templates"; //$NON-NLS-1$
}
/**
* Get the currently selected template.
*
* @return
*/
private Template getSelectedTemplate() {
Template template = null;
IStructuredSelection selection = (IStructuredSelection) fTableViewer.getSelection();
if (selection.size() == 1) {
template = (Template) selection.getFirstElement();
}
return template;
}
/**
* Returns template string to insert.
*
* @return String to insert or null if none is to be inserted
*/
String getTemplateString() {
String templateString = null;<|fim▁hole|> TemplateContextType contextType = DTDUIPlugin.getDefault().getTemplateContextRegistry().getContextType(TemplateContextTypeIdsDTD.NEW);
IDocument document = new Document();
TemplateContext context = new DocumentTemplateContext(contextType, document, 0, 0);
try {
TemplateBuffer buffer = context.evaluate(template);
templateString = buffer.getString();
}
catch (Exception e) {
Logger.log(Logger.WARNING_DEBUG, "Could not create template for new dtd", e); //$NON-NLS-1$
}
}
return templateString;
}
void linkClicked() {
String pageId = getPreferencePageId();
PreferenceDialog dialog = PreferencesUtil.createPreferenceDialogOn(getShell(), pageId, new String[]{pageId}, null);
dialog.open();
fTableViewer.refresh();
}
/**
* Load the last template name used in New DTD File wizard.
*/
private void loadLastSavedPreferences() {
String templateName = DTDUIPlugin.getDefault().getPreferenceStore().getString(DTDUIPreferenceNames.NEW_FILE_TEMPLATE_NAME);
if (templateName == null || templateName.length() == 0) {
fLastSelectedTemplateName = ""; //$NON-NLS-1$
fUseTemplateButton.setSelection(false);
}
else {
fLastSelectedTemplateName = templateName;
fUseTemplateButton.setSelection(true);
}
enableTemplates();
}
/**
* Save template name used for next call to New DTD File wizard.
*/
void saveLastSavedPreferences() {
String templateName = ""; //$NON-NLS-1$
Template template = getSelectedTemplate();
if (template != null) {
templateName = template.getName();
}
DTDUIPlugin.getDefault().getPreferenceStore().setValue(DTDUIPreferenceNames.NEW_FILE_TEMPLATE_NAME, templateName);
DTDUIPlugin.getDefault().savePluginPreferences();
}
/**
* Select a template in the table viewer given the template name. If
* template name cannot be found or templateName is null, just select
* first item in table. If no items in table select nothing.
*
* @param templateName
*/
private void setSelectedTemplate(String templateName) {
Object template = null;
if (templateName != null && templateName.length() > 0) {
// pick the last used template
template = fTemplateStore.findTemplate(templateName, TemplateContextTypeIdsDTD.NEW);
}
// no record of last used template so just pick first element
if (template == null) {
// just pick first element
template = fTableViewer.getElementAt(0);
}
if (template != null) {
IStructuredSelection selection = new StructuredSelection(template);
fTableViewer.setSelection(selection, true);
}
}
/**
* Updates the pattern viewer.
*/
void updateViewerInput() {
Template template = getSelectedTemplate();
if (template != null) {
fPatternViewer.getDocument().set(template.getPattern());
}
else {
fPatternViewer.getDocument().set(""); //$NON-NLS-1$
}
}
}<|fim▁end|> |
Template template = getSelectedTemplate();
if (template != null) { |
<|file_name|>hex.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
//! Hex binary-to-text encoding
pub use self::FromHexError::*;
use std::fmt;
use std::error;
/// A trait for converting a value to hexadecimal encoding
pub trait ToHex {
/// Converts the value of `self` to a hex value, returning the owned
/// string.
fn to_hex(&self) -> String;
}
const CHARS: &'static [u8] = b"0123456789abcdef";
impl ToHex for [u8] {
/// Turn a vector of `u8` bytes into a hexadecimal string.
///
/// # Examples
///
/// ```
/// # #![feature(rustc_private)]
/// extern crate serialize;
/// use serialize::hex::ToHex;
///
/// fn main () {
/// let str = [52,32].to_hex();
/// println!("{}", str);
/// }
/// ```
fn to_hex(&self) -> String {
let mut v = Vec::with_capacity(self.len() * 2);
for &byte in self {
v.push(CHARS[(byte >> 4) as usize]);
v.push(CHARS[(byte & 0xf) as usize]);
}
unsafe {
String::from_utf8_unchecked(v)
}
}
}
/// A trait for converting hexadecimal encoded values
pub trait FromHex {
/// Converts the value of `self`, interpreted as hexadecimal encoded data,
/// into an owned vector of bytes, returning the vector.
fn from_hex(&self) -> Result<Vec<u8>, FromHexError>;<|fim▁hole|>pub enum FromHexError {
/// The input contained a character not part of the hex format
InvalidHexCharacter(char, usize),
/// The input had an invalid length
InvalidHexLength,
}
impl fmt::Display for FromHexError {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
match *self {
InvalidHexCharacter(ch, idx) =>
write!(f, "Invalid character '{}' at position {}", ch, idx),
InvalidHexLength => write!(f, "Invalid input length"),
}
}
}
impl error::Error for FromHexError {
fn description(&self) -> &str {
match *self {
InvalidHexCharacter(_, _) => "invalid character",
InvalidHexLength => "invalid length",
}
}
}
impl FromHex for str {
/// Convert any hexadecimal encoded string (literal, `@`, `&`, or `~`)
/// to the byte values it encodes.
///
/// You can use the `String::from_utf8` function to turn a
/// `Vec<u8>` into a string with characters corresponding to those values.
///
/// # Examples
///
/// This converts a string literal to hexadecimal and back.
///
/// ```
/// # #![feature(rustc_private)]
/// extern crate serialize;
/// use serialize::hex::{FromHex, ToHex};
///
/// fn main () {
/// let hello_str = "Hello, World".as_bytes().to_hex();
/// println!("{}", hello_str);
/// let bytes = hello_str.from_hex().unwrap();
/// println!("{:?}", bytes);
/// let result_str = String::from_utf8(bytes).unwrap();
/// println!("{}", result_str);
/// }
/// ```
fn from_hex(&self) -> Result<Vec<u8>, FromHexError> {
// This may be an overestimate if there is any whitespace
let mut b = Vec::with_capacity(self.len() / 2);
let mut modulus = 0;
let mut buf = 0;
for (idx, byte) in self.bytes().enumerate() {
buf <<= 4;
match byte {
b'A'...b'F' => buf |= byte - b'A' + 10,
b'a'...b'f' => buf |= byte - b'a' + 10,
b'0'...b'9' => buf |= byte - b'0',
b' '|b'\r'|b'\n'|b'\t' => {
buf >>= 4;
continue
}
_ => return Err(InvalidHexCharacter(self.char_at(idx), idx)),
}
modulus += 1;
if modulus == 2 {
modulus = 0;
b.push(buf);
}
}
match modulus {
0 => Ok(b.into_iter().collect()),
_ => Err(InvalidHexLength),
}
}
}
#[cfg(test)]
mod tests {
extern crate test;
use self::test::Bencher;
use hex::{FromHex, ToHex};
#[test]
pub fn test_to_hex() {
assert_eq!("foobar".as_bytes().to_hex(), "666f6f626172");
}
#[test]
pub fn test_from_hex_okay() {
assert_eq!("666f6f626172".from_hex().unwrap(),
b"foobar");
assert_eq!("666F6F626172".from_hex().unwrap(),
b"foobar");
}
#[test]
pub fn test_from_hex_odd_len() {
assert!("666".from_hex().is_err());
assert!("66 6".from_hex().is_err());
}
#[test]
pub fn test_from_hex_invalid_char() {
assert!("66y6".from_hex().is_err());
}
#[test]
pub fn test_from_hex_ignores_whitespace() {
assert_eq!("666f 6f6\r\n26172 ".from_hex().unwrap(),
b"foobar");
}
#[test]
pub fn test_to_hex_all_bytes() {
for i in 0..256 {
assert_eq!([i as u8].to_hex(), format!("{:02x}", i as usize));
}
}
#[test]
pub fn test_from_hex_all_bytes() {
for i in 0..256 {
let ii: &[u8] = &[i as u8];
assert_eq!(format!("{:02x}", i as usize).from_hex()
.unwrap(),
ii);
assert_eq!(format!("{:02X}", i as usize).from_hex()
.unwrap(),
ii);
}
}
#[bench]
pub fn bench_to_hex(b: &mut Bencher) {
let s = "イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム \
ウヰノオクヤマ ケフコエテ アサキユメミシ ヱヒモセスン";
b.iter(|| {
s.as_bytes().to_hex();
});
b.bytes = s.len() as u64;
}
#[bench]
pub fn bench_from_hex(b: &mut Bencher) {
let s = "イロハニホヘト チリヌルヲ ワカヨタレソ ツネナラム \
ウヰノオクヤマ ケフコエテ アサキユメミシ ヱヒモセスン";
let sb = s.as_bytes().to_hex();
b.iter(|| {
sb.from_hex().unwrap();
});
b.bytes = sb.len() as u64;
}
}<|fim▁end|> | }
/// Errors that can occur when decoding a hex encoded string
#[derive(Copy, Clone, Debug)] |
<|file_name|>countingones.cpp<|end_file_name|><|fim▁begin|>#include <bits/stdc++.h>
using namespace std;
long long unsigned v[63], a, b;
long long unsigned S(long long unsigned n){
if(!n) return 0;
if((1LL<<((sizeof(long long)<<3) - __builtin_clzll(n)))-1 == n)
return v[(sizeof(long long)<<3) - __builtin_clzll(n)];
return v[(sizeof(long long)<<3) - __builtin_clzll(n)-1] + S(n^(1LL<<((sizeof(long long))<<3) - __builtin_clzll(n)-1)) + n - ((1LL << ((sizeof(long long)<<3) - __builtin_clzll(n)-1))-1);
}
int main(){
v[0] = 0;
for(int i=1; i<63; i++)
v[i] = (v[i-1]<<1) + (1LL<<(i-1));
while(scanf("%llu %llu", &a, &b) != EOF)<|fim▁hole|> printf("%llu\n", S(b)-S(a-1));
return 0;
}<|fim▁end|> | |
<|file_name|>logging.js<|end_file_name|><|fim▁begin|>/**
* Copyright (C) 2014 TopCoder Inc., All Rights Reserved.
* @version 1.1
* @author Sky_, TCSASSEMBLER
* changes in 1.1:
* 1. change handleError. Return sql error with unique constrains as Bad Request.
* 2. close db when request ends.
* 3. don't create transactions for GET requests
*/
"use strict";
var _ = require('underscore');
var async = require('async');
var winston = require('winston');
var BadRequestError = require("../errors/BadRequestError");
var IllegalArgumentError = require("../errors/IllegalArgumentError");
var NotFoundError = require("../errors/NotFoundError");
var initDb = require("../db");
/**
* Api codes
*/
var apiCodes = {
OK: { name: 'OK', value: 200, description: 'Success' },
notModified: { name: 'Not Modified', value: 304, description: 'There was no new data to return.' },
badRequest: { name: 'Bad Request', value: 400, description: 'The request was invalid. An accompanying message will explain why.' },
unauthorized: { name: 'Unauthorized', value: 401, description: 'Authentication credentials were missing or incorrect.' },
forbidden: { name: 'Forbidden', value: 403, description: 'The request is understood, but it has been refused or access is not allowed.' },
notFound: { name: 'Not Found', value: 404, description: 'The URI requested is invalid or the requested resource does not exist.' },
serverError: { name: 'Internal Server Error', value: 500, description: 'Something is broken. Please contact support.' }
};
/**
* Handle error and return as JSON to the response.
* @param {Error} error the error to handle
* @param {Object} res the express response object
*/
function handleError(error, res) {
var errdetail, baseError = apiCodes.serverError;
if (error.isValidationError ||
error instanceof IllegalArgumentError ||
error instanceof BadRequestError) {
baseError = apiCodes.badRequest;
} else if (error instanceof NotFoundError) {
baseError = apiCodes.notFound;
} else if (error.code === 'ER_DUP_ENTRY') {
baseError = apiCodes.badRequest;
if (error.message.indexOf("UC_c_sort") !== -1) {
error.message += ". Pair of the columns 'sort' and 'tab' must be unique.";
}
}
errdetail = _.clone(baseError);
errdetail.details = error.message;
res.statusCode = baseError.value;
res.json(errdetail);
}
/**
* This function create a delegate for the express action.
* Input and output logging is performed.
* Errors are handled also and proper http status code is set.
* Wrapped method must always call the callback function, first param is error, second param is object to return.
* @param {String} signature the signature of the method caller
* @param {Function} fn the express method to call. It must have signature (req, res, callback) or (req, callback). Res
* parameter is optional, because he is usually not used.
* @param {Boolean} customHandled true if the express action is handling the response.
* This is useful for downloading files. Wrapper will render only the error response.
* @returns {Function} the wrapped function
*/
function wrapExpress(signature, fn, customHandled) {
if (!_.isString(signature)) {
throw new Error("signature should be a string");
}
if (!_.isFunction(fn)) {
throw new Error("fn should be a function");
}
return function (req, res, next) {
var paramsToLog, db, transaction, apiResult, canRollback = false, useGlobalDB = req.method === 'GET';
paramsToLog = {
body: req.body,
params: req.params,
query : req.query,
url: req.url
};
winston.info("ENTER %s %j", signature, paramsToLog, {});
var disposeDB = function () {
if (useGlobalDB) {
return;
}
//close db connection
//we need this timeout because there is a bug for parallel requests
setTimeout(function () {
db.driver.close();
}, 1000);
};
async.waterfall([
function (cb) {
if (useGlobalDB) {
db = global.db;
cb();
} else {
async.waterfall([
function (cb) {
initDb(cb, false);
}, function (result, cb) {
db = result;
db.transaction(cb);
}, function (t, cb) {
transaction = t;
canRollback = true;
cb();
}
], cb);
}
}, function (cb) {
if (fn.length === 3) {
fn(req, db, cb);
} else {
fn(req, res, db, cb);
}<|fim▁hole|> cb();
} else {
transaction.commit(cb);
}
}, function (cb) {
if (process.env.NO_LOG_RESPONSE) {
paramsToLog.response = "<disabled>";
} else {
paramsToLog.response = apiResult;
}
winston.info("EXIT %s %j", signature, paramsToLog, {});
if (!customHandled) {
res.json(apiResult);
}
disposeDB();
}
], function (error) {
if (canRollback && transaction) {
transaction.rollback(function () {
});
}
disposeDB();
winston.error("EXIT %s %j\n", signature, paramsToLog, error.stack);
handleError(error, res);
});
};
}
module.exports = {
wrapExpress: wrapExpress,
apiCodes: apiCodes,
handleError: handleError
};<|fim▁end|> | }, function (result, cb) {
apiResult = result;
if (useGlobalDB) { |
<|file_name|>conf.py<|end_file_name|><|fim▁begin|>import sys
import os
extensions = [
'sphinx.ext.todo',
]
source_suffix = '.txt'
master_doc = 'index'
### part to update ###################################
project = u'domogik-plugin-daikcode'
copyright = u'2014, Nico0084'
version = '0.1'
release = version
######################################################
<|fim▁hole|>
html_theme = 'default'
html_static_path = ['_static']
htmlhelp_basename = project<|fim▁end|> | pygments_style = 'sphinx' |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import configs.module
import wsgiref.simple_server
import select
import json
import bot
from urllib import parse
import irc.fullparse
import irc.splitparse
import os.path
def init(options):
m = configs.module.Module(__name__)
if 'wserver' in options['server'].state:
del options['server'].state['wserver']
try:
if 'apiport' in options['server'].entry:
options['server'].state[
'wserver'] = wsgiref.simple_server.make_server(
'', options['server'].entry['apiport'],
application(options['server']))
print(('Opening API server on %d' % options[
'server'].entry['apiport']))
except OSError:
print(('Unable to open API server on %d' % options[
'server'].entry['apiport']))
m.set_help('Access various bot functions from a json API.')
m.add_timer_hook(1 * 1000, timer)
m.add_base_hook('api.action.command', apiactioncommand)
m.add_base_hook('api.path.interface', apipathinterface)
return m
class application:
def __init__(self, server):
self.server = server
def __call__(self, environ, start_response):
ret = {
'status': 'error',
'message': 'unknown',
}
start_response('200 OK',
[('content-type', 'text/html;charset=utf-8')])
path = environ['PATH_INFO'].strip('/')
q = parse.parse_qs(environ['QUERY_STRING'])
action = q['action'][0] if 'action' in q else ''
try:
if path:
ret['message'] = 'unknown request'
ret['status'] = 'error'
self.server.do_base_hook('api.path.%s' % path,
ret, self.server, q, environ)
else:
ret['message'] = 'invalid action'
ret['status'] = 'error'
self.server.do_base_hook('api.action.%s' % action,
ret, self.server, q, environ)
if '_html' in ret:
return [ret['_html'].encode('utf-8')]
except KeyError:
pass
return [json.dumps(ret).encode('utf-8')]
def apiactioncommand(ret, server, q, environ):
del ret['message']
ip = environ['REMOTE_ADDR']
if 'command' not in q:
ret['message'] = 'no command'
ret['status'] = 'error'
if server.type == 'irc':
def process_message(i):
sp = irc.splitparse.SplitParser(i)
fp = irc.fullparse.FullParse(
server, sp, nomore=True)
return fp.execute(sp.text)
ret['output'] = process_message(
':%s!%s PRIVMSG %s :%s' % (':' + ip, "~api@" + ip,
server.nick,
q['command'][0],
))
elif server.type == 'file':
ret['output'] = server.fp(server, q['command'][0])
ret['status'] = 'good'
def apipathinterface(ret, server, q, environ):
del ret['message']
ret['_html'] = open(os.path.dirname(__file__) + '/interface.html').read()
ret['status'] = 'good'
<|fim▁hole|> if 'wserver' not in server.state:
continue
wserver = server.state['wserver']
inr, _, _ = select.select([wserver], [], [], 0.01)
if inr:
wserver.handle_request()<|fim▁end|> |
def timer():
for server in bot.servers(): |
<|file_name|>Orbit.cpp<|end_file_name|><|fim▁begin|>#include "Orbit.hpp"
#include <cmath>
Orbit::Orbit(ThreeVector startPosition, ThreeVector startVelocity, time_t startTime)
{
currentPosition = startPosition::DeepCopy();
currentVelocity = startVelocity::DeepCopy();
time_t = startTime;
}
float Orbit::EscapeVelocity()
{
}
float Orbit::RadiusOfPeriapsis()
{
}
float Orbit::PeriapsisHeight()
{
}
float Orbit::RadiusOfApoapsis()
{<|fim▁hole|> return INFINITY;
#endif
#ifndef INFINITY
#ifdef NAN
return nan("");
#endif
#ifndef NAN
return -1.0; // Best we can do with a dumb compiler.
#endif
#endif
}
}
float Orbit::ApoapsisHeight()
{
}
float Orbit::SemimajorAxis()
{
return ( - gravitationalParameter / 2 / SpecificEnergy() );
}
float Orbit::SemiminorAxis()
{
}
float Orbit::Semiparameter()
{
}
float Orbit::SpecificEnergy()
{
return Speed() * Speed() / 2 - gravitationalParameter / currentPosition.Magnitude();
}
float Orbit::SpecificAngularMomentum()
{
}
float Orbit::Eccentricity()
{
}
float Orbit::Period()
{
}
float Orbit::TrueAnomaly()
{
}
float Orbit::Speed()
{
}<|fim▁end|> | if (SpecificEnergy() >= 0)
{
// We have an infinite apoapsis height. Try to express that in a way the compiler understands.
#ifdef INFINITY |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.