Columns: code (string), signature (string), docstring (string), loss_without_docstring (float64), loss_with_docstring (float64), factor (float64)
r self.hub_height = np.exp(sum( np.log(wind_farm.hub_height) * wind_farm.get_installed_power() for wind_farm in self.wind_farms) / self.get_installed_power()) return self
def mean_hub_height(self)
r""" Calculates the mean hub height of the wind turbine cluster. The mean hub height of a wind turbine cluster is necessary for power output calculations with an aggregated wind turbine cluster power curve. Hub heights of wind farms with higher nominal power weigh more than others. Assigns the hub height to the turbine cluster object. Returns ------- self Notes ----- The following equation is used [1]_: .. math:: h_{WTC} = e^{\sum\limits_{k}{ln(h_{WF,k})} \frac{P_{N,k}}{\sum\limits_{k}{P_{N,k}}}} with: :math:`h_{WTC}`: mean hub height of wind turbine cluster, :math:`h_{WF,k}`: hub height of the k-th wind farm of the cluster, :math:`P_{N,k}`: installed power of the k-th wind farm References ---------- .. [1] Knorr, K.: "Modellierung von raum-zeitlichen Eigenschaften der Windenergieeinspeisung für wetterdatenbasierte Windleistungssimulationen". Universität Kassel, Diss., 2016, p. 35
5.919799
4.349205
1.361122
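The weighted geometric mean from the formula above can be reproduced in a few lines. A minimal standalone sketch follows, assuming two hypothetical wind farms whose hub heights and installed powers are invented for illustration and are not taken from library data.

import numpy as np

hub_heights = np.array([98.0, 135.0])        # h_WF,k in m (illustrative)
installed_power = np.array([12e6, 25.2e6])   # P_N,k in W (illustrative)

# h_WTC = exp( sum_k ln(h_WF,k) * P_N,k / sum_k P_N,k )
mean_hub_height = np.exp(
    np.sum(np.log(hub_heights) * installed_power) / installed_power.sum())
print(round(mean_hub_height, 2))  # farms with more installed power weigh more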
r for wind_farm in self.wind_farms: wind_farm.installed_power = wind_farm.get_installed_power() return sum(wind_farm.installed_power for wind_farm in self.wind_farms)
def get_installed_power(self)
r""" Calculates the installed power of a wind turbine cluster. Returns ------- float Installed power of the wind turbine cluster.
3.160172
3.25231
0.97167
r # Assign wind farm power curves to wind farms of wind turbine cluster for farm in self.wind_farms: # Assign hub heights (needed for power curve and later for # hub height of turbine cluster) farm.mean_hub_height() # Assign wind farm power curve farm.assign_power_curve( wake_losses_model=wake_losses_model, smoothing=smoothing, block_width=block_width, standard_deviation_method=standard_deviation_method, smoothing_order=smoothing_order, turbulence_intensity=turbulence_intensity, **kwargs) # Create data frame from power curves of all wind farms df = pd.concat([farm.power_curve.set_index(['wind_speed']).rename( columns={'value': farm.name}) for farm in self.wind_farms], axis=1) # Sum up power curves cluster_power_curve = pd.DataFrame( df.interpolate(method='index').sum(axis=1)) cluster_power_curve.columns = ['value'] # Return wind speed (index) to a column of the data frame cluster_power_curve.reset_index('wind_speed', inplace=True) self.power_curve = cluster_power_curve return self
def assign_power_curve(self, wake_losses_model='power_efficiency_curve', smoothing=False, block_width=0.5, standard_deviation_method='turbulence_intensity', smoothing_order='wind_farm_power_curves', turbulence_intensity=None, **kwargs)
r""" Calculates the power curve of a wind turbine cluster. The turbine cluster power curve is calculated by aggregating the wind farm power curves of wind farms within the turbine cluster. Depending on the parameters the power curves are smoothed (before or after the aggregation) and/or a wind farm efficiency is applied before the aggregation. After the calculations the power curve is assigned to the attribute `power_curve`. Parameters ---------- wake_losses_model : string Defines the method for taking wake losses within the farm into consideration. Options: 'power_efficiency_curve', 'constant_efficiency' or None. Default: 'power_efficiency_curve'. smoothing : boolean If True the power curves will be smoothed before or after the aggregation of power curves depending on `smoothing_order`. Default: False. block_width : float Width between the wind speeds in the sum of the equation in :py:func:`~.power_curves.smooth_power_curve`. Default: 0.5. standard_deviation_method : string Method for calculating the standard deviation for the Gauss distribution. Options: 'turbulence_intensity', 'Staffell_Pfenninger'. Default: 'turbulence_intensity'. smoothing_order : string Defines when the smoothing takes place if `smoothing` is True. Options: 'turbine_power_curves' (to the single turbine power curves), 'wind_farm_power_curves'. Default: 'wind_farm_power_curves'. turbulence_intensity : float Turbulence intensity at hub height of the wind farm or wind turbine cluster for power curve smoothing with 'turbulence_intensity' method. Can be calculated from `roughness_length` instead. Default: None. Other Parameters ---------------- roughness_length : float, optional. Roughness length. If `standard_deviation_method` is 'turbulence_intensity' and `turbulence_intensity` is not given the turbulence intensity is calculated via the roughness length. Returns ------- self
3.193282
3.177901
1.00484
r # specification of wind farm data example_farm_data = { 'name': 'example_farm', 'wind_turbine_fleet': [{'wind_turbine': my_turbine, 'number_of_turbines': 6}, {'wind_turbine': e126, 'number_of_turbines': 3} ]} # initialize WindFarm object example_farm = WindFarm(**example_farm_data) # specification of wind farm data (2) containing a wind farm efficiency # and coordinates example_farm_2_data = { 'name': 'example_farm_2', 'wind_turbine_fleet': [{'wind_turbine': my_turbine, 'number_of_turbines': 6}, {'wind_turbine': e126, 'number_of_turbines': 3}], 'efficiency': 0.9, 'coordinates': [52.2, 13.1]} # initialize WindFarm object example_farm_2 = WindFarm(**example_farm_2_data) return example_farm, example_farm_2
def initialize_wind_farms(my_turbine, e126)
r""" Initializes two :class:`~.wind_farm.WindFarm` objects. This function shows how to initialize a WindFarm object. You need to provide at least a name and a the wind farm's wind turbine fleet as done below for 'example_farm'. Optionally you can provide a wind farm efficiency (which can be constant or dependent on the wind speed) and coordinates as done for 'example_farm_2'. In this example the coordinates are not being used as just a single weather data set is provided as example data. Parameters ---------- my_turbine : WindTurbine WindTurbine object with self provided power curve. e126 : WindTurbine WindTurbine object with power curve from data file provided by the windpowerlib. Returns ------- Tuple (WindFarm, WindFarm)
2.408631
1.940884
1.240997
r # specification of cluster data example_cluster_data = { 'name': 'example_cluster', 'wind_farms': [example_farm, example_farm_2]} # initialize WindTurbineCluster object example_cluster = WindTurbineCluster(**example_cluster_data) return example_cluster
def initialize_wind_turbine_cluster(example_farm, example_farm_2)
r""" Initializes a :class:`~.wind_turbine_cluster.WindTurbineCluster` object. Function shows how to initialize a WindTurbineCluster object. In this case the cluster only contains two wind farms. Parameters ---------- example_farm : WindFarm WindFarm object. example_farm_2 : WindFarm WindFarm object constant wind farm efficiency and coordinates. Returns ------- WindTurbineCluster
4.230113
3.958423
1.068636
r # set efficiency of example_farm to apply wake losses example_farm.efficiency = 0.9 # power output calculation for example_farm # initialize TurbineClusterModelChain with default parameters and use # run_model method to calculate power output mc_example_farm = TurbineClusterModelChain(example_farm).run_model(weather) # write power output time series to WindFarm object example_farm.power_output = mc_example_farm.power_output # power output calculation for turbine_cluster # own specifications for TurbineClusterModelChain setup modelchain_data = { 'wake_losses_model': 'constant_efficiency', # # 'dena_mean' (default), None, # 'power_efficiency_curve', # 'constant_efficiency' or name of # a wind efficiency curve # see :py:func:`~.wake_losses.get_wind_efficiency_curve` 'smoothing': True, # False (default) or True 'block_width': 0.5, # default: 0.5 'standard_deviation_method': 'Staffell_Pfenninger', # # 'turbulence_intensity' (default) # or 'Staffell_Pfenninger' 'smoothing_order': 'wind_farm_power_curves', # # 'wind_farm_power_curves' (default) or # 'turbine_power_curves' 'wind_speed_model': 'logarithmic', # 'logarithmic' (default), # 'hellman' or # 'interpolation_extrapolation' 'density_model': 'ideal_gas', # 'barometric' (default), 'ideal_gas' or # 'interpolation_extrapolation' 'temperature_model': 'linear_gradient', # 'linear_gradient' (def.) or # 'interpolation_extrapolation' 'power_output_model': 'power_curve', # 'power_curve' (default) or # 'power_coefficient_curve' 'density_correction': True, # False (default) or True 'obstacle_height': 0, # default: 0 'hellman_exp': None} # None (default) or None # initialize TurbineClusterModelChain with own specifications and use # run_model method to calculate power output mc_example_cluster = TurbineClusterModelChain( example_cluster, **modelchain_data).run_model(weather) # write power output time series to WindTurbineCluster object example_cluster.power_output = mc_example_cluster.power_output return
def calculate_power_output(weather, example_farm, example_cluster)
r""" Calculates power output of wind farms and clusters using the :class:`~.turbine_cluster_modelchain.TurbineClusterModelChain`. The :class:`~.turbine_cluster_modelchain.TurbineClusterModelChain` is a class that provides all necessary steps to calculate the power output of a wind farm or cluster. You can either use the default methods for the calculation steps, as done for 'example_farm', or choose different methods, as done for 'example_cluster'. Parameters ---------- weather : pd.DataFrame Contains weather data time series. example_farm : WindFarm WindFarm object. example_cluster : WindTurbineCluster WindTurbineCluster object.
4.21272
3.73343
1.128378
r # plot or print power output if plt: example_cluster.power_output.plot(legend=True, label='example cluster') example_farm.power_output.plot(legend=True, label='example farm') plt.show() else: print(example_cluster.power_output) print(example_farm.power_output)
def plot_or_print(example_farm, example_cluster)
r""" Plots or prints power output and power (coefficient) curves. Parameters ---------- example_farm : WindFarm WindFarm object. example_farm_2 : WindFarm WindFarm object constant wind farm efficiency and coordinates.
3.066988
2.963578
1.034894
r weather = mc_e.get_weather_data('weather.csv') my_turbine, e126, dummy_turbine = mc_e.initialize_wind_turbines() example_farm, example_farm_2 = initialize_wind_farms(my_turbine, e126) example_cluster = initialize_wind_turbine_cluster(example_farm, example_farm_2) calculate_power_output(weather, example_farm, example_cluster) plot_or_print(example_farm, example_cluster)
def run_example()
r""" Runs the example.
6.944409
6.650965
1.04412
r def isfloat(x): try: float(x) return x except ValueError: return False try: df = pd.read_csv(file_, index_col=0) except FileNotFoundError: raise FileNotFoundError("The file '{}' was not found.".format(file_)) wpp_df = df[df.turbine_id == turbine_type] # if turbine not in data file if wpp_df.shape[0] == 0: pd.set_option('display.max_rows', len(df)) logging.info('Possible types: \n{0}'.format(df.turbine_id)) pd.reset_option('display.max_rows') sys.exit('Cannot find the wind converter type: {0}'.format( turbine_type)) # if turbine in data file select power (coefficient) curve columns and # drop nans cols = [_ for _ in wpp_df.columns if isfloat(_)] curve_data = wpp_df[cols].dropna(axis=1) df = curve_data.transpose().reset_index() df.columns = ['wind_speed', 'value'] df['wind_speed'] = df['wind_speed'].apply(lambda x: float(x)) nominal_power = wpp_df['p_nom'].iloc[0] return df, nominal_power
def get_turbine_data_from_file(turbine_type, file_)
r""" Fetches power (coefficient) curve data from a csv file. See `example_power_curves.csv' and `example_power_coefficient_curves.csv` in example/data for the required format of a csv file. The self-provided csv file may contain more columns than the example files. Only columns containing wind speed and the corresponding power or power coefficient as well as the column 'nominal_power' are taken into account. Parameters ---------- turbine_type : string Specifies the turbine type data is fetched for. file_ : string Specifies the source of the turbine data. See the example below for how to use the example data. Returns ------- Tuple (pandas.DataFrame, float) Power curve or power coefficient curve (pandas.DataFrame) and nominal power (float). Power (coefficient) curve DataFrame contains power coefficient curve values (dimensionless) or power curve values in W as column names with the corresponding wind speeds in m/s. Examples -------- >>> from windpowerlib import wind_turbine >>> import os >>> source = os.path.join(os.path.dirname(__file__), '../example/data', ... 'example_power_curves.csv') >>> example_turbine = { ... 'hub_height': 100, ... 'rotor_diameter': 70, ... 'name': 'DUMMY 3', ... 'fetch_curve': 'power_curve', ... 'data_source': source} >>> e_t_1 = wind_turbine.WindTurbine(**example_turbine) >>> print(e_t_1.power_curve['value'][7]) 18000.0 >>> print(e_t_1.nominal_power) 150000
3.710161
3.678839
1.008514
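The csv format described above (wind speeds as column names, power values in the turbine's row) is reshaped into a long wind_speed/value frame. A hedged sketch of that wide-to-long step, using an in-memory frame instead of a csv file; all numbers and the 'DUMMY 3' name are illustrative only:

import pandas as pd

wide = pd.DataFrame({'turbine_id': ['DUMMY 3'], 'p_nom': [150000],
                     '4.0': [2000.0], '7.0': [18000.0], '10.0': [75000.0]})

row = wide[wide.turbine_id == 'DUMMY 3']
curve_cols = [c for c in row.columns if c not in ('turbine_id', 'p_nom')]
curve = row[curve_cols].transpose().reset_index()   # columns become rows
curve.columns = ['wind_speed', 'value']
curve['wind_speed'] = curve['wind_speed'].astype(float)
nominal_power = row['p_nom'].iloc[0]
print(curve, nominal_power)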
r # hdf5 filename filename = os.path.join(os.path.dirname(__file__), 'data', 'turbine_data_oedb.h5') if os.path.isfile(filename) and not overwrite: logging.debug("Turbine data is fetched from {}".format(filename)) with pd.HDFStore(filename) as hdf_store: turbine_data = hdf_store.get('turbine_data') else: turbine_data = load_turbine_data_from_oedb() turbine_data.set_index('turbine_type', inplace=True) # Set `curve` depending on `fetch_curve` to match names in oedb curve = ('cp_curve' if fetch_curve == 'power_coefficient_curve' else fetch_curve) # Select curve and nominal power of turbine type try: df = turbine_data.loc[turbine_type] except KeyError: raise KeyError("Turbine type '{}' not in database. ".format( turbine_type) + "Use 'get_turbine_types()' to see a table of " + "possible wind turbine types.") if df[curve] is not None: df = pd.DataFrame(df[curve]) else: sys.exit("{} of {} not available in ".format(curve, turbine_type) + "oedb. Use 'get_turbine_types()' to see for which turbine " + "types power coefficient curves are available.") nominal_power = turbine_data.loc[turbine_type][ 'installed_capacity_kw'] * 1000 df.columns = ['wind_speed', 'value'] if fetch_curve == 'power_curve': # power in W df['value'] = df['value'] * 1000 return df, nominal_power
def get_turbine_data_from_oedb(turbine_type, fetch_curve, overwrite=False)
r""" Fetches data for one wind turbine type from the OpenEnergy Database (oedb). If turbine data exists in local repository it is loaded from this file. The file is created when turbine data was loaded from oedb in :py:func:`~.load_turbine_data_from_oedb`. Use this function with `overwrite=True` to overwrite your file with newly fetched data. Use :py:func:`~.check_local_turbine_data` to check weather your local file is up to date. Parameters ---------- turbine_type : string Specifies the turbine type data is fetched for. Use :py:func:`~.get_turbine_types` to see a table of all wind turbines for which power (coefficient) curve data is provided. fetch_curve : string Parameter to specify whether a power or power coefficient curve should be retrieved from the provided turbine data. Valid options are 'power_curve' and 'power_coefficient_curve'. Default: None. overwrite : boolean If True local file is overwritten by newly fetch data from oedb, if False turbine data is fetched from previously saved file. Returns ------- Tuple (pandas.DataFrame, float) Power curve or power coefficient curve (pandas.DataFrame) and nominal power (float) of one wind turbine type. Power (coefficient) curve DataFrame contains power coefficient curve values (dimensionless) or power curve values in W with the corresponding wind speeds in m/s.
3.289334
2.988682
1.100597
r # url of OpenEnergy Platform that contains the oedb oep_url = 'http://oep.iks.cs.ovgu.de/' # location of data schema = 'model_draft' table = 'openfred_windpower_powercurve' # load data result = requests.get( oep_url + '/api/v0/schema/{}/tables/{}/rows/?'.format( schema, table), ) if not result.status_code == 200: raise ConnectionError("Database connection not successful. " "Response: [{}]".format(result.status_code)) # extract data to data frame turbine_data = pd.DataFrame(result.json()) # store data as hdf5 filename = os.path.join(os.path.dirname(__file__), 'data', 'turbine_data_oedb.h5') with pd.HDFStore(filename) as hdf_store: hdf_store.put('turbine_data', turbine_data) logging.debug("Turbine data is fetched from oedb and saved " "to {}".format(filename)) return turbine_data
def load_turbine_data_from_oedb()
r""" Loads turbine data from the OpenEnergy Database (oedb). Turbine data is saved to `filename` for offline usage of windpowerlib. Returns ------- turbine_data : pd.DataFrame Contains turbine data of different turbines such as 'manufacturer', 'turbine_type', nominal power ('installed_capacity_kw').
4.525019
4.378301
1.03351
r df = load_turbine_data_from_oedb() cp_curves_df = df.iloc[df.loc[df['has_cp_curve']].index][ ['manufacturer', 'turbine_type', 'has_cp_curve']] p_curves_df = df.iloc[df.loc[df['has_power_curve']].index][ ['manufacturer', 'turbine_type', 'has_power_curve']] curves_df = pd.merge(p_curves_df, cp_curves_df, how='outer', sort=True).fillna(False) if print_out: pd.set_option('display.max_rows', len(curves_df)) print(curves_df) pd.reset_option('display.max_rows') return curves_df
def get_turbine_types(print_out=True)
r""" Get the names of all possible wind turbine types for which the power coefficient curve or power curve is provided in the OpenEnergy Data Base (oedb). Parameters ---------- print_out : boolean Directly prints a tabular containing the turbine types in column 'turbine_type', the manufacturer in column 'manufacturer' and information about whether a power (coefficient) curve exists (True) or not (False) in columns 'has_power_curve' and 'has_cp_curve'. Default: True. Returns ------- curves_df : pd.DataFrame Contains turbine types in column 'turbine_type', the manufacturer in column 'manufacturer' and information about whether a power (coefficient) curve exists (True) or not (False) in columns 'has_power_curve' and 'has_cp_curve'. Examples -------- >>> from windpowerlib import wind_turbine >>> df = wind_turbine.get_turbine_types(print_out=False) >>> print(df[df["turbine_type"].str.contains("E-126")].iloc[0]) manufacturer Enercon turbine_type E-126/4200 has_power_curve True has_cp_curve True Name: 5, dtype: object >>> print(df[df["manufacturer"].str.contains("Enercon")].iloc[0]) manufacturer Enercon turbine_type E-101/3050 has_power_curve True has_cp_curve True Name: 1, dtype: object
2.811805
2.285057
1.230518
r if data_source == 'oedb': curve_df, nominal_power = get_turbine_data_from_oedb( turbine_type=self.name, fetch_curve=fetch_curve) else: curve_df, nominal_power = get_turbine_data_from_file( turbine_type=self.name, file_=data_source) if fetch_curve == 'power_curve': self.power_curve = curve_df elif fetch_curve == 'power_coefficient_curve': self.power_coefficient_curve = curve_df else: raise ValueError("'{0}' is an invalid value. ".format( fetch_curve) + "`fetch_curve` must be " + "'power_curve' or 'power_coefficient_curve'.") if self.nominal_power is None: self.nominal_power = nominal_power return self
def fetch_turbine_data(self, fetch_curve, data_source)
r""" Fetches data of the requested wind turbine. Method fetches nominal power as well as power coefficient curve or power curve from a data set provided in the OpenEnergy Database (oedb). You can also import your own power (coefficient) curves from a file. For that the wind speeds in m/s have to be in the first row and the corresponding power coefficient curve values or power curve values in W in a row where the first column contains the turbine name. See `example_power_curves.csv' and `example_power_coefficient_curves.csv` in example/data for the required form of a csv file (more columns can be added). See :py:func:`~.get_turbine_data_from_file` for an example reading data from a csv file. Parameters ---------- fetch_curve : string Parameter to specify whether a power or power coefficient curve should be retrieved from the provided turbine data. Valid options are 'power_curve' and 'power_coefficient_curve'. Default: None. data_source : string Specifies whether turbine data (f.e. nominal power, power curve, power coefficient curve) is loaded from the OpenEnergy Database ('oedb') or from a csv file ('<path including file name>'). Default: 'oedb'. Returns ------- self Examples -------- >>> from windpowerlib import wind_turbine >>> enerconE126 = { ... 'hub_height': 135, ... 'rotor_diameter': 127, ... 'name': 'E-126/4200', ... 'fetch_curve': 'power_coefficient_curve', ... 'data_source': 'oedb'} >>> e126 = wind_turbine.WindTurbine(**enerconE126) >>> print(e126.power_coefficient_curve['value'][5]) 0.44 >>> print(e126.nominal_power) 4200000.0
2.592327
2.514681
1.030877
coef = mod_math.cos(latitude_1 / 180. * mod_math.pi) x = latitude_1 - latitude_2 y = (longitude_1 - longitude_2) * coef return mod_math.sqrt(x * x + y * y) * ONE_DEGREE
def distance(latitude_1, longitude_1, latitude_2, longitude_2)
Distance between two points.
3.397823
3.297648
1.030378
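The distance above is a flat-earth (equirectangular) approximation. A standalone sketch follows; ONE_DEGREE is assumed to be the length of one degree of latitude in meters and may differ slightly from the constant the library uses.

import math

ONE_DEGREE = 111_120.0  # approx. meters per degree of latitude (assumption)

def distance(lat1, lon1, lat2, lon2):
    coef = math.cos(math.radians(lat1))   # shrink longitude spacing with latitude
    dx = lat1 - lat2
    dy = (lon1 - lon2) * coef
    return math.sqrt(dx * dx + dy * dy) * ONE_DEGREE

# roughly 111 km for one degree of latitude at the equator
print(round(distance(0.0, 0.0, 1.0, 0.0)))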
if i <= 0: return color1 if i >= 1: return color2 return (int(color1[0] + (color2[0] - color1[0]) * i), int(color1[1] + (color2[1] - color1[1]) * i), int(color1[2] + (color2[2] - color1[2]) * i))
def get_color_between(color1, color2, i)
i is a number between 0 and 1: 0 returns color1, 1 returns color2, and values in between return a linear blend of the two.
1.410339
1.37387
1.026545
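A minimal sketch of the linear colour interpolation above, written as a standalone function so it can be tried without the rest of the module:

def get_color_between(color1, color2, i):
    """Blend two RGB tuples; i=0 gives color1, i=1 gives color2."""
    if i <= 0:
        return color1
    if i >= 1:
        return color2
    return tuple(int(c1 + (c2 - c1) * i) for c1, c2 in zip(color1, color2))

print(get_color_between((0, 0, 0), (0, 255, 0), 0.5))  # -> (0, 127, 0)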
tile = self.get_file(latitude, longitude) if tile is None: return None return tile._InverseDistanceWeighted(latitude, longitude, radius)
def _IDW(self, latitude, longitude, radius=1)
Return the interpolated elevation at a point. Load the correct tile for latitude and longitude given. If the tile doesn't exist, return None. Otherwise, call the tile's Inverse Distance Weighted function and return the elevation. Args: latitude: float with the latitude in decimal degrees longitude: float with the longitude in decimal degrees radius: int of 1 or 2 indicating the approximate radius of adjacent cells to include Returns: a float of the interpolated elevation with the same unit as the .hgt file (meters)
6.164506
4.929775
1.250464
file_name = self.get_file_name(latitude, longitude) if not file_name: return None if (file_name in self.files): return self.files[file_name] else: data = self.retrieve_or_load_file_data(file_name) if not data: return None result = GeoElevationFile(file_name, data, self) # Store file (if in batch mode, just keep most recent) if self.batch_mode: self.files = {file_name: result} else: self.files[file_name] = result return result
def get_file(self, latitude, longitude)
If the file can't be found -- it will be retrieved from the server.
3.156027
3.046951
1.035798
if not size or len(size) != 2: raise Exception('Invalid size %s' % size) if not latitude_interval or len(latitude_interval) != 2: raise Exception('Invalid latitude interval %s' % latitude_interval) if not longitude_interval or len(longitude_interval) != 2: raise Exception('Invalid longitude interval %s' % longitude_interval) width, height = size width, height = int(width), int(height) latitude_from, latitude_to = latitude_interval longitude_from, longitude_to = longitude_interval if mode == 'array': import numpy as np array = np.empty((height,width)) for row in range(height): for column in range(width): latitude = latitude_from + float(row) / height * (latitude_to - latitude_from) longitude = longitude_from + float(column) / width * (longitude_to - longitude_from) elevation = self.get_elevation(latitude, longitude) array[row,column] = elevation return array elif mode == 'image': try: import Image as mod_image except: from PIL import Image as mod_image try: import ImageDraw as mod_imagedraw except: from PIL import ImageDraw as mod_imagedraw image = mod_image.new('RGBA', (width, height), (255, 255, 255, 255)) draw = mod_imagedraw.Draw(image) max_elevation -= min_elevation for row in range(height): for column in range(width): latitude = latitude_from + float(row) / height * (latitude_to - latitude_from) longitude = longitude_from + float(column) / width * (longitude_to - longitude_from) elevation = self.get_elevation(latitude, longitude) if elevation == None: color = unknown_color else: elevation_coef = (elevation - min_elevation) / float(max_elevation) if elevation_coef < 0: elevation_coef = 0 if elevation_coef > 1: elevation_coef = 1 color = mod_utils.get_color_between(min_color, max_color, elevation_coef) if elevation <= 0: color = zero_color draw.point((column, height - row), color) return image else: raise Exception('Invalid mode ' + mode)
def get_image(self, size, latitude_interval, longitude_interval, max_elevation, min_elevation=0, unknown_color = (255, 255, 255, 255), zero_color = (0, 0, 255, 255), min_color = (0, 0, 0, 255), max_color = (0, 255, 0, 255), mode='image')
Returns a numpy array or PIL image.
1.758333
1.780717
0.98743
if only_missing: original_elevations = list(map(lambda point: point.elevation, gpx.walk(only_points=True))) if smooth: self._add_sampled_elevations(gpx) else: for point in gpx.walk(only_points=True): ele = self.get_elevation(point.latitude, point.longitude) if ele is not None: point.elevation = ele for i in range(gpx_smooth_no): gpx.smooth(vertical=True, horizontal=False) if only_missing: for original_elevation, point in zip(original_elevations, list(gpx.walk(only_points=True))): if original_elevation != None: point.elevation = original_elevation
def add_elevations(self, gpx, only_missing=False, smooth=False, gpx_smooth_no=0)
only_missing -- if True only points without elevation will get a SRTM value smooth -- if True interpolate between points if gpx_smooth_no > 0 -- execute gpx.smooth(vertical=True)
2.585194
2.512485
1.028939
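A hedged usage sketch of enriching a GPX track with SRTM elevations. It assumes the gpxpy package and a local 'track.gpx' file are available (both are assumptions, not part of the code above); the srtm names follow the functions shown in this section.

import gpxpy
import srtm

with open('track.gpx') as f:          # hypothetical input file
    gpx = gpxpy.parse(f)

elevation_data = srtm.get_data()
# keep existing elevations, smooth the newly added ones
elevation_data.add_elevations(gpx, only_missing=True, smooth=True)
print(gpx.tracks[0].segments[0].points[0].elevation)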
for track in gpx.tracks: for segment in track.segments: last_interval_changed = 0 previous_point = None length = 0 for no, point in enumerate(segment.points): if previous_point: length += point.distance_2d(previous_point) if no == 0 or no == len(segment.points) - 1 or length > last_interval_changed: last_interval_changed += min_interval_length point.elevation = self.get_elevation(point.latitude, point.longitude) else: point.elevation = None previous_point = point gpx.add_missing_elevations()
def _add_interval_elevations(self, gpx, min_interval_length=100)
Adds an elevation to a point every min_interval_length (meters along the segment) and interpolates the missing elevations in between.
2.691059
2.632395
1.022286
if not (self.latitude - self.resolution <= latitude < self.latitude + 1): raise Exception('Invalid latitude %s for file %s' % (latitude, self.file_name)) if not (self.longitude <= longitude < self.longitude + 1 + self.resolution): raise Exception('Invalid longitude %s for file %s' % (longitude, self.file_name)) row, column = self.get_row_and_column(latitude, longitude) if approximate: return self.approximation(latitude, longitude) else: return self.get_elevation_from_row_and_column(int(row), int(column))
def get_elevation(self, latitude, longitude, approximate=None)
If approximate is True a basic approximation from nearby grid points is calculated, otherwise only the nearest point from the SRTM grid is used.
2.628476
2.52994
1.038948
d = 1. / self.square_side d_meters = d * mod_utils.ONE_DEGREE # Since the less the distance => the more important should be the # distance of the point, we'll use d-distance as importance coef # here: importance_1 = d_meters - mod_utils.distance(latitude + d, longitude, latitude, longitude) elevation_1 = self.geo_elevation_data.get_elevation(latitude + d, longitude, approximate=False) importance_2 = d_meters - mod_utils.distance(latitude - d, longitude, latitude, longitude) elevation_2 = self.geo_elevation_data.get_elevation(latitude - d, longitude, approximate=False) importance_3 = d_meters - mod_utils.distance(latitude, longitude + d, latitude, longitude) elevation_3 = self.geo_elevation_data.get_elevation(latitude, longitude + d, approximate=False) importance_4 = d_meters - mod_utils.distance(latitude, longitude - d, latitude, longitude) elevation_4 = self.geo_elevation_data.get_elevation(latitude, longitude - d, approximate=False) # TODO(TK) Check if coordinates inside the same file, and only then decide if to call # self.geo_elevation_data.get_elevation or just self.get_elevation if elevation_1 == None or elevation_2 == None or elevation_3 == None or elevation_4 == None: elevation = self.get_elevation(latitude, longitude, approximate=False) if not elevation: return None elevation_1 = elevation_1 or elevation elevation_2 = elevation_2 or elevation elevation_3 = elevation_3 or elevation elevation_4 = elevation_4 or elevation # Normalize importance: sum_importances = float(importance_1 + importance_2 + importance_3 + importance_4) # Check normalization: assert abs(importance_1 / sum_importances + \ importance_2 / sum_importances + \ importance_3 / sum_importances + \ importance_4 / sum_importances - 1 ) < 0.000001 result = importance_1 / sum_importances * elevation_1 + \ importance_2 / sum_importances * elevation_2 + \ importance_3 / sum_importances * elevation_3 + \ importance_4 / sum_importances * elevation_4 return result
def approximation(self, latitude, longitude)
Dummy approximation with the nearest points. The nearer the neighbour, the more weight its elevation is given.
2.665009
2.613198
1.019827
if radius == 1: offsetmatrix = (None, (0, 1), None, (-1, 0), (0, 0), (1, 0), None, (0, -1), None) elif radius == 2: offsetmatrix = (None, None, (0, 2), None, None, None, (-1, 1), (0, 1), (1, 1), None, (-2, 0), (-1, 0), (0, 0), (1, 0), (2, 0), None, (-1, -1), (0, -1), (1, -1), None, None, None, (0, -2), None, None) else: raise ValueError("Radius {} invalid, " "expected 1 or 2".format(radius)) row, column = self.get_row_and_column(latitude, longitude) center_lat, center_long = self.get_lat_and_long(row, column) if latitude == center_lat and longitude == center_long: # return direct elev at point (infinite weight) return self.get_elevation_from_row_and_column(int(row), int(column)) weights = 0 elevation = 0 for offset in offsetmatrix: if (offset is not None and 0 <= row + offset[0] < self.square_side and 0 <= column + offset[1] < self.square_side): cell = self.get_elevation_from_row_and_column(int(row + offset[0]), int(column + offset[1])) if cell is not None: # does not need to be meters, anything proportional distance = mod_utils.distance(latitude, longitude, center_lat + float(offset[0])/(self.square_side-1), center_long + float(offset[1])/(self.square_side-1)) weights += 1/distance elevation += cell/distance return elevation/weights
def _InverseDistanceWeighted(self, latitude, longitude, radius=1)
Return the Inverse Distance Weighted Elevation. Interpolate the elevation of the given point using the inverse distance weighting algorithm (with an exponent of 1) in the form: sum((1/distance) * elevation)/sum(1/distance) for each point in the matrix. The matrix size is determined by the radius. A radius of 1 uses 5 points and a radius of 2 uses 13 points. The matrices are set up to use cells adjacent to and including the one that contains the given point. Any cells referenced by the matrix that are on neighboring tiles are ignored. Args: latitude: float of the latitude in decimal degrees longitude: float of the longitude in decimal degrees radius: int of 1 or 2 indicating the size of the matrix Returns: a float of the interpolated elevation in the same units as the underlying .hgt file (meters) Exceptions: raises a ValueError if an invalid radius is supplied
2.765387
2.736322
1.010622
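The weighting scheme in the docstring can be shown in isolation. A standalone sketch of inverse distance weighting with an exponent of 1 follows; the sample distances and elevations are invented, not SRTM data.

def idw(samples, eps=1e-12):
    """samples: iterable of (distance_to_query_point, elevation)."""
    weights = 0.0
    weighted_elevation = 0.0
    for dist, elev in samples:
        if dist < eps:               # query point coincides with a sample
            return elev
        weights += 1.0 / dist
        weighted_elevation += elev / dist
    # sum((1/d) * elevation) / sum(1/d)
    return weighted_elevation / weights

print(idw([(10.0, 120.0), (20.0, 140.0), (40.0, 200.0)]))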
groups = mod_re.findall('([NS])(\d+)([EW])(\d+)\.hgt', self.file_name) assert groups and len(groups) == 1 and len(groups[0]) == 4, 'Invalid file name {0}'.format(self.file_name) groups = groups[0] if groups[0] == 'N': latitude = float(groups[1]) else: latitude = - float(groups[1]) if groups[2] == 'E': longitude = float(groups[3]) else: longitude = - float(groups[3]) self.latitude = latitude self.longitude = longitude
def parse_file_name_starting_position(self)
Parses (latitude, longitude) of the lower-left point of the file from its name and stores them on self.latitude and self.longitude.
2.619765
2.281832
1.148097
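A quick illustration of the file-name convention parsed above: SRTM tiles are named after the latitude/longitude of their lower-left corner, with N/S and E/W prefixes giving the sign.

import re

def parse_tile_name(file_name):
    m = re.match(r'([NS])(\d+)([EW])(\d+)\.hgt', file_name)
    ns, lat, ew, lon = m.groups()
    latitude = float(lat) if ns == 'N' else -float(lat)
    longitude = float(lon) if ew == 'E' else -float(lon)
    return latitude, longitude

print(parse_tile_name('N44E018.hgt'))   # -> (44.0, 18.0)
print(parse_tile_name('S33W070.hgt'))   # -> (-33.0, -70.0)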
if not file_handler: file_handler = FileHandler() if not srtm1 and not srtm3: raise Exception('At least one of srtm1 and srtm3 must be True') srtm1_files, srtm3_files = _get_urls(use_included_urls, file_handler) assert srtm1_files assert srtm3_files if not srtm1: srtm1_files = {} if not srtm3: srtm3_files = {} assert srtm1_files or srtm3_files return mod_data.GeoElevationData(srtm1_files, srtm3_files, file_handler=file_handler, leave_zipped=leave_zipped, batch_mode=batch_mode)
def get_data(srtm1=True, srtm3=True, leave_zipped=False, file_handler=None, use_included_urls=True, batch_mode=False)
Get the utility object for querying elevation data. All data files will be stored locally (note that this may be gigabytes of data, so clean it up from time to time). On the first run the URLs of all needed files are stored, and for every subsequent elevation query, if the SRTM file is not found locally it will be retrieved and saved. If you need to change the way the files are saved locally (for example to store them in a different location) -- change the file_handler. See srtm.main.FileHandler. If leave_zipped is True then files will be stored locally as compressed zip files. That means less disk space but more computing time for every file loaded. If use_included_urls is True, URLs to SRTM files included in the library will be used. Set to False if you need to reload them on first run. If batch_mode is True, only the most recent file will be stored. This is ideal for situations where you want to use this function to enrich a very large dataset. If your data are spread over a wide geographic area, this setting will make this function slower but will greatly reduce the risk of out-of-memory errors. Default is False. With the srtm1 and srtm3 params you can decide which SRTM format to use. Srtm3 has a resolution of three arc-seconds (approximately 90 meters between points). Srtm1 has a resolution of one arc-second (approximately 30 meters). Srtm1 is available only for the United States. If both srtm1 and srtm3 are True and both files are present for a location -- srtm1 will be used.
2.586279
2.801912
0.923041
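A hedged usage sketch of the helper above: for enriching a very large dataset spread over a wide area, batch_mode keeps only the most recent tile in memory and leave_zipped trades disk space for extra CPU work per load. The coordinates are arbitrary example values.

import srtm

# downloads/caches .hgt tiles lazily; slower but memory-friendly settings
elevation_data = srtm.get_data(batch_mode=True, leave_zipped=True)
print(elevation_data.get_elevation(46.503, 13.841))  # None if the tile cannot be fetched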
# Local cache path: result = "" if 'HOME' in mod_os.environ: result = mod_os.sep.join([mod_os.environ['HOME'], '.cache', 'srtm']) elif 'HOMEPATH' in mod_os.environ: result = mod_os.sep.join([mod_os.environ['HOMEPATH'], '.cache', 'srtm']) else: raise Exception('No default HOME directory found, please specify a path where to store files') if not mod_path.exists(result): mod_os.makedirs(result) return result
def get_srtm_dir(self)
The default path to store files.
3.303377
3.069241
1.076285
req_msg = { 'type': 'subscribe', 'topic': topic, 'response': True } await self._conn.send_message(req_msg)
async def subscribe(self, topic: str)
Subscribe to a channel :param topic: required :returns: None Sample ws response .. code-block:: python { "type":"message", "topic":"/market/ticker:BTC-USDT", "subject":"trade.ticker", "data":{ "sequence":"1545896668986", "bestAsk":"0.08", "size":"0.011", "bestBidSize":"0.036", "price":"0.08", "bestAskSize":"0.18", "bestBid":"0.049" } } Error response .. code-block:: python { 'code': 404, 'data': 'topic /market/ticker:BTC-USDT is not found', 'id': '1550868034537', 'type': 'error' }
6.078462
4.629417
1.313008
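A hedged usage sketch of the coroutine above. How the socket-manager object (called `ksm` here) is constructed depends on the library version and is not shown; only the documented subscribe/unsubscribe calls are used, and the topic string follows the sample response in the docstring.

import asyncio

async def watch_ticker(ksm):
    # messages arrive via the callback registered on the underlying connection
    await ksm.subscribe('/market/ticker:BTC-USDT')
    await asyncio.sleep(60)                       # listen for a minute
    await ksm.unsubscribe('/market/ticker:BTC-USDT')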
req_msg = { 'type': 'unsubscribe', 'topic': topic, 'response': True } await self._conn.send_message(req_msg)
async def unsubscribe(self, topic: str)
Unsubscribe from a topic :param topic: required :returns: None Sample ws response .. code-block:: python { "id": "1545910840805", "type": "ack" }
6.234281
5.209599
1.196691
data_json = "" endpoint = path if method == "get": if data: query_string = self._get_params_for_sig(data) endpoint = "{}?{}".format(path, query_string) elif data: data_json = compact_json_dict(data) sig_str = ("{}{}{}{}".format(nonce, method.upper(), endpoint, data_json)).encode('utf-8') m = hmac.new(self.API_SECRET.encode('utf-8'), sig_str, hashlib.sha256) return base64.b64encode(m.digest())
def _generate_signature(self, nonce, method, path, data)
Generate the call signature :param nonce: request timestamp (nonce) prepended to the signed string :param method: HTTP method of the request :param path: endpoint path of the request :param data: dict of request parameters :return: base64-encoded signature string
3.021278
3.050527
0.990412
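The signing scheme implemented above is the base64-encoded HMAC-SHA256 of "<nonce><METHOD><endpoint><json body>" (for GET requests the parameters go into the query string instead of the body). A standalone sketch follows; the secret, endpoint and payload are dummy values, and compact JSON separators are assumed to mirror the library's compact_json_dict helper.

import base64
import hashlib
import hmac
import json
import time

api_secret = 'my-secret'                     # dummy secret for illustration
nonce = str(int(time.time() * 1000))         # millisecond timestamp
method = 'post'
endpoint = '/api/v1/orders'                  # hypothetical endpoint path
data_json = json.dumps({'symbol': 'KCS-BTC'}, separators=(',', ':'))

sig_str = '{}{}{}{}'.format(nonce, method.upper(), endpoint, data_json)
signature = base64.b64encode(
    hmac.new(api_secret.encode('utf-8'), sig_str.encode('utf-8'),
             hashlib.sha256).digest())
print(signature)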
if not str(response.status_code).startswith('2'): raise KucoinAPIException(response) try: res = response.json() if 'code' in res and res['code'] != "200000": raise KucoinAPIException(response) if 'success' in res and not res['success']: raise KucoinAPIException(response) # by default return full response # if it's a normal response we have a data attribute, return that if 'data' in res: res = res['data'] return res except ValueError: raise KucoinRequestException('Invalid Response: %s' % response.text)
def _handle_response(response)
Internal helper for handling API responses from the Kucoin server. Raises the appropriate exceptions when necessary; otherwise, returns the response.
3.136353
3.038852
1.032085
data = { 'type': account_type, 'currency': currency } return self._post('accounts', True, data=data)
def create_account(self, account_type, currency)
Create an account https://docs.kucoin.com/#create-an-account :param account_type: Account type - main or trade :type account_type: string :param currency: Currency code :type currency: string .. code:: python account = client.create_account('trade', 'BTC') :returns: API Response .. code-block:: python { "id": "5bd6e9286d99522a52e458de" } :raises: KucoinResponseException, KucoinAPIException
3.700881
5.707135
0.648466
data = {} if start: data['startAt'] = start if end: data['endAt'] = end if page: data['currentPage'] = page if limit: data['pageSize'] = limit return self._get('accounts/{}/ledgers'.format(account_id), True, data=data)
def get_account_activity(self, account_id, start=None, end=None, page=None, limit=None)
Get list of account activity https://docs.kucoin.com/#get-account-history :param account_id: ID for account - from list_accounts() :type account_id: string :param start: (optional) Start time as unix timestamp :type start: string :param end: (optional) End time as unix timestamp :type end: string :param page: (optional) Current page - default 1 :type page: int :param limit: (optional) Number of results to return - default 50 :type limit: int .. code:: python history = client.get_account_activity('5bd6e9216d99522a52e458d6') history = client.get_account_activity('5bd6e9216d99522a52e458d6', start='1540296039000') history = client.get_account_activity('5bd6e9216d99522a52e458d6', page=2, page_size=10) :returns: API Response .. code-block:: python { "currentPage": 1, "pageSize": 10, "totalNum": 2, "totalPage": 1, "items": [ { "currency": "KCS", "amount": "0.0998", "fee": "0", "balance": "1994.040596", "bizType": "withdraw", "direction": "in", "createdAt": 1540296039000, "context": { "orderId": "5bc7f080b39c5c03286eef8a", "currency": "BTC" } }, { "currency": "KCS", "amount": "0.0998", "fee": "0", "balance": "1994.140396", "bizType": "trade exchange", "direction": "in", "createdAt": 1540296039000, "context": { "orderId": "5bc7f080b39c5c03286eef8e", "tradeId": "5bc7f080b3949c03286eef8a", "symbol": "BTC-USD" } } ] } :raises: KucoinResponseException, KucoinAPIException
2.541427
2.737097
0.928512
data = {} if page: data['currentPage'] = page if page_size: data['pageSize'] = page_size return self._get('accounts/{}/holds'.format(account_id), True, data=data)
def get_account_holds(self, account_id, page=None, page_size=None)
Get account holds placed for any active orders or pending withdraw requests https://docs.kucoin.com/#get-holds :param account_id: ID for account - from list_accounts() :type account_id: string :param page: (optional) Current page - default 1 :type page: int :param page_size: (optional) Number of results to return - default 50 :type page_size: int .. code:: python holds = client.get_account_holds('5bd6e9216d99522a52e458d6') holds = client.get_account_holds('5bd6e9216d99522a52e458d6', page=2, page_size=10) :returns: API Response .. code-block:: python { "currentPage": 1, "pageSize": 10, "totalNum": 2, "totalPage": 1, "items": [ { "currency": "ETH", "holdAmount": "5083", "bizType": "Withdraw", "orderId": "5bc7f080b39c5c03286eef8e", "createdAt": 1545898567000, "updatedAt": 1545898567000 }, { "currency": "ETH", "holdAmount": "1452", "bizType": "Withdraw", "orderId": "5bc7f518b39c5c033818d62d", "createdAt": 1545898567000, "updatedAt": 1545898567000 } ] } :raises: KucoinResponseException, KucoinAPIException
3.009106
3.293521
0.913644
data = { 'payAccountId': from_account_id, 'recAccountId': to_account_id, 'amount': amount } if order_id: data['clientOid'] = order_id else: data['clientOid'] = flat_uuid() return self._post('accounts/inner-transfer', True, data=data)
def create_inner_transfer(self, from_account_id, to_account_id, amount, order_id=None)
Transfer funds between your internal accounts https://docs.kucoin.com/#get-holds :param from_account_id: ID of account to transfer funds from - from list_accounts() :type from_account_id: str :param to_account_id: ID of account to transfer funds to - from list_accounts() :type to_account_id: str :param amount: Amount to transfer :type amount: int :param order_id: (optional) Request ID (default flat_uuid()) :type order_id: string .. code:: python transfer = client.create_inner_transfer('5bd6e9216d99522a52e458d6', '5bc7f080b39c5c03286eef8e', 20) :returns: API Response .. code-block:: python { "orderId": "5bd6e9286d99522a52e458de" } :raises: KucoinResponseException, KucoinAPIException
3.680705
3.980895
0.924592
data = { 'currency': currency } return self._post('deposit-addresses', True, data=data)
def create_deposit_address(self, currency)
Create deposit address of currency for deposit. You can just create one deposit address. https://docs.kucoin.com/#create-deposit-address :param currency: Name of currency :type currency: string .. code:: python address = client.create_deposit_address('NEO') :returns: ApiResponse .. code:: python { "address": "0x78d3ad1c0aa1bf068e19c94a2d7b16c9c0fcd8b1", "memo": "5c247c8a03aa677cea2a251d" } :raises: KucoinResponseException, KucoinAPIException
5.447964
9.015183
0.60431
data = { 'currency': currency } return self._get('deposit-addresses', True, data=data)
def get_deposit_address(self, currency)
Get deposit address for a currency https://docs.kucoin.com/#get-deposit-address :param currency: Name of currency :type currency: string .. code:: python address = client.get_deposit_address('NEO') :returns: ApiResponse .. code:: python { "address": "0x78d3ad1c0aa1bf068e19c94a2d7b16c9c0fcd8b1", "memo": "5c247c8a03aa677cea2a251d" } :raises: KucoinResponseException, KucoinAPIException
5.618069
8.15679
0.68876
data = {} if currency: data['currency'] = currency if status: data['status'] = status if start: data['startAt'] = start if end: data['endAt'] = end if limit: data['pageSize'] = limit if page: data['page'] = page return self._get('withdrawals', True, data=data)
def get_withdrawals(self, currency=None, status=None, start=None, end=None, page=None, limit=None)
Get withdrawal records for a currency https://docs.kucoin.com/#get-withdrawals-list :param currency: Name of currency (optional) :type currency: string :param status: (optional) Status of withdrawal (PROCESSING, SUCCESS, FAILURE) :type status: string :param start: (optional) Start time as unix timestamp :type start: string :param end: (optional) End time as unix timestamp :type end: string :param page: (optional) Page to fetch :type page: int :param limit: (optional) Number of transactions :type limit: int .. code:: python withdrawals = client.get_withdrawals('NEO') :returns: ApiResponse .. code:: python { "currentPage": 1, "pageSize": 10, "totalNum": 1, "totalPage": 1, "items": [ { "id": "5c2dc64e03aa675aa263f1ac", "address": "0x5bedb060b8eb8d823e2414d82acce78d38be7fe9", "memo": "", "currency": "ETH", "amount": 1.0000000, "fee": 0.0100000, "walletTxId": "3e2414d82acce78d38be7fe9", "isInner": false, "status": "FAILURE", "createdAt": 1546503758000, "updatedAt": 1546504603000 } ] } :raises: KucoinResponseException, KucoinAPIException
1.959693
2.195058
0.892775
data = { 'currency': currency } return self._get('withdrawals/quotas', True, data=data)
def get_withdrawal_quotas(self, currency)
Get withdrawal quotas for a currency https://docs.kucoin.com/#get-withdrawal-quotas :param currency: Name of currency :type currency: string .. code:: python quotas = client.get_withdrawal_quotas('ETH') :returns: ApiResponse .. code:: python { "currency": "ETH", "availableAmount": 2.9719999, "remainAmount": 2.9719999, "withdrawMinSize": 0.1000000, "limitBTCAmount": 2.0, "innerWithdrawMinFee": 0.00001, "isWithdrawEnabled": true, "withdrawMinFee": 0.0100000, "precision": 7 } :raises: KucoinResponseException, KucoinAPIException
4.989761
8.16438
0.611162
data = { 'currency': currency, 'amount': amount, 'address': address } if memo: data['memo'] = memo if is_inner: data['isInner'] = is_inner if remark: data['remark'] = remark return self._post('withdrawals', True, data=data)
def create_withdrawal(self, currency, amount, address, memo=None, is_inner=False, remark=None)
Process a withdrawal https://docs.kucoin.com/#apply-withdraw :param currency: Name of currency :type currency: string :param amount: Amount to withdraw :type amount: number :param address: Address to withdraw to :type address: string :param memo: (optional) Remark to the withdrawal address :type memo: string :param is_inner: (optional) Whether this is an internal withdrawal (transfer between KuCoin accounts). Default: False :type is_inner: bool :param remark: (optional) Remark :type remark: string .. code:: python withdrawal = client.create_withdrawal('NEO', 20, '598aeb627da3355fa3e851') :returns: ApiResponse .. code:: python { "withdrawalId": "5bffb63303aa675e8bbe18f9" } :raises: KucoinResponseException, KucoinAPIException
1.871509
2.455697
0.762109
if not size and not funds: raise MarketOrderException('Need size or fund parameter') if size and funds: raise MarketOrderException('Need size or fund parameter not both') data = { 'side': side, 'symbol': symbol, 'type': self.ORDER_MARKET } if size: data['size'] = size if funds: data['funds'] = funds if client_oid: data['clientOid'] = client_oid else: data['clientOid'] = flat_uuid() if remark: data['remark'] = remark if stp: data['stp'] = stp return self._post('orders', True, data=data)
def create_market_order(self, symbol, side, size=None, funds=None, client_oid=None, remark=None, stp=None)
Create a market order One of size or funds must be set https://docs.kucoin.com/#place-a-new-order :param symbol: Name of symbol e.g. KCS-BTC :type symbol: string :param side: buy or sell :type side: string :param size: (optional) Desired amount in base currency :type size: string :param funds: (optional) Desired amount of quote currency to use :type funds: string :param client_oid: (optional) Unique order id (default flat_uuid()) :type client_oid: string :param remark: (optional) remark for the order, max 100 utf8 characters :type remark: string :param stp: (optional) self trade protection CN, CO, CB or DC (default is None) :type stp: string .. code:: python order = client.create_market_order('NEO', Client.SIDE_BUY, size=20) :returns: ApiResponse .. code:: python { "orderOid": "596186ad07015679730ffa02" } :raises: KucoinResponseException, KucoinAPIException, MarketOrderException
2.201482
2.115021
1.04088
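A hedged usage sketch of the size-or-funds rule enforced above: supply exactly one of the two per order. `client` is assumed to be an authenticated kucoin.client.Client instance; symbols and amounts are illustrative only.

from kucoin.client import Client

def example_market_orders(client):
    # spend a fixed amount of quote currency ...
    by_funds = client.create_market_order('KCS-BTC', Client.SIDE_BUY,
                                          funds='0.01')
    # ... or buy a fixed amount of base currency, but never both at once
    by_size = client.create_market_order('KCS-BTC', Client.SIDE_BUY,
                                         size='10')
    return by_funds, by_size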
if stop and not stop_price: raise LimitOrderException('Stop order needs stop_price') if stop_price and not stop: raise LimitOrderException('Stop order type required with stop_price') if cancel_after and time_in_force != self.TIMEINFORCE_GOOD_TILL_TIME: raise LimitOrderException('Cancel after only works with time_in_force = "GTT"') if hidden and iceberg: raise LimitOrderException('Order can be either "hidden" or "iceberg"') if iceberg and not visible_size: raise LimitOrderException('Iceberg order requires visible_size') data = { 'symbol': symbol, 'side': side, 'type': self.ORDER_LIMIT, 'price': price, 'size': size } if client_oid: data['clientOid'] = client_oid else: data['clientOid'] = flat_uuid() if remark: data['remark'] = remark if stp: data['stp'] = stp if time_in_force: data['timeInForce'] = time_in_force if cancel_after: data['cancelAfter'] = cancel_after if post_only: data['postOnly'] = post_only if stop: data['stop'] = stop data['stopPrice'] = stop_price if hidden: data['hidden'] = hidden if iceberg: data['iceberg'] = iceberg data['visible_size'] = visible_size return self._post('orders', True, data=data)
def create_limit_order(self, symbol, side, price, size, client_oid=None, remark=None, time_in_force=None, stop=None, stop_price=None, stp=None, cancel_after=None, post_only=None, hidden=None, iceberg=None, visible_size=None)
Create an order https://docs.kucoin.com/#place-a-new-order :param symbol: Name of symbol e.g. KCS-BTC :type symbol: string :param side: buy or sell :type side: string :param price: Name of coin :type price: string :param size: Amount of base currency to buy or sell :type size: string :param client_oid: (optional) Unique order_id default flat_uuid() :type client_oid: string :param remark: (optional) remark for the order, max 100 utf8 characters :type remark: string :param stp: (optional) self trade protection CN, CO, CB or DC (default is None) :type stp: string :param time_in_force: (optional) GTC, GTT, IOC, or FOK (default is GTC) :type time_in_force: string :param stop: (optional) stop type loss or entry - requires stop_price :type stop: string :param stop_price: (optional) trigger price for stop order :type stop_price: string :param cancel_after: (optional) number of seconds to cancel the order if not filled required time_in_force to be GTT :type cancel_after: string :param post_only: (optional) indicates that the order should only make liquidity. If any part of the order results in taking liquidity, the order will be rejected and no part of it will execute. :type post_only: bool :param hidden: (optional) Orders not displayed in order book :type hidden: bool :param iceberg: (optional) Only visible portion of the order is displayed in the order book :type iceberg: bool :param visible_size: (optional) The maximum visible size of an iceberg order :type visible_size: bool .. code:: python order = client.create_limit_order('KCS-BTC', Client.SIDE_BUY, '0.01', '1000') :returns: ApiResponse .. code:: python { "orderOid": "596186ad07015679730ffa02" } :raises: KucoinResponseException, KucoinAPIException, LimitOrderException
1.925029
1.858614
1.035734
data = {} if symbol is not None: data['symbol'] = symbol return self._delete('orders', True, data=data)
def cancel_all_orders(self, symbol=None)
Cancel all orders https://docs.kucoin.com/#cancel-all-orders .. code:: python res = client.cancel_all_orders() :returns: ApiResponse .. code:: python { "cancelledOrderIds": [ "5bd6e9286d99522a52e458de" ] } :raises: KucoinResponseException, KucoinAPIException
4.402567
7.311255
0.602163
data = {} if symbol: data['symbol'] = symbol if status: data['status'] = status if side: data['side'] = side if order_type: data['type'] = order_type if start: data['startAt'] = start if end: data['endAt'] = end if page: data['page'] = page if limit: data['pageSize'] = limit return self._get('orders', True, data=data)
def get_orders(self, symbol=None, status=None, side=None, order_type=None, start=None, end=None, page=None, limit=None)
Get list of orders https://docs.kucoin.com/#list-orders :param symbol: (optional) Name of symbol e.g. KCS-BTC :type symbol: string :param status: (optional) Specify status active or done (default done) :type status: string :param side: (optional) buy or sell :type side: string :param order_type: (optional) limit, market, limit_stop or market_stop :type order_type: string :param start: (optional) Start time as unix timestamp :type start: string :param end: (optional) End time as unix timestamp :type end: string :param page: (optional) Page to fetch :type page: int :param limit: (optional) Number of orders :type limit: int .. code:: python orders = client.get_orders(symbol='KCS-BTC', status='active') :returns: ApiResponse .. code:: python { "currentPage": 1, "pageSize": 1, "totalNum": 153408, "totalPage": 153408, "items": [ { "id": "5c35c02703aa673ceec2a168", "symbol": "BTC-USDT", "opType": "DEAL", "type": "limit", "side": "buy", "price": "10", "size": "2", "funds": "0", "dealFunds": "0.166", "dealSize": "2", "fee": "0", "feeCurrency": "USDT", "stp": "", "stop": "", "stopTriggered": false, "stopPrice": "0", "timeInForce": "GTC", "postOnly": false, "hidden": false, "iceberge": false, "visibleSize": "0", "cancelAfter": 0, "channel": "IOS", "clientOid": null, "remark": null, "tags": null, "isActive": false, "cancelExist": false, "createdAt": 1547026471000 } ] } :raises: KucoinResponseException, KucoinAPIException
1.571115
1.79191
0.876782
data = {} if symbol: data['symbol'] = symbol if side: data['side'] = side if start: data['startAt'] = start if end: data['endAt'] = end if page: data['page'] = page if limit: data['pageSize'] = limit return self._get('hist-orders', True, data=data)
def get_historical_orders(self, symbol=None, side=None, start=None, end=None, page=None, limit=None)
List of KuCoin V1 historical orders. https://docs.kucoin.com/#get-v1-historical-orders-list :param symbol: (optional) Name of symbol e.g. KCS-BTC :type symbol: string :param side: (optional) buy or sell :type side: string :param start: (optional) Start time as unix timestamp :type start: string :param end: (optional) End time as unix timestamp :type end: string :param page: (optional) Page to fetch :type page: int :param limit: (optional) Number of orders :type limit: int .. code:: python orders = client.get_historical_orders(symbol='KCS-BTC') :returns: ApiResponse .. code:: python { "currentPage": 1, "pageSize": 50, "totalNum": 1, "totalPage": 1, "items": [ { "symbol": "SNOV-ETH", "dealPrice": "0.0000246", "dealValue": "0.018942", "amount": "770", "fee": "0.00001137", "side": "sell", "createdAt": 1540080199 } ] } :raises: KucoinResponseException, KucoinAPIException
1.905988
2.294173
0.830796
data = {} if order_id: data['orderId'] = order_id if symbol: data['symbol'] = symbol if side: data['side'] = side if order_type: data['type'] = order_type if start: data['startAt'] = start if end: data['endAt'] = end if page: data['page'] = page if limit: data['pageSize'] = limit return self._get('fills', True, data=data)
def get_fills(self, order_id=None, symbol=None, side=None, order_type=None, start=None, end=None, page=None, limit=None)
Get a list of recent fills. https://docs.kucoin.com/#list-fills :param order_id: (optional) generated order id :type order_id: string :param symbol: (optional) Name of symbol e.g. KCS-BTC :type symbol: string :param side: (optional) buy or sell :type side: string :param order_type: (optional) limit, market, limit_stop or market_stop :type order_type: string :param start: Start time as unix timestamp (optional) :type start: string :param end: End time as unix timestamp (optional) :type end: string :param page: optional - Page to fetch :type page: int :param limit: optional - Number of orders :type limit: int .. code:: python fills = client.get_fills() :returns: ApiResponse .. code:: python { "currentPage":1, "pageSize":1, "totalNum":251915, "totalPage":251915, "items":[ { "symbol":"BTC-USDT", "tradeId":"5c35c02709e4f67d5266954e", "orderId":"5c35c02703aa673ceec2a168", "counterOrderId":"5c1ab46003aa676e487fa8e3", "side":"buy", "liquidity":"taker", "forceTaker":true, "price":"0.083", "size":"0.8424304", "funds":"0.0699217232", "fee":"0", "feeRate":"0", "feeCurrency":"USDT", "stop":"", "type":"limit", "createdAt":1547026472000 } ] } :raises: KucoinResponseException, KucoinAPIException
1.565922
1.771248
0.884078
data = {} tick_path = 'market/allTickers' if symbol is not None: tick_path = 'market/orderbook/level1' data = { 'symbol': symbol } return self._get(tick_path, False, data=data)
def get_ticker(self, symbol=None)
Get symbol tick https://docs.kucoin.com/#get-ticker :param symbol: (optional) Name of symbol e.g. KCS-BTC :type symbol: string .. code:: python all_ticks = client.get_ticker() ticker = client.get_ticker('ETH-BTC') :returns: ApiResponse .. code:: python { "sequence": "1545825031840", # now sequence "price": "3494.367783", # last trade price "size": "0.05027185", # last trade size "bestBid": "3494.367783", # best bid price "bestBidSize": "2.60323254", # size at best bid price "bestAsk": "3499.12", # best ask price "bestAskSize": "0.01474011" # size at best ask price } :raises: KucoinResponseException, KucoinAPIException
4.63037
5.375091
0.86145
data = {} if base is not None: data['base'] = base if symbol is not None: data['currencies'] = symbol return self._get('prices', False, data=data)
def get_fiat_prices(self, base=None, symbol=None)
Get fiat price for currency https://docs.kucoin.com/#get-fiat-price :param base: (optional) Fiat currency, e.g. USD or EUR. Default is USD. :type base: string :param symbol: (optional) Cryptocurrencies. For multiple cryptocurrencies, separate them with commas. Default is all. :type symbol: string .. code:: python prices = client.get_fiat_prices() :returns: ApiResponse .. code-block:: python { "BTC": "3911.28000000", "ETH": "144.55492453", "LTC": "48.45888179", "KCS": "0.45546856" } :raises: KucoinResponseException, KucoinAPIException
3.563463
4.708715
0.75678
data = { 'symbol': symbol } return self._get('market/stats', False, data=data)
def get_24hr_stats(self, symbol)
Get 24hr stats for a symbol. Volume is in base currency units.
open, high, low are in quote currency units.

:param symbol: Name of symbol e.g. KCS-BTC
:type symbol: string

.. code:: python

    stats = client.get_24hr_stats('ETH-BTC')

:returns: ApiResponse

.. code:: python

    {
        "symbol": "BTC-USDT",
        "changeRate": "0.0128",     # 24h change rate
        "changePrice": "0.8",       # 24h change in price (a negative change rate means the price fell, a positive one means it rose)
        "open": 61,                 # Opening price
        "close": 63.6,              # Closing price
        "high": "63.6",             # Highest price filled
        "low": "61",                # Lowest price filled
        "vol": "244.78",            # Transaction quantity
        "volValue": "15252.0127"    # Transaction amount
    }

:raises: KucoinResponseException, KucoinAPIException
6.967729
7.346668
0.94842
data = { 'symbol': symbol } return self._get('market/orderbook/level2_100', False, data=data)
def get_order_book(self, symbol)
Get a list of bids and asks aggregated by price for a symbol. Returns up to 100 depth each side. Fastest Order book API https://docs.kucoin.com/#get-part-order-book-aggregated :param symbol: Name of symbol e.g. KCS-BTC :type symbol: string .. code:: python orders = client.get_order_book('KCS-BTC') :returns: ApiResponse .. code:: python { "sequence": "3262786978", "bids": [ ["6500.12", "0.45054140"], # [price, size] ["6500.11", "0.45054140"] ], "asks": [ ["6500.16", "0.57753524"], ["6500.15", "0.57753524"] ] } :raises: KucoinResponseException, KucoinAPIException
7.449246
7.740986
0.962312
data = { 'symbol': symbol } return self._get('market/orderbook/level2', False, data=data)
def get_full_order_book(self, symbol)
Get a list of all bids and asks aggregated by price for a symbol.

This call is generally used by professional traders because it uses more server resources and traffic, and Kucoin has strict access frequency control.

https://docs.kucoin.com/#get-full-order-book-aggregated

:param symbol: Name of symbol e.g. KCS-BTC
:type symbol: string

.. code:: python

    orders = client.get_full_order_book('KCS-BTC')

:returns: ApiResponse

.. code:: python

    {
        "sequence": "3262786978",
        "bids": [
            ["6500.12", "0.45054140"],  # [price, size]
            ["6500.11", "0.45054140"]
        ],
        "asks": [
            ["6500.16", "0.57753524"],
            ["6500.15", "0.57753524"]
        ]
    }

:raises: KucoinResponseException, KucoinAPIException
6.680698
7.12037
0.938252
data = { 'symbol': symbol } return self._get('market/orderbook/level3', False, data=data)
def get_full_order_book_level3(self, symbol)
Get a list of all bids and asks non-aggregated for a symbol.

This call is generally used by professional traders because it uses more server resources and traffic, and Kucoin has strict access frequency control.

https://docs.kucoin.com/#get-full-order-book-atomic

:param symbol: Name of symbol e.g. KCS-BTC
:type symbol: string

.. code:: python

    orders = client.get_full_order_book_level3('KCS-BTC')

:returns: ApiResponse

.. code:: python

    {
        "sequence": "1545896707028",
        "bids": [
            [
                "5c2477e503aa671a745c4057",  # orderId
                "6",                         # price
                "0.999"                      # size
            ],
            [
                "5c2477e103aa671a745c4054",
                "5",
                "0.999"
            ]
        ],
        "asks": [
            [
                "5c24736703aa671a745c401e",
                "200",
                "1"
            ],
            [
                "5c2475c903aa671a745c4033",
                "201",
                "1"
            ]
        ]
    }

:raises: KucoinResponseException, KucoinAPIException
5.715412
7.432422
0.768984
data = { 'symbol': symbol } return self._get('market/histories', False, data=data)
def get_trade_histories(self, symbol)
List the latest trades for a symbol https://docs.kucoin.com/#get-trade-histories :param symbol: Name of symbol e.g. KCS-BTC :type symbol: string .. code:: python orders = client.get_trade_histories('KCS-BTC') :returns: ApiResponse .. code:: python [ { "sequence": "1545896668571", "price": "0.07", # Filled price "size": "0.004", # Filled amount "side": "buy", # Filled side. The filled side is set to the taker by default. "time": 1545904567062140823 # Transaction time }, { "sequence": "1545896668578", "price": "0.054", "size": "0.066", "side": "buy", "time": 1545904581619888405 } ] :raises: KucoinResponseException, KucoinAPIException
6.900657
9.334304
0.739279
data = { 'symbol': symbol } if kline_type is not None: data['type'] = kline_type if start is not None: data['startAt'] = start else: data['startAt'] = calendar.timegm(datetime.utcnow().date().timetuple()) if end is not None: data['endAt'] = end else: data['endAt'] = int(time.time()) return self._get('market/candles', False, data=data)
def get_kline_data(self, symbol, kline_type='5min', start=None, end=None)
Get kline data For each query, the system would return at most 1500 pieces of data. To obtain more data, please page the data by time. :param symbol: Name of symbol e.g. KCS-BTC :type symbol: string :param kline_type: type of symbol, type of candlestick patterns: 1min, 3min, 5min, 15min, 30min, 1hour, 2hour, 4hour, 6hour, 8hour, 12hour, 1day, 1week :type kline_type: string :param start: Start time as unix timestamp (optional) default start of day in UTC :type start: int :param end: End time as unix timestamp (optional) default now in UTC :type end: int https://docs.kucoin.com/#get-historic-rates .. code:: python klines = client.get_kline_data('KCS-BTC', '5min', 1507479171, 1510278278) :returns: ApiResponse .. code:: python [ [ "1545904980", //Start time of the candle cycle "0.058", //opening price "0.049", //closing price "0.058", //highest price "0.049", //lowest price "0.018", //Transaction amount "0.000945" //Transaction volume ], [ "1545904920", "0.058", "0.072", "0.072", "0.058", "0.103", "0.006986" ] ] :raises: KucoinResponseException, KucoinAPIException
2.315414
2.447505
0.946031
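Because `get_kline_data` returns bare lists whose column order is only documented in the example above, a small conversion helper makes the candles easier to work with. A sketch assuming pandas is available (it is not a dependency of this client) and that the columns follow the order shown in the response example:

.. code:: python

    import pandas as pd

    def klines_to_frame(klines):
        # column order taken from the response example above
        columns = ['time', 'open', 'close', 'high', 'low', 'amount', 'volume']
        df = pd.DataFrame(klines, columns=columns).astype(float)
        df['time'] = pd.to_datetime(df['time'], unit='s')
        return df.set_index('time').sort_index()

    # usage (hypothetical):
    # df = klines_to_frame(client.get_kline_data('KCS-BTC', '1hour'))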
path = 'bullet-public' signed = private if private: path = 'bullet-private' return self._post(path, signed)
def get_ws_endpoint(self, private=False)
Get websocket channel details

https://docs.kucoin.com/#websocket-feed

:param private: (optional) True to request the private channel details (requires authentication). Default: False
:type private: bool

.. code:: python

    ws_details = client.get_ws_endpoint(private=True)

:returns: ApiResponse

.. code:: python

    {
        "code": "200000",
        "data": {
            "instanceServers": [
                {
                    "pingInterval": 50000,
                    "endpoint": "wss://push1-v2.kucoin.net/endpoint",
                    "protocol": "websocket",
                    "encrypt": true,
                    "pingTimeout": 10000
                }
            ],
            "token": "vYNlCtbz4XNJ1QncwWilJnBtmmfe4geLQDUA62kKJsDChc6I4bRDQc73JfIrlFaVYIAE0Gv2--MROnLAgjVsWkcDq_MuG7qV7EktfCEIphiqnlfpQn4Ybg==.IoORVxR2LmKV7_maOR9xOg=="
        }
    }

:raises: KucoinResponseException, KucoinAPIException
12.101193
13.944833
0.86779
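The token and instance server returned by `get_ws_endpoint` are only useful once a websocket connection is opened with them. A minimal sketch using the third-party `websockets` package; the `endpoint?token=...` URL format and the subscribe message layout follow the KuCoin websocket documentation and are assumptions here, as is unwrapping an optional `data` envelope.

.. code:: python

    import asyncio
    import json
    import websockets  # third-party package, not part of this client

    async def stream_ticker(client, symbol='KCS-BTC'):
        details = client.get_ws_endpoint()
        details = details.get('data', details)  # unwrap the envelope if present
        server = details['instanceServers'][0]
        url = '{}?token={}'.format(server['endpoint'], details['token'])
        async with websockets.connect(url) as ws:
            await ws.recv()  # welcome message
            await ws.send(json.dumps({
                'id': 1,
                'type': 'subscribe',
                'topic': '/market/ticker:{}'.format(symbol),
                'response': True,
            }))
            # a production client should also send pings every
            # server['pingInterval'] milliseconds to keep the connection alive
            while True:
                print(json.loads(await ws.recv()))

    # usage (hypothetical): asyncio.run(stream_ticker(client))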
prefix = settings.SESSION_REDIS_PREFIX if not prefix: return session_key return ':'.join([prefix, session_key])
def get_real_stored_key(self, session_key)
Return the real key name in redis storage @return string
4.141675
4.410345
0.939082
try: inputtiles = click.open_file(inputtiles).readlines() except IOError: inputtiles = [inputtiles] # parse the input stream into an array tiles = edge_finder.findedges(inputtiles, parsenames) for t in tiles: click.echo(t.tolist())
def edges(inputtiles, parsenames)
For a stream of [<x>, <y>, <z>] tiles, return only those tiles that are on the edge.
5.638144
5.214674
1.081208
try: inputtiles = click.open_file(inputtiles).readlines() except IOError: inputtiles = [inputtiles] unioned = uniontiles.union(inputtiles, parsenames) for u in unioned: click.echo(json.dumps(u))
def union(inputtiles, parsenames)
Returns the unioned shape of a stream of [<x>, <y>, <z>] tiles in GeoJSON.
4.017384
3.915717
1.025964
features = [f for f in super_utils.filter_polygons(features)] tiles = burntiles.burn(features, zoom) for t in tiles: click.echo(t.tolist())
def burn(features, sequence, zoom)
Burn a stream of GeoJSONs into an output stream of the tiles they intersect for a given zoom.
10.329443
7.407968
1.394369
max_retries = getattr( settings, 'LOCALIZED_FIELDS_MAX_RETRIES', 100 ) if not hasattr(self, 'retries'): self.retries = 0 with transaction.atomic(): try: return super().save(*args, **kwargs) except IntegrityError as ex: # this is as retarded as it looks, there's no # way we can put the retry logic inside the slug # field class... we can also not only catch exceptions # that apply to slug fields... so yea.. this is as # retarded as it gets... i am sorry :( if 'slug' not in str(ex): raise ex if self.retries >= max_retries: raise ex self.retries += 1 return self.save()
def save(self, *args, **kwargs)
Saves this model instance to the database.
6.376882
6.23652
1.022507
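The `save` override above wraps a retry-on-conflict idea in Django specifics. The control flow is easier to see in isolation; a sketch with illustrative stand-ins (`ConflictError` for `IntegrityError`, `do_insert` for the actual save call):

.. code:: python

    class ConflictError(Exception):
        """Stand-in for a database integrity/uniqueness error."""

    def save_with_retries(do_insert, max_retries=100):
        # retry the insert until it succeeds or the retry budget is spent
        for attempt in range(max_retries + 1):
            try:
                return do_insert(attempt)
            except ConflictError:
                if attempt >= max_retries:
                    raise
        # unreachable: the last attempt either returned or re-raised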
db_value = super().to_python(value) return self._convert_localized_value(db_value)
def to_python(self, value: Union[Dict[str, int], int, None]) -> LocalizedIntegerValue
Converts the value from a database value into a Python value.
5.856948
3.989988
1.467911
# apply default values default_values = LocalizedIntegerValue(self.default) if isinstance(value, LocalizedIntegerValue): for lang_code, _ in settings.LANGUAGES: local_value = value.get(lang_code) if local_value is None: value.set(lang_code, default_values.get(lang_code, None)) prepped_value = super().get_prep_value(value) if prepped_value is None: return None # make sure all values are proper integers for lang_code, _ in settings.LANGUAGES: local_value = prepped_value[lang_code] try: if local_value is not None: int(local_value) except (TypeError, ValueError): raise IntegrityError('non-integer value in column "%s.%s" violates ' 'integer constraint' % (self.name, lang_code)) # convert to a string before saving because the underlying # type is hstore, which only accept strings prepped_value[lang_code] = str(local_value) if local_value is not None else None return prepped_value
def get_prep_value(self, value: LocalizedIntegerValue) -> dict
Gets the value in a format to store into the database.
2.843728
2.767096
1.027694
defaults = { 'form_class': LocalizedIntegerFieldForm } defaults.update(kwargs) return super().formfield(**defaults)
def formfield(self, **kwargs)
Gets the form field associated with this field.
4.620378
4.502327
1.02622
integer_values = {} for lang_code, _ in settings.LANGUAGES: local_value = value.get(lang_code, None) if local_value is None or local_value.strip() == '': local_value = None try: integer_values[lang_code] = int(local_value) except (ValueError, TypeError): integer_values[lang_code] = None return LocalizedIntegerValue(integer_values)
def _convert_localized_value(value: LocalizedValue) -> LocalizedIntegerValue
Converts from :see:LocalizedValue to :see:LocalizedIntegerValue.
2.443529
2.361643
1.034673
localized_value = self.value_class() for (lang_code, _), value in zip(settings.LANGUAGES, value): localized_value.set(lang_code, value) return localized_value
def compress(self, value: List[str]) -> value_class
Compresses the values from individual fields into a single :see:LocalizedValue instance. Arguments: value: The values from all the widgets. Returns: A :see:LocalizedValue containing all the value in several languages.
4.808717
4.037349
1.191058
if initial is None: initial = [None for x in range(0, len(value))] else: if not isinstance(initial, list): initial = self.widget.decompress(initial) clean_data = [] errors = [] if not value or isinstance(value, (list, tuple)): if (not value or not [v for v in value if v not in self.empty_values]) \ and (not initial or not [v for v in initial if v not in self.empty_values]): if self.required: raise ValidationError(self.error_messages['required'], code='required') else: raise ValidationError(self.error_messages['invalid'], code='invalid') for i, field in enumerate(self.fields): try: field_value = value[i] except IndexError: field_value = None try: field_initial = initial[i] except IndexError: field_initial = None if field_value in self.empty_values and \ field_initial in self.empty_values: if self.require_all_fields: # Raise a 'required' error if the MultiValueField is # required and any field is empty. if self.required: raise ValidationError(self.error_messages['required'], code='required') elif field.required: # Otherwise, add an 'incomplete' error to the list of # collected errors and skip field cleaning, if a required # field is empty. if field.error_messages['incomplete'] not in errors: errors.append(field.error_messages['incomplete']) continue try: clean_data.append(field.clean(field_value, field_initial)) except ValidationError as e: # Collect all validation errors in a single list, which we'll # raise at the end of clean(), rather than raising a single # exception for the first error we encounter. Skip duplicates. errors.extend(m for m in e.error_list if m not in errors) if errors: raise ValidationError(errors) out = self.compress(clean_data) self.validate(out) self.run_validators(out) return out
def clean(self, value, initial=None)
Most of this method is a copy of django.forms.MultiValueField.clean, with the exception of the initial value handling (which is needed to correctly process FileFields). All original comments are preserved.
2.546878
2.377454
1.071263
defaults = { 'form_class': LocalizedTextFieldForm } defaults.update(kwargs) return super().formfield(**defaults)
def formfield(self, **kwargs)
Gets the form field associated with this field.
4.637195
4.446421
1.042905
defaults = { 'form_class': forms.CharField, 'required': False } defaults.update(kwargs) form_field = super().formfield(**defaults) form_field.widget = forms.HiddenInput() return form_field
def formfield(self, **kwargs)
Gets the form field associated with this field. Because this is a slug field which is automatically populated, it should be hidden from the form.
2.609689
2.403071
1.085981
slugs = LocalizedValue() for lang_code, value in self._get_populate_values(instance): if not value: continue if self.include_time: value += '-%s' % datetime.now().microsecond def is_unique(slug: str, language: str) -> bool: unique_filter = { '%s__%s' % (self.name, language): slug } return not type(instance).objects.filter(**unique_filter).exists() slug = self._make_unique_slug( slugify(value, allow_unicode=True), lang_code, is_unique ) slugs.set(lang_code, slug) setattr(instance, self.name, slugs) return slugs
def pre_save(self, instance, add: bool)
Ran just before the model is saved, allows us to build the slug. Arguments: instance: The model that is being saved. add: Indicates whether this is a new entry to the database or an update.
4.558944
4.251289
1.072367
index = 1 unique_slug = slug while not is_unique(unique_slug, language): unique_slug = '%s-%d' % (slug, index) index += 1 return unique_slug
def _make_unique_slug(slug: str, language: str, is_unique: Callable[[str], bool]) -> str
Guarantees that the specified slug is unique by appending a number until it is unique. Arguments: slug: The slug to make unique. language: The language for which the slug has to be unique. is_unique: Function that can be called to verify whether the generated slug is unique. Returns: A guaranteed unique slug.
2.94906
2.901889
1.016255
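The collision-resolution loop in `_make_unique_slug` is self-contained enough to demonstrate without a database; a sketch where an in-memory set stands in for the uniqueness query (the example slugs are illustrative):

.. code:: python

    taken = {'hello-world', 'hello-world-1'}  # pretend these rows already exist

    def is_unique(slug, language):
        return slug not in taken

    def make_unique_slug(slug, language, is_unique):
        index = 1
        unique_slug = slug
        while not is_unique(unique_slug, language):
            unique_slug = '%s-%d' % (slug, index)
            index += 1
        return unique_slug

    print(make_unique_slug('hello-world', 'en', is_unique))  # -> hello-world-2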
return [ ( lang_code, self._get_populate_from_value( instance, self.populate_from, lang_code ), ) for lang_code, _ in settings.LANGUAGES ]
def _get_populate_values(self, instance) -> Tuple[str, str]
Gets all values (for each language) from the specified instance's `populate_from` field. Arguments: instance: The instance to get the values from. Returns: A list of (lang_code, value) tuples.
4.715126
3.61191
1.305438
if callable(field_name): return field_name(instance) def get_field_value(name): value = resolve_object_property(instance, name) with translation.override(language): return str(value) if isinstance(field_name, tuple) or isinstance(field_name, list): value = '-'.join([ value for value in [get_field_value(name) for name in field_name] if value ]) return value return get_field_value(field_name)
def _get_populate_from_value(instance, field_name: Union[str, Tuple[str]], language: str)
Gets the value to create a slug from in the specified language. Arguments: instance: The model that the field resides on. field_name: The name of the field to generate a slug for. language: The language to generate the slug for. Returns: The text to generate a slug for.
2.846758
2.767325
1.028704
name, path, args, kwargs = super( LocalizedUniqueSlugField, self).deconstruct() kwargs['populate_from'] = self.populate_from kwargs['include_time'] = self.include_time return name, path, args, kwargs
def deconstruct(self)
Deconstructs the field into its name, path, args and kwargs so it can be serialized for migrations.
5.710166
5.079174
1.124231
if not isinstance(instance, AtomicSlugRetryMixin): raise ImproperlyConfigured(( 'Model \'%s\' does not inherit from AtomicSlugRetryMixin. ' 'Without this, the LocalizedUniqueSlugField will not work.' ) % type(instance).__name__) slugs = LocalizedValue() for lang_code, value in self._get_populate_values(instance): if not value: continue slug = slugify(value, allow_unicode=True) # verify whether it's needed to re-generate a slug, # if not, re-use the same slug if instance.pk is not None: current_slug = getattr(instance, self.name).get(lang_code) if current_slug is not None: stripped_slug = current_slug[0:current_slug.rfind('-')] if slug == stripped_slug: slugs.set(lang_code, current_slug) continue if self.include_time: slug += '-%d' % datetime.now().microsecond retries = getattr(instance, 'retries', 0) if retries > 0: # do not add another - if we already added time if not self.include_time: slug += '-' slug += '%d' % retries slugs.set(lang_code, slug) setattr(instance, self.name, slugs) return slugs
def pre_save(self, instance, add: bool)
Ran just before the model is saved, allows us to build the slug. Arguments: instance: The model that is being saved. add: Indicates whether this is a new entry to the database or an update. Returns: The localized slug that was generated.
4.252834
3.915415
1.086177
super(LocalizedField, self).contribute_to_class(model, name, **kwargs) setattr(model, self.name, self.descriptor_class(self))
def contribute_to_class(self, model, name, **kwargs)
Adds this field to the specified model. Arguments: model: The model to add the field to. name: The name of the field to add.
2.925305
5.047872
0.579513
if not value: if getattr(settings, 'LOCALIZED_FIELDS_EXPERIMENTAL', False): return None else: return cls.attr_class() # we can get a list if an aggregation expression was used.. # if we the expression was flattened when only one key was selected # then we don't wrap each value in a localized value, otherwise we do if isinstance(value, list): result = [] for inner_val in value: if isinstance(inner_val, dict): if inner_val is None: result.append(None) else: result.append(cls.attr_class(inner_val)) else: result.append(inner_val) return result # this is for when you select an individual key, it will be string, # not a dictionary, we'll give it to you as a flat value, not as a # localized value instance if not isinstance(value, dict): return value return cls.attr_class(value)
def from_db_value(cls, value, *_) -> Optional[LocalizedValue]
Turns the specified database value into its Python equivalent. Arguments: value: The value that is stored in the database and needs to be converted to its Python equivalent. Returns: A :see:LocalizedValue instance containing the data extracted from the database.
5.079845
5.291459
0.960008
# first let the base class handle the deserialization, this is in case we # get specified a json string representing a dict try: deserialized_value = super(LocalizedField, self).to_python(value) except json.JSONDecodeError: deserialized_value = value if not deserialized_value: return self.attr_class() return self.attr_class(deserialized_value)
def to_python(self, value: Union[dict, str, None]) -> LocalizedValue
Turns the specified database value into its Python equivalent. Arguments: value: The value that is stored in the database and needs to be converted to its Python equivalent. Returns: A :see:LocalizedValue instance containing the data extracted from the database.
5.704823
6.141047
0.928966
if isinstance(value, dict): value = LocalizedValue(value) # default to None if this is an unknown type if not isinstance(value, LocalizedValue) and value: value = None if value: cleaned_value = self.clean(value) self.validate(cleaned_value) else: cleaned_value = value return super(LocalizedField, self).get_prep_value( cleaned_value.__dict__ if cleaned_value else None )
def get_prep_value(self, value: LocalizedValue) -> dict
Turns the specified value into something the database can store. If an illegal value (non-LocalizedValue instance) is specified, we'll treat it as an empty :see:LocalizedValue instance, on which the validation will fail. Dictionaries are converted into :see:LocalizedValue instances. Arguments: value: The :see:LocalizedValue instance to serialize into a data type that the database can understand. Returns: A dictionary containing a key for every language, extracted from the specified value.
3.32511
3.436574
0.967565
if not value or not isinstance(value, LocalizedValue):
    return None

# are any of the language fields None/empty?
is_all_null = True
for lang_code, _ in settings.LANGUAGES:
    if value.get(lang_code) is not None:
        is_all_null = False
        break

# all fields have been left empty and we support
# null values, let's return null to represent that
if is_all_null and self.null:
    return None

return value
def clean(self, value, *_)
Cleans the specified value into something we can store in the database. For example, when all the language fields are left empty, and the field is allowed to be null, we will store None instead of empty keys. Arguments: value: The value to clean. Returns: The cleaned value, ready for database storage.
5.728679
4.693508
1.220554
if self.null:
    return

for lang in self.required:
    # check the value for the language being validated,
    # not just the default language
    lang_val = getattr(value, lang)

    if lang_val is None:
        raise IntegrityError('null value in column "%s.%s" violates '
                             'not-null constraint' % (self.name, lang))
def validate(self, value: LocalizedValue, *_)
Validates that a value has been filled in for all required languages. Exceptions are raised to notify the user of invalid values. Arguments: value: The value to validate.
6.334648
5.448809
1.162575
defaults = dict( form_class=LocalizedFieldForm, required=False if self.blank else self.required ) defaults.update(kwargs) return super().formfield(**defaults)
def formfield(self, **kwargs)
Gets the form field associated with this field.
4.242875
3.982534
1.065371
language = language or settings.LANGUAGE_CODE value = super().get(language, default) return value if value is not None else default
def get(self, language: str=None, default: str=None) -> str
Gets the underlying value in the specified or primary language. Arguments: language: The language to get the value in. default: The value to return when no value is set for that language. Returns: The value in the specified language (the primary language if none was specified), or the default.
4.990756
6.340382
0.787138
self[language] = value self.__dict__.update(self) return self
def set(self, language: str, value: str)
Sets the value in the specified language. Arguments: language: The language to set the value in. value: The value to set.
13.273277
10.84051
1.224414
path = 'localized_fields.value.%s' % self.__class__.__name__ return path, [self.__dict__], {}
def deconstruct(self) -> dict
Deconstructs this value into a primitive type. Returns: A (path, args, kwargs) tuple from which this value can be re-constructed; the args hold a dictionary with all the localized values contained in this instance.
22.351763
15.125979
1.477707
for lang_code, _ in settings.LANGUAGES: self.set(lang_code, self.default_value) if isinstance(value, str): self.set(settings.LANGUAGE_CODE, value) elif isinstance(value, dict): for lang_code, _ in settings.LANGUAGES: lang_value = value.get(lang_code, self.default_value) self.set(lang_code, lang_value) elif isinstance(value, collections.Iterable): for val in value: self._interpret_value(val)
def _interpret_value(self, value)
Interprets a value passed in the constructor as a :see:LocalizedValue. If string: Assumes it's the default language. If dict: Each key is a language and the value a string in that language. If list: Recurse into it to apply the rules above. Arguments: value: The value to interpret.
2.229136
2.189857
1.017937
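The interpretation rules in `_interpret_value` (a string means the primary language, a dict maps languages to values, a list recurses) can be shown with plain dicts; the language list and default value below are illustrative assumptions, not the library's settings.

.. code:: python

    LANGUAGES = ['en', 'nl', 'ro']  # illustrative stand-in for settings.LANGUAGES
    DEFAULT = None

    def interpret(value, result=None):
        if result is None:
            result = {code: DEFAULT for code in LANGUAGES}
        if isinstance(value, str):
            result[LANGUAGES[0]] = value            # bare string -> primary language
        elif isinstance(value, dict):
            for code in LANGUAGES:
                result[code] = value.get(code, DEFAULT)
        elif hasattr(value, '__iter__'):
            for item in value:                      # iterable -> recurse per item
                interpret(item, result)
        return result

    print(interpret('hello'))
    print(interpret({'en': 'hello', 'nl': 'hallo'}))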
fallbacks = getattr(settings, 'LOCALIZED_FIELDS_FALLBACKS', {}) language = translation.get_language() or settings.LANGUAGE_CODE languages = fallbacks.get(language, [settings.LANGUAGE_CODE])[:] languages.insert(0, language) for lang_code in languages: value = self.get(lang_code) if value: return value or None return None
def translate(self) -> Optional[str]
Gets the value in the current language or falls back to the next language if there's no value in the current language.
3.896932
3.362564
1.158917
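The fallback chain built by `translate` is simply "current language first, then its configured fallbacks". A sketch over plain dicts, where the fallback map mirrors the `LOCALIZED_FIELDS_FALLBACKS` setting and is an illustrative assumption:

.. code:: python

    FALLBACKS = {'nl': ['en'], 'ro': ['nl', 'en']}  # illustrative setting
    PRIMARY = 'en'

    def translate(values, active_language):
        # try the active language first, then its fallbacks (or the primary language)
        chain = [active_language] + FALLBACKS.get(active_language, [PRIMARY])
        for code in chain:
            value = values.get(code)
            if value:
                return value
        return None

    print(translate({'en': 'colour', 'nl': ''}, 'nl'))  # falls back to 'colour'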
value = super().translate() if value is None or (isinstance(value, str) and value.strip() == ''): return None return int(value)
def translate(self)
Gets the value in the current language, or in the configured fallback language.
4.938655
5.222038
0.945733
if isinstance(value, LocalizedValue): prep_value = LocalizedValue() for k, v in value.__dict__.items(): if v is None: prep_value.set(k, '') else: # Need to convert File objects provided via a form to # unicode for database insertion prep_value.set(k, six.text_type(v)) return super().get_prep_value(prep_value) return super().get_prep_value(value)
def get_prep_value(self, value)
Returns field's value prepared for saving into a database.
4.079802
3.86952
1.054343
value = super().pre_save(model_instance, add) if isinstance(value, LocalizedValue): for file in value.__dict__.values(): if file and not file._committed: file.save(file.name, file, save=False) return value
def pre_save(self, model_instance, add)
Returns field's value just before saving.
4.455044
4.129945
1.078718
value = obj for path_part in path.split('.'): value = getattr(value, path_part) return value
def resolve_object_property(obj, path: str)
Resolves the value of a property on an object. Is able to resolve nested properties. For example, a path can be specified: 'other.beer.name' Raises: AttributeError: In case the property could not be resolved. Returns: The value of the specified property.
4.281349
5.637391
0.759456
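A quick usage sketch of the nested-attribute resolution described above, using throwaway namespace objects built from the `other.beer.name` path mentioned in the docstring:

.. code:: python

    from types import SimpleNamespace

    beer = SimpleNamespace(name='Westvleteren XII')
    obj = SimpleNamespace(other=SimpleNamespace(beer=beer))

    def resolve_object_property(obj, path):
        value = obj
        for path_part in path.split('.'):
            value = getattr(value, path_part)
        return value

    print(resolve_object_property(obj, 'other.beer.name'))  # -> Westvleteren XII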
result = [] for lang_code, _ in settings.LANGUAGES: if value: result.append(value.get(lang_code)) else: result.append(None) return result
def decompress(self, value: LocalizedValue) -> List[str]
Decompresses the specified value so it can be spread over the internal widgets. Arguments: value: The :see:LocalizedValue to display in this widget. Returns: All values to display in the inner widgets.
3.312818
3.824825
0.866136
defaults = { 'form_class': LocalizedCharFieldForm } defaults.update(kwargs) return super().formfield(**defaults)
def formfield(self, **kwargs)
Gets the form field associated with this field.
4.259529
4.186786
1.017374
localized_value = getattr(instance, self.attname) if not localized_value: return None for lang_code, _ in settings.LANGUAGES: value = localized_value.get(lang_code) if not value: continue localized_value.set( lang_code, bleach.clean(value, **get_bleach_default_options()) ) return localized_value
def pre_save(self, instance, add: bool)
Ran just before the model is saved, allows us to bleach (sanitize) the value in every language. Arguments: instance: The model that is being saved. add: Indicates whether this is a new entry to the database or an update.
3.554769
3.595021
0.988803
''' Method to show a CLI based confirmation message, waiting for a yes/no answer. "what" and "where" are used to better define the message. ''' ans = input('Are you sure you want to delete the ' '{} {} from the service?\n[yN]> '.format(what, where)) if 'y' in ans: ans = loop_input('Are you really sure? there\'s no coming back!\n' '[type \'burn!\' to proceed]> ') if 'burn!' != ans: return False else: return False return True
def confirm(what, where)
Method to show a CLI based confirmation message, waiting for a yes/no answer. "what" and "where" are used to better define the message.
9.890639
5.864684
1.686474
def decorate(klass): log.debug('Loading service module class: {}'.format(klass.__name__) ) klass.command = repo_cmd klass.name = repo_service RepositoryService.service_map[repo_service] = klass RepositoryService.command_map[repo_cmd] = repo_service return klass return decorate
def register_target(repo_cmd, repo_service)
Decorator to register a class with a repo_service
4.540159
3.985604
1.13914
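The decorator above is a registry pattern: each decorated class records itself in two lookup maps. A reduced, self-contained sketch with a plain dict registry; the class and command names are illustrative only.

.. code:: python

    SERVICE_MAP = {}
    COMMAND_MAP = {}

    def register_target(repo_cmd, repo_service):
        def decorate(klass):
            # tag the class and record it in both lookup tables
            klass.command = repo_cmd
            klass.name = repo_service
            SERVICE_MAP[repo_service] = klass
            COMMAND_MAP[repo_cmd] = repo_service
            return klass
        return decorate

    @register_target('hub', 'github')
    class GithubService:
        pass

    print(SERVICE_MAP['github'] is GithubService)  # True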
'''Accessor for a repository given a command :param repository: git-python repository instance :param command: aliased name of the service :return: instance for using the service ''' if not repository: config = git_config.GitConfigParser(cls.get_config_path()) else: config = repository.config_reader() target = cls.command_map.get(command, command) conf_section = list(filter(lambda n: 'gitrepo' in n and target in n, config.sections())) http_section = [config._sections[scheme] for scheme in ('http', 'https') if scheme in config.sections()] # check configuration constraints if len(conf_section) == 0: if not target: raise ValueError('Service {} unknown'.format(target)) else: config = dict() elif len(conf_section) > 1: raise ValueError('Too many configurations for service {}'.format(target)) # get configuration section as a dict else: config = config._sections[conf_section[0]] if target in cls.service_map: service = cls.service_map.get(target, cls) service.name = target else: if 'type' not in config: raise ValueError('Missing service type for custom service.') if config['type'] not in cls.service_map: raise ValueError('Service type {} does not exists.'.format(config['type'])) service = cls.service_map.get(config['type'], cls) cls._current = service(repository, config, http_section) return cls._current
def get_service(cls, repository, command)
Accessor for a repository given a command :param repository: git-python repository instance :param command: aliased name of the service :return: instance for using the service
3.986963
3.253645
1.225384
'''format the repository's URL :param repository: name of the repository :param namespace: namespace of the repository :param rw: return a git+ssh URL if true, an https URL otherwise :return: the full URI of the repository ready to use as remote if namespace is not given, repository is expected to be of format `<namespace>/<repository>`. ''' repo = repository if namespace: repo = '{}/{}'.format(namespace, repository) if not rw and repo.count('/') >= self._min_nested_namespaces: return '{}/{}'.format(self.url_ro, repo) elif rw and repo.count('/') >= self._min_nested_namespaces: if self.url_rw.startswith('ssh://'): return '{}/{}'.format(self.url_rw, repo) else: return '{}:{}'.format(self.url_rw, repo) else: raise ArgumentError("Cannot tell how to handle this url: `{}/{}`!".format(namespace, repo))
def format_path(self, repository, namespace=None, rw=False)
format the repository's URL :param repository: name of the repository :param namespace: namespace of the repository :param rw: return a git+ssh URL if true, an https URL otherwise :return: the full URI of the repository ready to use as remote if namespace is not given, repository is expected to be of format `<namespace>/<repository>`.
4.482845
2.491673
1.79913
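The read-only versus read/write URL rules in `format_path` boil down to three cases: an https URL for read-only access, `ssh://host/ns/repo` when the rw base already carries a scheme, and scp-style `host:ns/repo` otherwise. A standalone sketch with illustrative base URLs; the nested-namespace depth check is omitted for brevity.

.. code:: python

    def format_path(repository, namespace=None, rw=False,
                    url_ro='https://github.com', url_rw='git@github.com'):
        repo = '{}/{}'.format(namespace, repository) if namespace else repository
        if not rw:
            return '{}/{}'.format(url_ro, repo)    # read-only https URL
        if url_rw.startswith('ssh://'):
            return '{}/{}'.format(url_rw, repo)    # explicit ssh scheme
        return '{}:{}'.format(url_rw, repo)        # scp-like syntax

    print(format_path('git-repo', 'guyzmo'))           # https://github.com/guyzmo/git-repo
    print(format_path('git-repo', 'guyzmo', rw=True))  # git@github.com:guyzmo/git-repo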
'''Pull a repository :param remote: git-remote instance :param branch: name of the branch to pull ''' pb = ProgressBar() pb.setup(self.name) if branch: remote.pull(branch, progress=pb) else: # pragma: no cover remote.pull(progress=pb) print()
def pull(self, remote, branch=None)
Pull a repository :param remote: git-remote instance :param branch: name of the branch to pull
4.875442
3.790824
1.286117
'''Push a repository :param remote: git-remote instance :param branch: name of the branch to push :return: PushInfo, git push output lines ''' pb = ProgressBar() pb.setup(self.name, ProgressBar.Action.PUSH) if branch: result = remote.push(branch, progress=pb) else: #pragma: no cover result = remote.push(progress=pb) print() return result, pb.other_lines
def push(self, remote, branch=None)
Push a repository :param remote: git-remote instance :param branch: name of the branch to push :return: PushInfo, git push output lines
6.26497
4.09022
1.531695
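`pull` and `push` both hand a `ProgressBar` (part of git-repo, not reproduced here) to GitPython. A minimal sketch of the same idea using GitPython's `RemoteProgress` hook; the repository path and remote name are illustrative.

.. code:: python

    import git  # GitPython

    class SimpleProgress(git.RemoteProgress):
        def update(self, op_code, cur_count, max_count=None, message=''):
            # called repeatedly by GitPython while the transfer runs
            if max_count:
                print('\r{:.0%} {}'.format(cur_count / max_count, message), end='')

    def push_branch(repo_path, remote_name, branch):
        repo = git.Repo(repo_path)
        remote = repo.remote(remote_name)
        return remote.push(branch, progress=SimpleProgress())

    # usage (hypothetical): push_branch('.', 'origin', 'master')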
'''Fetch a branch from the remote repository

:param remote: git-remote instance
:param branch: name of the branch to fetch
'''
pb = ProgressBar()
pb.setup(self.name)
if local_branch:
    branch = ':'.join([branch, local_branch])
remote.fetch(branch, update_head_ok=True, force=force, progress=pb)
print()
def fetch(self, remote, branch, local_branch = None, force=False)
Fetch a branch from the remote repository :param remote: git-remote instance :param branch: name of the branch to fetch
6.123845
4.619765
1.325575
'''Clones a new repository :param user: namespace of the repository :param repo: name slug of the repository :Param branch: branch to pull as tracking This command is fairly simple, and pretty close to the real `git clone` command, except it does not take a full path, but just a namespace/slug path for a given service. ''' log.info('Cloning {}…'.format(repo)) project = self.get_repository(user, repo) if not branch: branch = self.get_project_default_branch(project) is_empty = self.is_repository_empty(project) if is_empty: self.repository.init() else: url = self.get_parent_project_url(user, repo, rw=rw) if url: parent_user, parent_project = self.convert_url_into_slug(url).split('/') self.add(user=parent_user, repo=parent_project, name='upstream', alone=True) remote, *_ = self.add(user=user, repo=repo, tracking=True, rw=rw) if not is_empty: self.pull(remote, branch)
def clone(self, user, repo, branch=None, rw=True)
Clones a repository :param user: namespace of the repository :param repo: name slug of the repository :param branch: branch to pull as tracking This command is fairly simple, and pretty close to the real `git clone` command, except it does not take a full path, but just a namespace/slug path for a given service.
5.932664
3.034231
1.955245