code | signature | docstring | loss_without_docstring | loss_with_docstring | factor |
string | string | string | float64 | float64 | float64 |
---|---|---|---|---|---|
x, y = map(np.ravel, np.broadcast_arrays(x, y))
if N is None:
N = int(np.max(x) + 0.5 * M + 1)
# Now use Lagrange polynomial weights to populate the results array;
# this is an efficient recursive implementation (see Press et al. 1989)
result = np.zeros(N, dtype=y.dtype)
# first take care of the easy cases where x is an integer
integers = (x % 1 == 0)
np.add.at(result, x[integers].astype(int), y[integers])
x, y = x[~integers], y[~integers]
# For each remaining x, find the index describing the extirpolation range.
# i.e. ilo[i] < x[i] < ilo[i] + M with x[i] in the center,
# adjusted so that the limits are within the range 0...N
ilo = np.clip((x - M // 2).astype(int), 0, N - M)
numerator = y * np.prod(x - ilo - np.arange(M)[:, np.newaxis], 0)
denominator = factorial(M - 1)
for j in range(M):
if j > 0:
denominator *= j / (j - M)
ind = ilo + (M - 1 - j)
np.add.at(result, ind, numerator / (denominator * (x - ind)))
return result
|
def extirpolate(x, y, N=None, M=4)
|
Extirpolate the values (x, y) onto an integer grid range(N),
using Lagrange polynomial weights on the M nearest points.
Parameters
----------
x : array_like
array of abscissas
y : array_like
array of ordinates
N : int
number of integer bins to use. For best performance, N should be larger
than the maximum of x
M : int
number of adjoining points on which to extirpolate.
Returns
-------
yN : ndarray
N extirpolated values associated with range(N)
Example
-------
>>> rng = np.random.RandomState(0)
>>> x = 100 * rng.rand(20)
>>> y = np.sin(x)
>>> y_hat = extirpolate(x, y)
>>> x_hat = np.arange(len(y_hat))
>>> f = lambda x: np.sin(x / 10)
>>> np.allclose(np.sum(y * f(x)), np.sum(y_hat * f(x_hat)))
True
Notes
-----
This code is based on the C implementation of spread() presented in
Numerical Recipes in C, Second Edition (Press et al. 1989; p.583).
| 4.363871 | 4.253479 | 1.025953 |
df *= freq_factor
f0 *= freq_factor
assert df > 0
t, h = map(np.ravel, np.broadcast_arrays(t, h))
if use_fft:
Mfft = int(Mfft)
assert Mfft > 0
# required size of fft is the power of 2 above the oversampling rate
Nfft = bitceil(N * oversampling)
t0 = t.min()
if f0 > 0:
h = h * np.exp(2j * np.pi * f0 * (t - t0))
tnorm = ((t - t0) * Nfft * df) % Nfft
grid = extirpolate(tnorm, h, Nfft, Mfft)
fftgrid = np.fft.ifft(grid)
if t0 != 0:
f = f0 + df * np.arange(Nfft)
fftgrid *= np.exp(2j * np.pi * t0 * f)
fftgrid = fftgrid[:N]
C = Nfft * fftgrid.real
S = Nfft * fftgrid.imag
else:
f = f0 + df * np.arange(N)
C = np.dot(h, np.cos(2 * np.pi * f * t[:, np.newaxis]))
S = np.dot(h, np.sin(2 * np.pi * f * t[:, np.newaxis]))
return S, C
|
def trig_sum(t, h, df, N, f0=0, freq_factor=1,
oversampling=5, use_fft=True, Mfft=4)
|
Compute (approximate) trigonometric sums for a number of frequencies
This routine computes weighted sine and cosine sums:
S_j = sum_i { h_i * sin(2 pi * f_j * t_i) }
C_j = sum_i { h_i * cos(2 pi * f_j * t_i) }
Where f_j = freq_factor * (f0 + j * df) for the values j in range(N).
The sums can be computed either by a brute force O[N^2] method, or
by an FFT-based O[Nlog(N)] method.
Parameters
----------
t : array_like
array of input times
h : array_like
array weights for the sum
df : float
frequency spacing
N : int
number of frequency bins to return
f0 : float (optional, default=0)
The low frequency to use
freq_factor : float (optional, default=1)
Factor which multiplies the frequency
use_fft : bool
if True, use the approximate FFT algorithm to compute the result.
This uses the FFT with Press & Rybicki's Lagrangian extirpolation.
oversampling : int (default = 5)
oversampling factor for the approximation; roughly the number of
time samples across the highest-frequency sinusoid. This parameter
contains the tradeoff between accuracy and speed. Not referenced
if use_fft is False.
Mfft : int
The number of adjacent points to use in the FFT approximation.
Not referenced if use_fft is False.
Returns
-------
S, C : ndarrays
summation arrays for frequencies f = freq_factor * (f0 + df * np.arange(N))
| 3.190417 | 2.946413 | 1.082814 |
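A minimal usage sketch with hypothetical values (assuming `trig_sum` is importable alongside NumPy as `np`): the FFT path should agree closely with the brute-force sums at the default oversampling.
>>> rng = np.random.RandomState(0)
>>> t = 100 * rng.rand(50)
>>> h = np.sin(t)
>>> S_fast, C_fast = trig_sum(t, h, df=0.01, N=100, use_fft=True)
>>> S_slow, C_slow = trig_sum(t, h, df=0.01, N=100, use_fft=False)
>>> S_fast.shape, C_fast.shape
((100,), (100,))
>>> np.allclose(S_fast, S_slow, atol=1e-2) and np.allclose(C_fast, C_slow, atol=1e-2)
True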
if band not in 'ugriz':
raise ValueError("band='{0}' not recognized".format(band))
i = 'ugriz'.find(band)
t, y, dy = self.lcdata.get_lightcurve(self.lcid, return_1d=False)
if corrected:
ext = self.obsmeta['rExt'] * self.ext_correction[band]
else:
ext = 0
return t[:, i], y[:, i] - ext, dy[:, i]
|
def observed(self, band, corrected=True)
|
Return observed values in the given band
Parameters
----------
band : str
desired bandpass: should be one of ['u', 'g', 'r', 'i', 'z']
corrected : bool (optional)
If true, correct for extinction
Returns
-------
t, mag, dmag : ndarrays
The times, magnitudes, and magnitude errors for the specified band.
| 5.82514 | 5.333477 | 1.092184 |
t = np.asarray(t)
num = self.meta[band + 'T']
mu = self.meta[band + '0']
amp = self.meta[band + 'A']
t0 = self.meta[band + 'E']
# if there are nans or infinities, mask them
bad_vals = np.isnan(t) | np.isinf(t)
t[bad_vals] = t0
if corrected:
ext = 0
else:
ext = self.obsmeta['rExt'] * self.ext_correction[band]
func = self._template_func(num, band, mu + ext, amp)
mag = func(((t - t0) / self.period) % 1)
mag[bad_vals] = np.nan
if err is not None:
mag += self.rng.normal(0, err, t.shape)
return mag
|
def generated(self, band, t, err=None, corrected=True)
|
Return generated magnitudes in the specified band
Parameters
----------
band : str
desired bandpass: should be one of ['u', 'g', 'r', 'i', 'z']
t : array_like
array of times (in days)
err : float or array_like
gaussian error in observations
corrected : bool (optional)
If true, correct for extinction
Returns
-------
mag : ndarray
magnitudes at the specified times under the generated model.
| 4.624924 | 4.710258 | 0.981884 |
# Import here so astroML is not required at package level
from astroML.datasets.tools import get_data_home
if data_home is None:
data_home = get_data_home(data_home)
data_home = os.path.join(data_home, 'Sesar2010')
if not os.path.exists(data_home):
os.makedirs(data_home)
src_url = url + filename
save_loc = os.path.join(data_home, filename)
if force_download or not os.path.exists(save_loc):
fhandle = urlopen(src_url)
with open(save_loc, 'wb') as cache:
cache.write(fhandle.read())
return save_loc
|
def _get_download_or_cache(filename, data_home=None,
url=SESAR_RRLYRAE_URL,
force_download=False)
|
Private utility to download and/or load data from disk cache.
| 2.384086 | 2.28653 | 1.042666 |
if partial:
return PartialRRLyraeLC('table1.tar.gz',
cache_kwargs=kwargs)
else:
return RRLyraeLC('table1.tar.gz',
cache_kwargs=kwargs)
|
def fetch_rrlyrae(partial=False, **kwargs)
|
Fetch RR Lyrae light curves from Sesar 2010
Parameters
----------
partial : bool (optional)
If true, return the partial dataset (reduced to 1 band per night)
Returns
-------
rrlyrae : :class:`RRLyraeLC` object
This object contains pointers to the RR Lyrae data.
Other Parameters
----------------
data_home : str (optional)
Specify the local cache directory for the dataset. If not used, it
will default to the ``astroML`` default location.
url : str (optional)
Specify the URL of the datasets. Defaults to webpage associated with
Sesar 2010.
force_download : bool (optional)
If true, then force re-downloading data even if it is already cached
locally. Default is False.
Examples
--------
>>> rrlyrae = fetch_rrlyrae()
>>> rrlyrae.ids[:5]
[1013184, 1019544, 1027882, 1052471, 1056152]
>>> lcid = rrlyrae.ids[0]
>>> t, mag, dmag, bands = rrlyrae.get_lightcurve(lcid)
>>> t[:4]
array([ 51081.347856, 51081.349522, 51081.346189, 51081.347022])
>>> mag[:4]
array([ 18.702, 17.553, 17.236, 17.124])
>>> dmag[:4]
array([ 0.021, 0.005, 0.005, 0.006])
>>> list(bands[:4])
['u', 'g', 'r', 'i']
| 4.723311 | 6.498722 | 0.726806 |
save_loc = _get_download_or_cache('table2.dat.gz', **kwargs)
dtype = [('id', 'i'), ('type', 'S2'), ('P', 'f'),
('uA', 'f'), ('u0', 'f'), ('uE', 'f'), ('uT', 'f'),
('gA', 'f'), ('g0', 'f'), ('gE', 'f'), ('gT', 'f'),
('rA', 'f'), ('r0', 'f'), ('rE', 'f'), ('rT', 'f'),
('iA', 'f'), ('i0', 'f'), ('iE', 'f'), ('iT', 'f'),
('zA', 'f'), ('z0', 'f'), ('zE', 'f'), ('zT', 'f')]
return np.loadtxt(save_loc, dtype=dtype)
|
def fetch_rrlyrae_lc_params(**kwargs)
|
Fetch data from table 2 of Sesar 2010
This table includes observationally-derived parameters for all the
Sesar 2010 lightcurves.
| 2.350697 | 2.392819 | 0.982396 |
save_loc = _get_download_or_cache('table3.dat.gz', **kwargs)
dtype = [('id', 'i'), ('RA', 'f'), ('DEC', 'f'), ('rExt', 'f'),
('d', 'f'), ('RGC', 'f'),
('u', 'f'), ('g', 'f'), ('r', 'f'),
('i', 'f'), ('z', 'f'), ('V', 'f'),
('ugmin', 'f'), ('ugmin_err', 'f'),
('grmin', 'f'), ('grmin_err', 'f')]
return np.loadtxt(save_loc, dtype=dtype)
|
def fetch_rrlyrae_fitdata(**kwargs)
|
Fetch data from table 3 of Sesar 2010
This table includes parameters derived from template fits to all the
Sesar 2010 lightcurves.
| 3.797541 | 3.955328 | 0.960108 |
filename = '{0}/{1}.dat'.format(self.dirname, star_id)
try:
data = np.loadtxt(self.data.extractfile(filename))
except KeyError:
raise ValueError("invalid star id: {0}".format(star_id))
RA = data[:, 0]
DEC = data[:, 1]
t = data[:, 2::3]
y = data[:, 3::3]
dy = data[:, 4::3]
nans = (y == -99.99)
t[nans] = np.nan
y[nans] = np.nan
dy[nans] = np.nan
if return_1d:
t, y, dy, filts = np.broadcast_arrays(t, y, dy,
['u', 'g', 'r', 'i', 'z'])
good = ~np.isnan(t)
return t[good], y[good], dy[good], filts[good]
else:
return t, y, dy
|
def get_lightcurve(self, star_id, return_1d=True)
|
Get the light curves for the given ID
Parameters
----------
star_id : int
A valid integer star id representing an object in the dataset
return_1d : boolean (default=True)
Specify whether to return 1D arrays of (t, y, dy, filts) or
2D arrays of (t, y, dy) where each column is a filter.
Returns
-------
t, y, dy : np.ndarrays (if return_1d == False)
Times, magnitudes, and magnitude errors.
The shape of each array is [Nobs, 5], where the columns refer
to [u,g,r,i,z] bands. Non-observations are indicated by NaN.
t, y, dy, filts : np.ndarrays (if return_1d == True)
Times, magnitudes, magnitude errors, and filters
The shape of each array is [Nobs], and non-observations are
filtered out.
| 2.526558 | 2.414751 | 1.046302 |
if self._metadata is None:
self._metadata = fetch_rrlyrae_lc_params()
i = np.where(self._metadata['id'] == lcid)[0]
if len(i) == 0:
raise ValueError("invalid lcid: {0}".format(lcid))
return self._metadata[i[0]]
|
def get_metadata(self, lcid)
|
Get the parameters derived from the fit for the given id.
This is table 2 of Sesar 2010
| 3.958972 | 3.413891 | 1.159666 |
if self._obsdata is None:
self._obsdata = fetch_rrlyrae_fitdata()
i = np.where(self._obsdata['id'] == lcid)[0]
if len(i) == 0:
raise ValueError("invalid lcid: {0}".format(lcid))
return self._obsdata[i[0]]
|
def get_obsmeta(self, lcid)
|
Get the observation metadata for the given id.
This is table 3 of Sesar 2010
| 3.772349 | 3.626063 | 1.040343 |
try:
data = np.loadtxt(self.data.extractfile(template_id + '.dat'))
except KeyError:
raise ValueError("invalid star id: {0}".format(template_id))
return data[:, 0], data[:, 1]
|
def get_template(self, template_id)
|
Get a particular lightcurve template
Parameters
----------
template_id : str
id of desired template
Returns
-------
phase : ndarray
array of phases
mag : ndarray
array of normalized magnitudes
| 5.714584 | 5.08455 | 1.123911 |
hr_data = self.hr_values()
return int(sum(hr_data) / len(hr_data))
|
def hr_avg(self)
|
Average heart rate of the workout
| 4.823905 | 4.04897 | 1.191391 |
secs_per_km = self.duration / (self.distance / 1000)
return time.strftime('%M:%S', time.gmtime(secs_per_km))
|
def pace(self)
|
Average pace (mm:ss per km) for the workout
| 3.781524 | 3.197057 | 1.182814 |
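A worked example of the formula above, in plain Python with hypothetical numbers (no Workout object needed): a 10 km run lasting 3000 seconds works out to 05:00 per km.
>>> import time
>>> secs_per_km = 3000 / (10000 / 1000)  # duration (s) / distance (km)
>>> time.strftime('%M:%S', time.gmtime(secs_per_km))
'05:00'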
total_ascent = 0.0
altitude_data = self.altitude_points()
for i in range(len(altitude_data) - 1):
diff = altitude_data[i+1] - altitude_data[i]
if diff > 0.0:
total_ascent += diff
return total_ascent
|
def ascent(self)
|
Returns ascent of workout in meters
| 2.681432 | 2.359143 | 1.136613 |
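An equivalent one-liner over a hypothetical altitude series, to illustrate what `ascent` accumulates (only the positive differences):
>>> alts = [100.0, 105.0, 103.0, 110.0]
>>> sum(max(b - a, 0.0) for a, b in zip(alts, alts[1:]))
12.0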
total_descent = 0.0
altitude_data = self.altitude_points()
for i in range(len(altitude_data) - 1):
diff = altitude_data[i+1] - altitude_data[i]
if diff < 0.0:
total_descent += abs(diff)
return total_descent
|
def descent(self)
|
Returns descent of workout in meters
| 2.764187 | 2.491146 | 1.109604 |
invalid_chars = '!\"#$%&\'()*+-./:;<=>?@[\\]^_{|}~\t\n'
if any(char in invalid_chars for char in keywords):
raise ValidationError(MESSAGE_KEYWORD_SPECIAL_CHARS)
|
def keywords_special_characters(keywords)
|
Confirms that the keywords don't contain special characters
Args:
keywords (str)
Raises:
django.forms.ValidationError
| 3.818029 | 4.428728 | 0.862105 |
if value.image.format.upper() not in constants.ALLOWED_IMAGE_FORMATS:
raise ValidationError(MESSAGE_INVALID_IMAGE_FORMAT)
|
def image_format(value)
|
Confirms that the uploaded image is of supported format.
Args:
value (File): The file with an `image` property containing the image
Raises:
django.forms.ValidationError
| 6.423262 | 5.098093 | 1.259934 |
parsed = parse.urlparse(value.lower())
if not parsed.netloc.endswith('facebook.com'):
raise ValidationError(MESSAGE_NOT_FACEBOOK)
|
def case_study_social_link_facebook(value)
|
Confirms that the social media url is pointed at the correct domain.
Args:
value (string): The url to check.
Raises:
django.forms.ValidationError
| 5.77042 | 6.185073 | 0.932959 |
parsed = parse.urlparse(value.lower())
if not parsed.netloc.endswith('twitter.com'):
raise ValidationError(MESSAGE_NOT_TWITTER)
|
def case_study_social_link_twitter(value)
|
Confirms that the social media url is pointed at the correct domain.
Args:
value (string): The url to check.
Raises:
django.forms.ValidationError
| 5.417538 | 5.838859 | 0.927842 |
parsed = parse.urlparse(value.lower())
if not parsed.netloc.endswith('linkedin.com'):
raise ValidationError(MESSAGE_NOT_LINKEDIN)
|
def case_study_social_link_linkedin(value)
|
Confirms that the social media url is pointed at the correct domain.
Args:
value (string): The url to check.
Raises:
django.forms.ValidationError
| 5.438718 | 6.004476 | 0.905777 |
for prefix, name in company_types_with_insufficient_companies_house_data:
if value.upper().startswith(prefix):
raise ValidationError(
MESSAGE_INSUFFICIENT_DATA, params={'name': name}
)
|
def no_company_with_insufficient_companies_house_data(value)
|
Confirms that the company number is not for a company that
Companies House does not hold information on.
Args:
value (string): The company number to check.
Raises:
django.forms.ValidationError
| 5.394264 | 5.834042 | 0.924619 |
domain = helpers.get_domain_from_email_address(value)
if domain.lower() in free_domains:
raise ValidationError(MESSAGE_USE_COMPANY_EMAIL)
|
def email_domain_free(value)
|
Confirms that the email address is not using a free service.
@param {str} value
@returns {None}
@raises django.forms.ValidationError
| 7.408188 | 8.810359 | 0.84085 |
domain = helpers.get_domain_from_email_address(value)
if domain.lower() in disposable_domains:
raise ValidationError(MESSAGE_USE_COMPANY_EMAIL)
|
def email_domain_disposable(value)
|
Confirms that the email address is not using a disposable service.
@param {str} value
@returns {None}
@raises django.forms.ValidationError
| 6.619162 | 8.443723 | 0.783915 |
try:
parsed = phonenumbers.parse(value, 'GB')
except NumberParseException:
pass
else:
is_mobile = carrier._is_mobile(number_type(parsed))
if is_mobile and phonenumbers.is_valid_number(parsed):
return None
raise ValidationError(MESSAGE_INVALID_PHONE_NUMBER)
|
def domestic_mobile_phone_number(value)
|
Confirms that the phone number is a valid UK mobile phone number.
@param {str} value
@returns {None}
@raises django.forms.ValidationError
| 4.709093 | 5.288334 | 0.890468 |
result = [points[0]]
for i in range(1, len(points) - 1):
prv = points[i-1]
crr = points[i]
nxt = points[i+1]
if prv.time <= crr.time <= nxt.time:
result.append(crr)
result.append(points[-1])
return result
|
def remove_liers(points)
|
Removes obvious noise points
Checks time consistency, removing points that appear out of order
Args:
points (:obj:`list` of :obj:`Point`)
Returns:
:obj:`list` of :obj:`Point`
| 2.3104 | 2.457208 | 0.940254 |
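A small sketch (assuming the module's `Point(lat, lon, time)` constructor and comparable `time` values): the out-of-order middle point is dropped.
>>> from datetime import datetime
>>> pts = [Point(0.0, 0.0, datetime(2016, 1, 1, 10, 0)),
...        Point(0.0, 0.0, datetime(2016, 1, 1, 9, 0)),
...        Point(0.0, 0.0, datetime(2016, 1, 1, 10, 2))]
>>> len(remove_liers(pts))
2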
points = self.points[lower_index:upper_index]
min_lat = float("inf")
min_lon = float("inf")
max_lat = -float("inf")
max_lon = -float("inf")
for point in points:
min_lat = min(min_lat, point.lat)
min_lon = min(min_lon, point.lon)
max_lat = max(max_lat, point.lat)
max_lon = max(max_lon, point.lon)
return (min_lat - thr, min_lon - thr, max_lat + thr, max_lon + thr)
|
def bounds(self, thr=0, lower_index=0, upper_index=-1)
|
Computes the bounds of the segment, or part of it
Args:
lower_index (int, optional): Start index. Defaults to 0
upper_index (int, optional): End index. Defaults to -1
Returns:
:obj:`tuple` of :obj:`float`: Bounds of the (sub)segment, such that
(min_lat, min_lon, max_lat, max_lon)
| 1.575697 | 1.556051 | 1.012626 |
if strategy is INVERSE_STRATEGY:
self.points = with_inverse(self.points, noise)
elif strategy is EXTRAPOLATE_STRATEGY:
self.points = with_extrapolation(self.points, noise, 30)
elif strategy is NO_STRATEGY:
self.points = with_no_strategy(self.points, noise)
return self
|
def smooth(self, noise, strategy=INVERSE_STRATEGY)
|
In-place smoothing
See smooth_segment function
Args:
noise (float): Noise expected
strategy (int): Strategy to use. Either smooth.INVERSE_STRATEGY
or smooth.EXTRAPOLATE_STRATEGY
Returns:
:obj:`Segment`
| 2.641988 | 2.772531 | 0.952916 |
if topology_only:
self.points = drp(self.points, eps)
else:
self.points = spt(self.points, max_dist_error, max_speed_error)
return self
|
def simplify(self, eps, max_dist_error, max_speed_error, topology_only=False)
|
In-place segment simplification
See `drp` and `compression` modules
Args:
eps (float): Distance threshold for the `drp` function
max_dist_error (float): Max distance error, in meters
max_speed_error (float): Max speed error, in km/h
topology_only (bool, optional): True to only keep topology, not considering
times when simplifying. Defaults to False.
Returns:
:obj:`Segment`
| 4.118866 | 3.576848 | 1.151535 |
for prev, point in pairwise(self.points):
point.compute_metrics(prev)
return self
|
def compute_metrics(self)
|
Computes metrics for each point
Returns:
:obj:`Segment`: self
| 13.237811 | 10.555717 | 1.254089 |
self.location_from = infer_location(
self.points[0],
location_query,
max_distance,
google_key,
foursquare_client_id,
foursquare_client_secret,
limit
)
self.location_to = infer_location(
self.points[-1],
location_query,
max_distance,
google_key,
foursquare_client_id,
foursquare_client_secret,
limit
)
return self
|
def infer_location(
self,
location_query,
max_distance,
google_key,
foursquare_client_id,
foursquare_client_secret,
limit
)
|
In-place location inferring
See infer_location function
Args:
Returns:
:obj:`Segment`: self
| 1.717447 | 1.806729 | 0.950583 |
self.transportation_modes = speed_clustering(clf, self.points, min_time)
return self
|
def infer_transportation_mode(self, clf, min_time)
|
In-place transportation mode inferring
See infer_transportation_mode function
Args:
Returns:
:obj:`Segment`: self
| 7.807756 | 12.580415 | 0.620628 |
self.points = sort_segment_points(self.points, segment.points)
return self
|
def merge_and_fit(self, segment)
|
Merges another segment with this one, ordering the points based on a
distance heuristic
Args:
segment (:obj:`Segment`): Segment to merge with
Returns:
:obj:`Segment`: self
| 8.499692 | 6.403974 | 1.327253 |
i = 0
point_arr = point.gen2arr()
def closest_in_line(pointA, pointB):
temp = closest_point(pointA.gen2arr(), pointB.gen2arr(), point_arr)
return Point(temp[1], temp[0], None)
for (p_a, p_b) in pairwise(self.points):
candidate = closest_in_line(p_a, p_b)
if candidate.distance(point) <= thr:
if p_a.distance(point) <= thr:
return i, p_a
elif p_b.distance(point) <= thr:
return i + 1, p_b
else:
return i, candidate
i = i + 1
return -1, None
|
def closest_point_to(self, point, thr=20.0)
|
Finds the closest point in the segment to a given point
Args:
point (:obj:`Point`)
thr (float, optional): Distance threshold, in meters, to be considered
the same point. Defaults to 20.0
Returns:
(int, Point): Index of the point. -1 if doesn't exist. A point is given if it's along the segment
| 3.000734 | 3.193637 | 0.939598 |
reverse = False
if start > end:
temp = start
start = end
end = temp
reverse = True
seg = self.copy()
seg.points = seg.points[start:end+1]
if reverse:
seg.points = list(reversed(seg.points))
return seg
|
def slice(self, start, end)
|
Creates a copy of the current segment between the given indexes. If start > end,
the points are reversed
Args:
start (int): Start index
end (int): End index
Returns:
:obj:`Segment`
| 2.951529 | 2.719097 | 1.085481 |
points = [point.to_json() for point in self.points]
return {
'points': points,
'transportationModes': self.transportation_modes,
'locationFrom': self.location_from.to_json() if self.location_from is not None else None,
'locationTo': self.location_to.to_json() if self.location_to is not None else None
}
|
def to_json(self)
|
Converts segment to a JSON serializable format
Returns:
:obj:`dict`
| 2.408675 | 2.733025 | 0.881322 |
points = []
for point in gpx_segment.points:
points.append(Point.from_gpx(point))
return Segment(points)
|
def from_gpx(gpx_segment)
|
Creates a segment from a GPX track segment.
No preprocessing is done.
Arguments:
gpx_segment (:obj:`gpxpy.GPXTrackSegment`)
Return:
:obj:`Segment`
| 2.649294 | 3.090527 | 0.857231 |
points = []
for point in json['points']:
points.append(Point.from_json(point))
return Segment(points)
|
def from_json(json)
|
Creates a segment from its JSON representation.
No preprocessing is done.
Arguments:
json (:obj:`dict`): JSON representation. See to_json.
Return:
:obj:`Segment`
| 3.335582 | 4.18033 | 0.797923 |
points = points[:n_points]
lat = []
lon = []
last = None
for point in points:
if last is not None:
lat.append(last.lat-point.lat)
lon.append(last.lon-point.lon)
last = point
dts = np.mean([p.dt for p in points])
lons = np.mean(lon)
lats = np.mean(lat)
gen_sample = []
last = points[0]
for _ in range(n_points):
point = Point(last.lat+lats, last.lon+lons, None)
point.dt = dts
# point.compute_metrics(last)
gen_sample.append(point)
last = point
return gen_sample
|
def extrapolate_points(points, n_points)
|
Extrapolate a number of points, based on the first ones
Args:
points (:obj:`list` of :obj:`Point`)
n_points (int): number of points to extrapolate
Returns:
:obj:`list` of :obj:`Point`
| 2.796012 | 2.936589 | 0.952129 |
return kalman_filter(extrapolate_points(points, n_points) + points, noise)[n_points:]
|
def with_extrapolation(points, noise, n_points)
|
Smooths a set of points, but it extrapolates some points at the beginning
Args:
points (:obj:`list` of :obj:`Point`)
noise (float): Expected noise, the higher it is the more the path will
be smoothed.
n_points (int): Number of points to extrapolate and prepend before filtering
Returns:
:obj:`list` of :obj:`Point`
| 7.251376 | 10.512538 | 0.689784 |
# noise_sample = 20
n_points = len(points) // 2
break_point = n_points
points_part = copy.deepcopy(points)
points_part = list(reversed(points_part))
part = kalman_filter(points_part, noise)
total = kalman_filter(points, noise)
result = list(reversed(part))[:break_point] + total[break_point:]
result[break_point] = point_mean(part[break_point], total[break_point])
return result
|
def with_inverse(points, noise)
|
Smooths a set of points
It smooths them twice, once in given order, another one in the reverse order.
The first half of the results will be taken from the reverse order and
the second half from the normal order.
Args:
points (:obj:`list` of :obj:`Point`)
noise (float): Expected noise, the higher it is the more the path will
be smoothed.
Returns:
:obj:`list` of :obj:`Point`
| 4.582646 | 4.616345 | 0.9927 |
final_segments = []
for segment in segments:
final_segments.append([])
for point in segment:
if point.dt > min_time:
final_segments.append([])
final_segments[-1].append(point)
return final_segments
|
def temporal_segmentation(segments, min_time)
|
Segments based on time distant points
Args:
segments (:obj:`list` of :obj:`list` of :obj:`Point`): segment points
min_time (int): minimum required time for segmentation
| 2.957546 | 3.123665 | 0.946819 |
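A minimal sketch with stand-in points (only the `dt` attribute is needed here, so `SimpleNamespace` fakes it): a gap larger than `min_time` starts a new segment.
>>> from types import SimpleNamespace
>>> pts = [SimpleNamespace(dt=1), SimpleNamespace(dt=1),
...        SimpleNamespace(dt=120), SimpleNamespace(dt=1)]
>>> [len(s) for s in temporal_segmentation([pts], min_time=60)]
[2, 2]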
# segments = [points for points in segments if len(points) > 1]
result_segments = []
prev_segment = None
for i, segment in enumerate(segments):
if len(segment) < 1:
continue
cluster = clusters[i]
if prev_segment is None:
prev_segment = segment
else:
cluster_dt = 0
if len(cluster) > 0:
cluster_dt = abs(cluster[0].time_difference(cluster[-1]))
if cluster_dt <= min_time:
prev_segment.extend(segment)
else:
prev_segment.append(segment[0])
result_segments.append(prev_segment)
prev_segment = segment
if prev_segment is not None:
result_segments.append(prev_segment)
return result_segments
|
def correct_segmentation(segments, clusters, min_time)
|
Corrects the predicted segmentation
This process prevents over segmentation
Args:
segments (:obj:`list` of :obj:`list` of :obj:`Point`):
segments to correct
min_time (int): minimum required time for segmentation
| 2.287929 | 2.39242 | 0.956324 |
# min time / sample rate
dt_average = np.median([point.dt for point in points])
min_samples = min_time / dt_average
data = [point.gen3arr() for point in points]
data = StandardScaler().fit_transform(data)
print('min_samples: %f' % min_samples)
db_cluster = DBSCAN(eps=eps, min_samples=min_samples).fit(data)
labels = db_cluster.labels_
n_clusters_ = len(set(labels)) - (1 if -1 in labels else 0)
segments = [[] for _ in range(n_clusters_+1)]
clusters = [[] for _ in range(n_clusters_+1)]
current_segment = 0
print('clusters')
print(n_clusters_)
if n_clusters_ == 1:
segments = temporal_segmentation([points], min_time)
return [segment for segment in segments if len(segment) > 1]
# split segments identified with dbscan
for i, label in enumerate(labels):
if label != -1 and label + 1 != current_segment:
current_segment = label + 1
point = points[i]
if label == -1:
segments[current_segment].append(point)
else:
clusters[label + 1].append(point)
if len(segments) == 0 or sum(len(s) for s in segments) == 0:
segments = [points]
segments = temporal_segmentation(segments, min_time)
# segments = temporal_segmentation(correct_segmentation(segments, clusters, min_time), min_time)
return [segment for segment in segments if len(segment) > 1]
|
def spatiotemporal_segmentation(points, eps, min_time)
|
Splits a set of points into multiple sets of points based on
spatio-temporal stays
DBSCAN is used to predict possible segmentations,
furthermore we check to see if each clusters is big enough in
time (>=min_time). If that's the case than the segmentation is
considered valid.
When segmenting, the last point of the ith segment will be the same
of the (i-1)th segment.
Segments are identified through clusters.
The last point of a clusters, that comes after a sub-segment A, will
be present on the sub-segment A.
Args:
points (:obj:`list` of :obj:`Point`): segment's points
eps (float): Epsilon to feed to the DBSCAN algorithm.
Maximum distance between two samples, to be considered in
the same cluster.
min_time (float): Minimum time of a stay
Returns:
:obj:`list` of :obj:`list` of :obj:`Point`: Initial set of
points in different segments
| 3.045516 | 3.007309 | 1.012705 |
kalman = ikalman.filter(noise)
for point in points:
kalman.update_velocity2d(point.lat, point.lon, point.dt)
(lat, lon) = kalman.get_lat_long()
point.lat = lat
point.lon = lon
return points
|
def kalman_filter(points, noise)
|
Smooths points with kalman filter
See https://github.com/open-city/ikalman
Args:
points (:obj:`list` of :obj:`Point`): points to smooth
noise (float): expected noise
| 4.555761 | 5.255282 | 0.866892 |
for segment in track.segments:
tmodes = segment.transportation_modes
points = segment.points
features = []
labels = []
for tmode in tmodes:
points_part = points[tmode['from']:tmode['to']]
if len(points_part) > 0:
features.append(extract_features_2(points_part))
labels.append(tmode['label'])
clf.learn(features, labels)
|
def learn_transportation_mode(track, clf)
|
Inserts transportation modes of a track into a classifier
Args:
track (:obj:`Track`)
clf (:obj:`Classifier`)
| 3.249549 | 3.246859 | 1.000829 |
max_bin = -1
for point in points:
max_bin = max(max_bin, point.vel)
max_bin = int(round(max_bin)) + 1
# inits histogram
histogram = [0] * max_bin
time = 0
# fills histogram
for point in points:
bin_index = int(round(point.vel))
histogram[bin_index] += point.dt
time += point.dt
result = []
if time == 0:
return result
for _ in range(n_tops):
max_index = np.argmax(histogram)
value = histogram[max_index] / time
result.extend([max_index, value])
histogram[max_index] = -1
return result
|
def extract_features(points, n_tops)
|
Feature extractor
Args:
points (:obj:`list` of :obj:`Point`)
n_tops (int): Number of top speeds to extract
Returns:
:obj:`list` of float: with length (n_tops*2). Where the ith even element
is the ith top speed and the i+1 element is the percentage of time
spent on that speed
| 2.993296 | 3.01335 | 0.993345 |
data = [0]
for before, after in pairwise(points):
data.append(before.vel - after.vel)
return data
|
def speed_difference(points)
|
Computes the speed difference between each adjacent point
Args:
points (:obj:`Point`)
Returns:
:obj:`list` of float: speed difference between each pair of adjacent points
| 5.599298 | 7.688273 | 0.728291 |
data = [0]
for before, after in pairwise(points):
data.append(before.acc - after.acc)
return data
|
def acc_difference(points)
|
Computes the acceleration difference between each adjacent point
Args:
points (:obj:`Point`)
Returns:
:obj:`list` of float: acceleration difference between each pair of adjacent points
| 4.931079 | 6.645149 | 0.742057 |
data = data_processor(points)
changepoints = pelt(normal_mean(data, np.std(data)), len(data))
changepoints.append(len(points) - 1)
result = []
for start, end in pairwise(changepoints):
time_diff = points[end].time_difference(points[start])
if time_diff > min_time:
result.append(start)
# adds the first point
result.append(0)
# adds the last changepoint detected
result.append(len(points) - 1)
return sorted(list(set(result)))
|
def detect_changepoints(points, min_time, data_processor=acc_difference)
|
Detects changepoints on points that have at least a specific duration
Args:
points (:obj:`Point`)
min_time (float): Min time that a sub-segment, bounded by two changepoints, must have
data_processor (function): Function to extract data to feed to the changepoint algorithm.
Defaults to `acc_difference`
Returns:
:obj:`list` of int: Indexes of changepoints
| 3.839811 | 4.15996 | 0.92304 |
if len(modes) > 0:
previous = modes[0]
grouped = []
for changep in modes[1:]:
if changep['label'] != previous['label']:
previous['to'] = changep['from']
grouped.append(previous)
previous = changep
previous['to'] = modes[-1]['to']
grouped.append(previous)
return grouped
else:
return modes
|
def group_modes(modes)
|
Groups consecutive transportation modes with same label, into one
Args:
modes (:obj:`list` of :obj:`dict`)
Returns:
:obj:`list` of :obj:`dict`
| 3.426922 | 3.11665 | 1.099553 |
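An illustrative run over hypothetical mode dicts: two consecutive 'walk' entries collapse into one.
>>> modes = [{'label': 'walk', 'from': 0, 'to': 10},
...          {'label': 'walk', 'from': 10, 'to': 20},
...          {'label': 'car', 'from': 20, 'to': 50}]
>>> group_modes(modes)
[{'label': 'walk', 'from': 0, 'to': 20}, {'label': 'car', 'from': 20, 'to': 50}]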
# get changepoint indexes
changepoints = detect_changepoints(points, min_time)
# info for each changepoint
cp_info = []
for i in range(0, len(changepoints) - 1):
from_index = changepoints[i]
to_index = changepoints[i+1]
info = classify(clf, points[from_index:to_index], min_time, from_index, to_index)
if info:
cp_info.append(info)
return group_modes(cp_info)
|
def speed_clustering(clf, points, min_time)
|
Transportation mode inferring, based on changepoint segmentation
Args:
clf (:obj:`Classifier`): Classifier to use
points (:obj:`list` of :obj:`Point`)
min_time (float): Min time, in seconds, between changepoints
Returns:
:obj:`list` of :obj:`dict`
| 3.216833 | 3.285367 | 0.97914 |
return sqrt((p_a.lat - p_b.lat) ** 2 + (p_a.lon - p_b.lon) ** 2)
|
def distance(p_a, p_b)
|
Euclidean distance, between two points
Args:
p_a (:obj:`Point`)
p_b (:obj:`Point`)
Returns:
float: distance, in degrees
| 2.151346 | 3.059586 | 0.703149 |
if start == end:
return distance(point, start)
else:
un_dist = abs(
(end.lat-start.lat)*(start.lon-point.lon) - (start.lat-point.lat)*(end.lon-start.lon)
)
n_dist = sqrt(
(end.lat-start.lat)**2 + (end.lon-start.lon)**2
)
if n_dist == 0:
return 0
else:
return un_dist / n_dist
|
def point_line_distance(point, start, end)
|
Distance from a point to a line, formed by two points
Args:
point (:obj:`Point`)
start (:obj:`Point`): line point
end (:obj:`Point`): line point
Returns:
float: distance to line, in degrees
| 2.225309 | 2.376149 | 0.936519 |
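A quick check (assuming the module's `Point(lat, lon, time)` constructor): a point one degree off a horizontal segment is at distance 1.
>>> a, b = Point(0.0, 0.0, None), Point(2.0, 0.0, None)
>>> point_line_distance(Point(1.0, 1.0, None), a, b)
1.0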
dmax = 0.0
index = 0
for i in range(1, len(points)-1):
dist = point_line_distance(points[i], points[0], points[-1])
if dist > dmax:
index = i
dmax = dist
if dmax > epsilon:
return drp(points[:index+1], epsilon)[:-1] + drp(points[index:], epsilon)
else:
return [points[0], points[-1]]
|
def drp(points, epsilon)
|
Ramer-Douglas-Peucker algorithm
Based on https://en.wikipedia.org/wiki/Ramer%E2%80%93Douglas%E2%80%93Peucker_algorithm
Args:
points (:obj:`list` of :obj:`Point`)
epsilon (float): drp threshold
Returns:
:obj:`list` of :obj:`Point`
| 2.220483 | 2.516533 | 0.882358 |
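A minimal sketch (same `Point` assumption as above): collinear interior points fall below any positive epsilon and are discarded.
>>> pts = [Point(0.0, 0.0, None), Point(0.5, 0.5, None), Point(1.0, 1.0, None)]
>>> len(drp(pts, 0.01))
2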
if len(points) <= 2:
return points
else:
max_speed_threshold = 0
found_index = 0
for i in range(1, len(points)-1):
dt1 = time_dist(points[i], points[i-1])
if dt1 == 0:
dt1 = 0.000000001
vim = loc_dist(points[i], points[i-1]) / dt1
dt2 = time_dist(points[i+1], points[i])
if dt2 == 0:
dt2 = 0.000000001
vi_ = loc_dist(points[i+1], points[i]) / dt2
if abs(vi_ - vim) > max_speed_threshold:
max_speed_threshold = abs(vi_ - vim)
found_index = i
if max_speed_threshold > speed_threshold:
one = td_sp(points[:found_index], speed_threshold)
two = td_sp(points[found_index:], speed_threshold)
one.extend(two)
return one
else:
return [points[0], points[-1]]
|
def td_sp(points, speed_threshold)
|
Top-Down Speed-Based Trajectory Compression Algorithm
Detailed in https://www.itc.nl/library/Papers_2003/peer_ref_conf/meratnia_new.pdf
Args:
points (:obj:`list` of :obj:`Point`): trajectory or part of it
speed_threshold (float): max speed error, in km/h
Returns:
:obj:`list` of :obj:`Point`, compressed trajectory
| 1.951517 | 1.992927 | 0.979222 |
if len(points) <= 2:
return points
else:
max_dist_threshold = 0
found_index = 0
delta_e = time_dist(points[-1], points[0]) * I_3600
d_lat = points[-1].lat - points[0].lat
d_lon = points[-1].lon - points[0].lon
for i in range(1, len(points)-1):
delta_i = time_dist(points[i], points[0]) * I_3600
di_de = delta_i / delta_e if delta_e != 0 else 0
point = Point(
points[0].lat + d_lat * di_de,
points[0].lon + d_lon * di_de,
None
)
dist = loc_dist(points[i], point)
if dist > max_dist_threshold:
max_dist_threshold = dist
found_index = i
if max_dist_threshold > dist_threshold:
one = td_tr(points[:found_index], dist_threshold)
two = td_tr(points[found_index:], dist_threshold)
one.extend(two)
return one
else:
return [points[0], points[-1]]
|
def td_tr(points, dist_threshold)
|
Top-Down Time-Ratio Trajectory Compression Algorithm
Detailed in https://www.itc.nl/library/Papers_2003/peer_ref_conf/meratnia_new.pdf
Args:
points (:obj:`list` of :obj:`Point`): trajectory or part of it
dist_threshold (float): max distance error, in meters
Returns:
:obj:`list` of :obj:`Point`, compressed trajectory
| 2.458735 | 2.478673 | 0.991956 |
if len(points) <= 2:
return points
else:
is_error = False
e = 1
while e < len(points) and not is_error:
i = 1
while i < e and not is_error:
delta_e = time_dist(points[e], points[0]) * I_3600
delta_i = time_dist(points[i], points[0]) * I_3600
di_de = 0
if delta_e != 0:
di_de = delta_i / delta_e
d_lat = points[e].lat - points[0].lat
d_lon = points[e].lon - points[0].lon
point = Point(
points[0].lat + d_lat * di_de,
points[0].lon + d_lon * di_de,
None
)
dt1 = time_dist(points[i], points[i-1])
if dt1 == 0:
dt1 = 0.000000001
dt2 = time_dist(points[i+1], points[i])
if dt2 == 0:
dt2 = 0.000000001
v_i_1 = loc_dist(points[i], points[i-1]) / dt1
v_i = loc_dist(points[i+1], points[i]) / dt2
if loc_dist(points[i], point) > max_dist_error or abs(v_i - v_i_1) > max_speed_error:
is_error = True
else:
i = i + 1
if is_error:
return [points[0]] + spt(points[i:len(points)], max_dist_error, max_speed_error)
e = e + 1
if not is_error:
return [points[0], points[len(points)-1]]
|
def spt(points, max_dist_error, max_speed_error)
|
A combination of both `td_sp` and `td_tr`
Detailed in,
Spatiotemporal Compression Techniques for Moving Point Objects,
Nirvana Meratnia and Rolf A. de By, 2004,
in Advances in Database Technology - EDBT 2004: 9th
International Conference on Extending Database Technology,
Heraklion, Crete, Greece, March 14-18, 2004
Args:
points (:obj:`list` of :obj:`Point`)
max_dist_error (float): max distance error, in meters
max_speed_error (float): max speed error, in km/h
Returns:
:obj:`list` of :obj:`Point`
| 2.174048 | 2.154711 | 1.008974 |
if len(self.segments) > 0:
return self.segments[0].points[0].time.strftime(name_format) + ".gpx"
else:
return "EmptyTrack"
|
def generate_name(self, name_format=DEFAULT_FILE_NAME_FORMAT)
|
Generates a name for the track
The name is generated based on the date of the first point of the
track, or in case it doesn't exist, "EmptyTrack"
Args:
name_format (str, optional): Name format to give to the track, based on
its start time. Defaults to DEFAULT_FILE_NAME_FORMAT
Returns:
str
| 5.044258 | 3.883963 | 1.29874 |
print(noise)
for segment in self.segments:
segment.smooth(noise, strategy)
return self
|
def smooth(self, strategy, noise)
|
In-place smoothing of segments
Returns:
:obj:`Track`: self
| 8.098438 | 8.071951 | 1.003281 |
new_segments = []
for segment in self.segments:
segmented = segment.segment(eps, min_time)
for seg in segmented:
new_segments.append(Segment(seg))
self.segments = new_segments
return self
|
def segment(self, eps, min_time)
|
In-place segmentation of segments
Spatio-temporal segmentation of each segment
The number of segments may increase after this step
Returns:
This track
| 2.687103 | 3.249154 | 0.827016 |
for segment in self.segments:
segment.simplify(eps, max_dist_error, max_speed_error, topology_only)
return self
|
def simplify(self, eps, max_dist_error, max_speed_error, topology_only=False)
|
In-place simplification of segments
Args:
max_dist_error (float): Max distance error, in meters
max_speed_error (float): Max speed error, in km/h
topology_only: Boolean, optional. True to keep
the topology, neglecting velocity and time
accuracy (use common Ramer-Douglas-Peucker).
False (default) to simplify segments keeping
the velocity between points.
Returns:
This track
| 2.405652 | 2.929356 | 0.821222 |
for segment in self.segments:
segment.infer_transportation_mode(clf, min_time)
return self
|
def infer_transportation_mode(self, clf, min_time)
|
In-place transportation mode inferring of segments
Returns:
This track
| 3.5231 | 3.949733 | 0.891984 |
self.compute_metrics()
self.remove_noise()
print(smooth, seg, simplify)
if smooth:
self.compute_metrics()
self.smooth(smooth_strategy, smooth_noise)
if seg:
self.compute_metrics()
self.segment(seg_eps, seg_min_time)
if simplify:
self.compute_metrics()
self.simplify(0, simplify_max_dist_error, simplify_max_speed_error)
self.compute_metrics()
return self
|
def to_trip(
self,
smooth,
smooth_strategy,
smooth_noise,
seg,
seg_eps,
seg_min_time,
simplify,
simplify_max_dist_error,
simplify_max_speed_error
)
|
In-place, transformation of a track into a trip
A trip is a more accurate depiction of reality than a
track.
For a track to become a trip it need to go through the
following steps:
+ noise removal
+ smoothing
+ spatio-temporal segmentation
+ simplification
At the end of these steps we have a less noisy, track
that has less points, but that holds the same information.
It's required that each segment has their metrics calculated
or has been preprocessed.
Args:
smooth (bool): True to smooth the track
smooth_strategy (int): Smoothing strategy, see `smooth`
smooth_noise (float): Expected noise for smoothing
seg (bool): True to perform spatio-temporal segmentation
seg_eps (float): Epsilon for the segmentation, see `segment`
seg_min_time (float): Minimum stay time for the segmentation
simplify (bool): True to simplify the track
simplify_max_dist_error (float): Max distance error, in meters
simplify_max_speed_error (float): Max speed error, in km/h
Returns:
This Track instance
| 2.377507 | 2.463554 | 0.965072 |
self.segments = [
segment.infer_transportation_mode(dt_threshold=dt_threshold)
for segment in self.segments
]
return self
|
def infer_transportation_modes(self, dt_threshold=10)
|
In-place transportation inferring of segments
Returns:
This track
| 3.928416 | 4.352702 | 0.902524 |
self.segments = [
segment.infer_location(
location_query,
max_distance,
google_key,
foursquare_client_id,
foursquare_client_secret,
limit
)
for segment in self.segments
]
return self
|
def infer_location(
self,
location_query,
max_distance,
google_key,
foursquare_client_id,
foursquare_client_secret,
limit
)
|
In-place location inferring of segments
Returns:
This track
| 2.259824 | 2.203617 | 1.025507 |
return {
'name': self.name,
'segments': [segment.to_json() for segment in self.segments],
'meta': self.meta
}
|
def to_json(self)
|
Converts track to a JSON serializable format
Returns:
Map with the name, and segments of the track.
| 3.628785 | 3.219127 | 1.127258 |
for (self_seg_index, track_seg_index, _) in pairings:
self_s = self.segments[self_seg_index]
ss_start = self_s.points[0]
track_s = track.segments[track_seg_index]
tt_start = track_s.points[0]
tt_end = track_s.points[-1]
d_start = ss_start.distance(tt_start)
d_end = ss_start.distance(tt_end)
if d_start > d_end:
track_s = track_s.copy()
track_s.points = list(reversed(track_s.points))
self_s.merge_and_fit(track_s)
return self
|
def merge_and_fit(self, track, pairings)
|
Merges another track with this one, ordering the points based on a
distance heuristic
Args:
track (:obj:`Track`): Track to merge with
pairings (:obj:`list` of (int, int, list)): Pairs of segment indexes (this track, other track) to merge, as returned by `similarity`
Returns:
:obj:`Segment`: self
| 2.369807 | 2.312215 | 1.024908 |
for i, segment in enumerate(self.segments):
idx = segment.getPointIndex(point)
if idx != -1:
return i, idx
return -1, -1
|
def get_point_index(self, point)
|
Gets the segment index and point index of the given point
Args:
point (:obj:`Point`)
Returns:
(int, int): Segment id and point index in that segment
| 2.943513 | 3.001022 | 0.980837 |
min_lat = float("inf")
min_lon = float("inf")
max_lat = -float("inf")
max_lon = -float("inf")
for segment in self.segments:
milat, milon, malat, malon = segment.bounds(thr=thr)
min_lat = min(milat, min_lat)
min_lon = min(milon, min_lon)
max_lat = max(malat, max_lat)
max_lon = max(malon, max_lon)
return min_lat, min_lon, max_lat, max_lon
|
def bounds(self, thr=0)
|
Gets the bounds of this track
Returns:
(float, float, float, float): Bounds, with min latitude, min longitude,
max latitude and max longitude
| 1.825038 | 1.851175 | 0.985881 |
idx = index.Index()
for i, segment in enumerate(self.segments):
idx.insert(i, segment.bounds(), obj=segment)
final_siml = []
final_diff = []
for i, segment in enumerate(track.segments):
query = idx.intersection(segment.bounds(), objects=True)
res_siml = []
res_diff = []
for result in query:
siml, diff = segment_similarity(segment, result.object)
res_siml.append(siml)
res_diff.append((result.id, i, diff))
if len(res_siml) > 0:
final_siml.append(max(res_siml))
final_diff.append(res_diff[np.argmax(res_siml)])
else:
final_siml.append(0)
final_diff.append([])
return np.mean(final_siml), final_diff
|
def similarity(self, track)
|
Compares two tracks based on their topology
This method compares the given track against this
instance. It only verifies if the given track is close
to this one, not the other way around
Args:
track (:obj:`Track`)
Returns:
Two-tuple with global similarity between tracks
and an array the similarity between segments
| 2.67258 | 2.647629 | 1.009424 |
gpx_segments = []
for segment in self.segments:
gpx_points = []
for point in segment.points:
time = ''
if point.time:
iso_time = point.time.isoformat().split('.')[0]
time = '<time>%s</time>' % iso_time
gpx_points.append(
u'<trkpt lat="%f" lon="%f">%s</trkpt>' % (point.lat, point.lon, time)
)
points = u'\n\t\t\t'.join(gpx_points)
gpx_segments.append(u'\t\t<trkseg>\n\t\t\t%s\n\t\t</trkseg>' % points)
segments = u'\t\n'.join(gpx_segments)
content = [
u'<?xml version="1.0" encoding="UTF-8"?>',
u'<gpx xmlns:xsi="http://www.w3.org/2001/XMLSchema-instance" xmlns="http://www.topografix.com/GPX/1/0" xsi:schemaLocation="http://www.topografix.com/GPX/1/0 http://www.topografix.com/GPX/1/0/gpx.xsd" version="1.0" creator="GatherMySteps">',
u'\t<trk>',
segments,
u'\t</trk>',
u'</gpx>'
]
return u'\n'.join(content)
|
def to_gpx(self)
|
Converts track to a GPX format
Builds the GPX/XML string directly
Returns:
A string with the GPX/XML track
| 1.708252 | 1.770066 | 0.965078 |
tz_dt = timedelta(hours=timezone)
for segment in self.segments:
for point in segment.points:
point.time = point.time + tz_dt
return self
|
def timezone(self, timezone=0)
|
Sets the timezone of the entire track
Args:
timezone (int): Timezone hour delta
| 4.399285 | 4.766827 | 0.922896 |
buff = "--%s\n" % self.segments[0].points[0].time.strftime("%Y_%m_%d")
# buff += "--" + day
# buff += "UTC+s" # if needed
def military_time(time):
return time.strftime("%H%M")
def stay(buff, start, end, place):
if not isinstance(start, str):
start = military_time(start)
if not isinstance(end, str):
end = military_time(end)
return "%s\n%s-%s: %s" % (buff, start, end, place.label)
def trip(buff, segment):
trip = "%s-%s: %s -> %s" % (
military_time(segment.points[0].time),
military_time(segment.points[-1].time),
segment.location_from.label,
segment.location_to.label
)
t_modes = segment.transportation_modes
if len(t_modes) == 1:
trip = "%s [%s]" % (trip, t_modes[0]['label'])
elif len(t_modes) > 1:
modes = []
for mode in t_modes:
trip_from = military_time(segment.points[mode['from']].time)
trip_to = military_time(segment.points[mode['to']].time)
modes.append(" %s-%s: [%s]" % (trip_from, trip_to, mode['label']))
trip = "%s\n%s" % (trip, "\n".join(modes))
return "%s\n%s" % (buff, trip)
last = len(self.segments) - 1
for i, segment in enumerate(self.segments):
if i == 0:
buff = stay(
buff,
"0000",
military_time(segment.points[0].time),
segment.location_from
)
buff = trip(buff, segment)
if i == last:
buff = stay(
buff,
military_time(segment.points[-1].time),
"2359",
segment.location_to
)
else:
next_seg = self.segments[i+1]
buff = stay(
buff,
military_time(segment.points[-1].time),
military_time(next_seg.points[0].time),
segment.location_to
)
return buff
|
def to_life(self)
|
Converts track to LIFE format
| 2.42507 | 2.361171 | 1.027062 |
with open(file_path, 'r') as gpx_file:
gpx = gpxpy.parse(gpx_file)
file_name = basename(file_path)
tracks = []
for i, track in enumerate(gpx.tracks):
segments = []
for segment in track.segments:
segments.append(Segment.from_gpx(segment))
if len(gpx.tracks) > 1:
name = file_name + "_" + str(i)
else:
name = file_name
tracks.append(Track(name, segments))
return tracks
|
def from_gpx(file_path)
|
Creates a Track from a GPX file.
No preprocessing is done.
Arguments:
file_path (str): file path and name to the GPX file
Return:
:obj:`list` of :obj:`Track`
| 1.989899 | 2.192638 | 0.907536 |
segments = [Segment.from_json(s) for s in json['segments']]
return Track(json['name'], segments).compute_metrics()
|
def from_json(json)
|
Creates a Track from a JSON file.
No preprocessing is done.
Arguments:
json: map with the keys: name (optional) and segments.
Return:
A track instance
| 6.171353 | 6.391315 | 0.965584 |
l = math.sqrt(p[0]**2 + p[1]**2)
return [0.0, 0.0] if l == 0 else [p[0]/l, p[1]/l]
|
def normalize(p)
|
Normalizes a point/vector
Args:
p ([float, float]): x and y coordinates
Returns:
[float, float]: the normalized vector
| 2.296006 | 2.515403 | 0.912779 |
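For instance, a 3-4-5 triangle:
>>> normalize([3.0, 4.0])
[0.6, 0.8]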
A = (p1[1] - p2[1])
B = (p2[0] - p1[0])
C = (p1[0]*p2[1] - p2[0]*p1[1])
return A, B, -C
|
def line(p1, p2)
|
Creates a line from two points
From http://stackoverflow.com/a/20679579
Args:
p1 ([float, float]): x and y coordinates
p2 ([float, float]): x and y coordinates
Returns:
(float, float, float): A, B and C coefficients of the line equation A*x + B*y = C
| 1.654123 | 1.971616 | 0.838968 |
D = L1[0] * L2[1] - L1[1] * L2[0]
Dx = L1[2] * L2[1] - L1[1] * L2[2]
Dy = L1[0] * L2[2] - L1[2] * L2[0]
if D != 0:
x = Dx / D
y = Dy / D
return x, y
else:
return False
|
def intersection(L1, L2)
|
Intersects two lines
Args:
L1 ((float, float, float)): line, as returned by `line`
L2 ((float, float, float)): line, as returned by `line`
Returns:
bool: if they intersect
(float, float): x and y of intersection, if they do
| 1.484655 | 1.605862 | 0.924523 |
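A sketch combining `line` and `intersection` (the diagonals of the unit square cross at its center):
>>> L1 = line([0.0, 0.0], [1.0, 1.0])
>>> L2 = line([0.0, 1.0], [1.0, 0.0])
>>> intersection(L1, L2)
(0.5, 0.5)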
return math.sqrt((b.lat-a.lat)**2 + (b.lon-a.lon)**2)
|
def distance_tt_point(a, b)
|
Euclidean distance between two (tracktotrip) points
Args:
a (:obj:`Point`)
b (:obj:`Point`)
Returns:
float
| 2.721843 | 4.163023 | 0.653814 |
ap = [p[0]-a[0], p[1]-a[1]]
ab = [b[0]-a[0], b[1]-a[1]]
mag = float(ab[0]**2 + ab[1]**2)
proj = dot(ap, ab)
if mag == 0:
dist = 0
else:
dist = proj / mag
if dist < 0:
return [a[0], a[1]]
elif dist > 1:
return [b[0], b[1]]
else:
return [a[0] + ab[0] * dist, a[1] + ab[1] * dist]
|
def closest_point(a, b, p)
|
Finds closest point in a line segment
Args:
a ([float, float]): x and y coordinates. Line start
b ([float, float]): x and y coordinates. Line end
p ([float, float]): x and y coordinates. Point to find in the segment
Returns:
(float, float): x and y coordinates of the closest point
| 1.862005 | 2.044882 | 0.910569 |
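For example, projecting a point onto a horizontal segment:
>>> closest_point([0.0, 0.0], [2.0, 0.0], [1.0, 5.0])
[1.0, 0.0]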
return distance(closest_point(a, b, p), p)
|
def distance_to_line(a, b, p)
|
Closest distance between a line segment and a point
Args:
a ([float, float]): x and y coordinates. Line start
b ([float, float]): x and y coordinates. Line end
p ([float, float]): x and y coordinates. Point to compute the distance
Returns:
float
| 5.430776 | 15.138209 | 0.358746 |
d = distance_to_line(a, b, p)
r = (-1/float(T)) * abs(d) + 1
return r if r > 0 else 0
|
def distance_similarity(a, b, p, T=CLOSE_DISTANCE_THRESHOLD)
|
Computes the distance similarity between a line segment
and a point
Args:
a ([float, float]): x and y coordinates. Line start
b ([float, float]): x and y coordinates. Line end
p ([float, float]): x and y coordinates. Point to compute the distance
Returns:
float: between 0 and 1. Where 1 is very similar and 0 is completely different
| 5.430666 | 6.501319 | 0.835318 |
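As a worked case (assuming the module's planar `distance` helper over `[x, y]` pairs): with T=10, a point 5 units from the line scores r = (-1/10)*5 + 1 = 0.5.
>>> distance_similarity([0.0, 0.0], [10.0, 0.0], [5.0, 5.0], T=10)
0.5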
d1 = distance_similarity(p1a, p1b, p2a, T=T)
d2 = distance_similarity(p1a, p1b, p2b, T=T)
return abs(d1 + d2) * 0.5
|
def line_distance_similarity(p1a, p1b, p2a, p2b, T=CLOSE_DISTANCE_THRESHOLD)
|
Line distance similarity between two line segments
Args:
p1a ([float, float]): x and y coordinates. Line A start
p1b ([float, float]): x and y coordinates. Line A end
p2a ([float, float]): x and y coordinates. Line B start
p2b ([float, float]): x and y coordinates. Line B end
Returns:
float: between 0 and 1. Where 1 is very similar and 0 is completely different
| 2.056699 | 2.802591 | 0.733856 |
d = line_distance_similarity(p1a, p1b, p2a, p2b, T=T)
a = abs(angle_similarity(normalize(line(p1a, p1b)), normalize(line(p2a, p2b))))
return d * a
|
def line_similarity(p1a, p1b, p2a, p2b, T=CLOSE_DISTANCE_THRESHOLD)
|
Similarity between two lines
Args:
p1a ([float, float]): x and y coordinates. Line A start
p1b ([float, float]): x and y coordinates. Line A end
p2a ([float, float]): x and y coordinates. Line B start
p2b ([float, float]): x and y coordinates. Line B end
Returns:
float: between 0 and 1. Where 1 is very similar and 0 is completely different
| 3.029545 | 3.878556 | 0.781101 |
pi = points[i]
pi1 = points[i1]
min_lat = min(pi.lat, pi1.lat)
min_lon = min(pi.lon, pi1.lon)
max_lat = max(pi.lat, pi1.lat)
max_lon = max(pi.lon, pi1.lon)
return min_lat-thr, min_lon-thr, max_lat+thr, max_lon+thr
|
def bounding_box_from(points, i, i1, thr)
|
Creates bounding box for a line segment
Args:
points (:obj:`list` of :obj:`Point`)
i (int): Line segment start, index in points array
i1 (int): Line segment end, index in points array
Returns:
(float, float, float, float): with bounding box min x, min y, max x and max y
| 1.719994 | 1.800133 | 0.955482 |
l_a = len(A.points)
l_b = len(B.points)
idx = index.Index()
dex = 0
for i in range(l_a-1):
idx.insert(dex, bounding_box_from(A.points, i, i+1, T), obj=[A.points[i], A.points[i+1]])
dex = dex + 1
prox_acc = []
for i in range(l_b-1):
ti = B.points[i].gen2arr()
ti1 = B.points[i+1].gen2arr()
bb = bounding_box_from(B.points, i, i+1, T)
intersects = idx.intersection(bb, objects=True)
n_prox = []
i_prox = 0
a = 0
for x in intersects:
a = a + 1
pi = x.object[0].gen2arr()
pi1 = x.object[1].gen2arr()
prox = line_similarity(ti, ti1, pi, pi1, T)
i_prox = i_prox + prox
n_prox.append(prox)
if a != 0:
prox_acc.append(i_prox / a)
# prox_acc.append(max(n_prox))
else:
prox_acc.append(0)
return np.mean(prox_acc), prox_acc
|
def segment_similarity(A, B, T=CLOSE_DISTANCE_THRESHOLD)
|
Computes the similarity between two segments
Args:
A (:obj:`Segment`)
B (:obj:`Segment`)
Returns:
float: between 0 and 1. Where 1 is very similar and 0 is completely different
| 2.880541 | 2.975181 | 0.96819 |
mid = []
j = 0
mid.append(Aps[0])
for i in range(len(Aps)-1):
dist = distance_tt_point(Aps[i], Aps[i+1])
for m in range(j, len(Bps)):
distm = distance_tt_point(Aps[i], Bps[m])
if dist > distm:
direction = dot(normalize(line(Aps[i].gen2arr(), Aps[i+1].gen2arr())), normalize(Bps[m].gen2arr()))
if direction > 0:
j = m + 1
mid.append(Bps[m])
break
mid.append(Aps[i+1])
for m in range(j, len(Bps)):
mid.append(Bps[m])
return mid
|
def sort_segment_points(Aps, Bps)
|
Takes two line segments and sorts all their points,
so that they form a continuous path
Args:
Aps: Array of tracktotrip.Point
Bps: Array of tracktotrip.Point
Returns:
Array with points ordered
| 2.741143 | 2.678299 | 1.023464 |
d_lat = to_rad(latitude_1 - latitude_2)
d_lon = to_rad(longitude_1 - longitude_2)
lat1 = to_rad(latitude_1)
lat2 = to_rad(latitude_2)
#pylint: disable=invalid-name
a = math.sin(d_lat/2) * math.sin(d_lat/2) + \
math.sin(d_lon/2) * math.sin(d_lon/2) * math.cos(lat1) * math.cos(lat2)
c = 2 * math.atan2(math.sqrt(a), math.sqrt(1-a))
d = EARTH_RADIUS * c
return d
|
def haversine_distance(latitude_1, longitude_1, latitude_2, longitude_2)
|
Haversine distance between two points, expressed in meters.
Implemented from http://www.movable-type.co.uk/scripts/latlong.html
| 1.45954 | 1.454088 | 1.003749 |
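A quick sanity check (assuming `EARTH_RADIUS` is the mean Earth radius, 6371 km, in meters): one degree of longitude along the equator is about 111.2 km.
>>> round(haversine_distance(0.0, 0.0, 0.0, 1.0))
111195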
# If points too distant -- compute haversine distance:
if haversine or (abs(latitude_1 - latitude_2) > .2 or abs(longitude_1 - longitude_2) > .2):
return haversine_distance(latitude_1, longitude_1, latitude_2, longitude_2)
coef = math.cos(latitude_1 / 180. * math.pi)
#pylint: disable=invalid-name
x = latitude_1 - latitude_2
y = (longitude_1 - longitude_2) * coef
distance_2d = math.sqrt(x * x + y * y) * ONE_DEGREE
if elevation_1 is None or elevation_2 is None or elevation_1 == elevation_2:
return distance_2d
return math.sqrt(distance_2d ** 2 + (elevation_1 - elevation_2) ** 2)
|
def distance(latitude_1, longitude_1, elevation_1, latitude_2, longitude_2, elevation_2,
haversine=None)
|
Distance between two points
| 2.609752 | 2.593591 | 1.006231 |
return distance(self.lat, self.lon, None, other.lat, other.lon, None)
|
def distance(self, other)
|
Distance between points
Args:
other (:obj:`Point`)
Returns:
float: Distance in meters
| 4.223953 | 7.055758 | 0.598653 |
delta_t = self.time_difference(previous)
delta_x = self.distance(previous)
vel = 0
delta_v = 0
acc = 0
if delta_t != 0:
vel = delta_x/delta_t
delta_v = vel - previous.vel
acc = delta_v/delta_t
self.dt = delta_t
self.dx = delta_x
self.acc = acc
self.vel = vel
return self
|
def compute_metrics(self, previous)
|
Computes the metrics of this point
Computes and updates the dt, dx, vel and acc attributes.
Args:
previous (:obj:`Point`): Point before
Returns:
:obj:`Point`: Self
| 2.7577 | 2.507627 | 1.099725 |
return Point(
lat=gpx_track_point.latitude,
lon=gpx_track_point.longitude,
time=gpx_track_point.time
)
|
def from_gpx(gpx_track_point)
|
Creates a point from GPX representation
Arguments:
gpx_track_point (:obj:`gpxpy.GPXTrackPoint`)
Returns:
:obj:`Point`
| 2.124859 | 2.544812 | 0.834977 |
return {
'lat': self.lat,
'lon': self.lon,
'time': self.time.isoformat() if self.time is not None else None
}
|
def to_json(self)
|
Creates a JSON serializable representation of this instance
Returns:
:obj:`dict`: For example,
{
"lat": 9.3470298,
"lon": 3.79274,
"time": "2016-07-15T15:27:53.574110"
}
| 2.779659 | 2.322867 | 1.19665 |
return Point(
lat=json['lat'],
lon=json['lon'],
time=isostr_to_datetime(json['time'])
)
|
def from_json(json)
|
Creates Point instance from JSON representation
Args:
json (:obj:`dict`): Must have at least the following keys: lat (float), lon (float),
time (string in iso format). Example,
{
"lat": 9.3470298,
"lon": 3.79274,
"time": "2016-07-15T15:27:53.574110"
}
json: map representation of Point instance
Returns:
:obj:`Point`
| 4.458349 | 4.413314 | 1.010204 |
lats = [p[1] for p in points]
lons = [p[0] for p in points]
return Point(np.mean(lats), np.mean(lons), None)
|
def compute_centroid(points)
|
Computes the centroid of set of points
Args:
points (:obj:`list` of :obj:`Point`)
Returns:
:obj:`Point`
| 2.467031 | 2.976096 | 0.828948 |
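Note the `[lon, lat]` ordering of the input pairs (matching `gen2arr` elsewhere in the module); a tiny sketch:
>>> c = compute_centroid([[0.0, 0.0], [2.0, 4.0]])
>>> float(c.lat), float(c.lon)
(2.0, 1.0)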
cluster.append(point)
points = [p.gen2arr() for p in cluster]
# Estimates the epsilon
eps = estimate_meters_to_deg(max_distance, precision=6)
p_cluster = DBSCAN(eps=eps, min_samples=min_samples)
p_cluster.fit(points)
clusters = {}
for i, label in enumerate(p_cluster.labels_):
if label in clusters:
clusters[label].append(points[i])
else:
clusters[label] = [points[i]]
centroids = []
biggest_centroid_l = -float("inf")
biggest_centroid = None
for label, n_cluster in clusters.items():
centroid = compute_centroid(n_cluster)
centroids.append(centroid)
if label >= 0 and len(n_cluster) >= biggest_centroid_l:
biggest_centroid_l = len(n_cluster)
biggest_centroid = centroid
if biggest_centroid is None:
biggest_centroid = compute_centroid(points)
return biggest_centroid, cluster
|
def update_location_centroid(point, cluster, max_distance, min_samples)
|
Updates the centroid of a location cluster with another point
Args:
point (:obj:`Point`): Point to add to the cluster
cluster (:obj:`list` of :obj:`Point`): Location cluster
max_distance (float): Max neighbour distance
min_samples (int): Minimum number of samples
Returns:
(:obj:`Point`, :obj:`list` of :obj:`Point`): Tuple with the location centroid
and new point cluster (given cluster + given point)
| 3.045444 | 3.113983 | 0.97799 |
if not client_id:
return []
if not client_secret:
return []
cached = from_cache(FS_CACHE, point, max_distance)
if cached:
return cached
url = FOURSQUARE_URL % (client_id, client_secret, point.lat, point.lon, max_distance)
req = requests.get(url)
if req.status_code != 200:
return []
response = req.json()
result = []
venues = response['response']['venues']
for venue in venues:
name = venue['name']
distance = venue['location']['distance']
categories = [c['shortName'] for c in venue['categories']]
result.append({
'label': name,
'distance': distance,
'types': categories,
'suggestion_type': 'FOURSQUARE'
})
# final_results = sorted(result, key=lambda elm: elm['distance'])
foursquare_insert_cache(point, result)
return result
|
def query_foursquare(point, max_distance, client_id, client_secret)
|
Queries the Foursquare API for a location
Args:
point (:obj:`Point`): Point location to query
max_distance (float): Search radius, in meters
client_id (str): Valid Foursquare client id
client_secret (str): Valid Foursquare client secret
Returns:
:obj:`list` of :obj:`dict`: List of locations with the following format:
{
'label': 'Coffee house',
'distance': 19,
'types': 'Commerce',
'suggestion_type': 'FOURSQUARE'
}
| 2.584619 | 2.319753 | 1.114178 |
if not key:
return []
cached = from_cache(GG_CACHE, point, max_distance)
if cached:
return cached
req = requests.get(GOOGLE_PLACES_URL % (
point.lat,
point.lon,
max_distance,
key
))
if req.status_code != 200:
return []
response = req.json()
results = response['results']
# l = len(results)
final_results = []
for local in results:
final_results.append({
'label': local['name'],
'distance': Point(local['geometry']['location']['lat'], local['geometry']['location']['lng'], None).distance(point),
# 'rank': (l-i)/float(l),
'types': local['types'],
'suggestion_type': 'GOOGLE'
})
google_insert_cache(point, final_results)
return final_results
|
def query_google(point, max_distance, key)
|
Queries the Google Places API for a location
Args:
point (:obj:`Point`): Point location to query
max_distance (float): Search radius, in meters
key (str): Valid google maps api key
Returns:
:obj:`list` of :obj:`dict`: List of locations with the following format:
{
'label': 'Coffee house',
'types': 'Commerce',
'suggestion_type': 'GOOGLE'
}
| 3.280071 | 2.960341 | 1.108005 |
locations = []
if location_query is not None:
queried_locations = location_query(point, max_distance)
for (label, centroid, _) in queried_locations:
locations.append({
'label': unicode(label, 'utf-8'),
'distance': centroid.distance(point),
# 'centroid': centroid,
'suggestion_type': 'KB'
})
api_locations = []
if len(locations) <= limit:
if google_key:
google_locs = query_google(point, max_distance, google_key)
api_locations.extend(google_locs)
if foursquare_client_id and foursquare_client_secret:
foursquare_locs = query_foursquare(
point,
max_distance,
foursquare_client_id,
foursquare_client_secret
)
api_locations.extend(foursquare_locs)
if len(locations) > 0 or len(api_locations) > 0:
locations = sorted(locations, key=lambda d: d['distance'])
api_locations = sorted(api_locations, key=lambda d: d['distance'])
locations = (locations + api_locations)[:limit]
return Location(locations[0]['label'], point, locations)
else:
return Location('#?', point, [])
|
def infer_location(
point,
location_query,
max_distance,
google_key,
foursquare_client_id,
foursquare_client_secret,
limit
)
|
Infers the semantic location of a (point) place.
Args:
point (:obj:`Point`): Point location to infer
location_query: Function with signature, (:obj:`Point`, int) -> (str, :obj:`Point`, ...)
max_distance (float): Max distance to a position, in meters
google_key (str): Valid google maps api key
foursquare_client_id (str): Valid Foursquare client id
foursquare_client_secret (str): Valid Foursquare client secret
limit (int): Results limit
Returns:
:obj:`Location`: with top match, and alternatives
| 2.476165 | 2.434209 | 1.017236 |