code: string
signature: string
docstring: string
loss_without_docstring: float64
loss_with_docstring: float64
factor: float64
handler = WSGIHandler()
try:
    from django.contrib.staticfiles.handlers import StaticFilesHandler
except ImportError:
    return handler
use_static_handler = options.get('use_static_handler', True)
insecure_serving = options.get('insecure_serving', False)
if (settings.DEBUG and use_static_handler or
        (use_static_handler and insecure_serving)):
    handler = StaticFilesHandler(handler)
return handler
def get_handler(self, *args, **options)
Returns the django.contrib.staticfiles handler.
2.648917
2.407609
1.100227
for handler, pattern in self.handlers:
    no_channel = not pattern and not socket.channels
    if self.name.endswith("subscribe") and pattern:
        matches = [pattern.match(args[0])]
    else:
        matches = [pattern.match(c) for c in socket.channels if pattern]
    if no_channel or filter(None, matches):
        handler(request, socket, context, *args)
def send(self, request, socket, context, *args)
When an event is sent, run all relevant handlers. Relevant handlers are those without a channel pattern when the given socket is not subscribed to any particular channel, or the handlers with a channel pattern that matches any of the channels that the given socket is subscribed to. In the case of subscribe/unsubscribe, match the channel arg being sent to the channel pattern.
4.847256
4.148442
1.168452
context = {"rooms": ChatRoom.objects.all()} return render(request, template, context)
def rooms(request, template="rooms.html")
Homepage - lists all rooms.
3.14245
2.81094
1.117936
context = {"room": get_object_or_404(ChatRoom, slug=slug)} return render(request, template, context)
def room(request, slug, template="room.html")
Show a room.
2.43496
2.222161
1.095762
name = request.POST.get("name")
if name:
    room, created = ChatRoom.objects.get_or_create(name=name)
    return redirect(room)
return redirect(rooms)
def create(request)
Handles post from the "Add room" form on the homepage, and redirects to the new room.
3.269981
3.127995
1.045392
if isinstance(color, tuple) and len(color) == 3:  # already a tuple of RGB values
    return color
try:
    import matplotlib.colors as mplcolors
except:
    raise ImportError("Error importing matplotlib. If running from within a jupyter notebook, try calling '%matplotlib inline' beforehand.")
try:
    hexcolor = mplcolors.cnames[color]
except KeyError:
    raise AttributeError("Color not recognized in matplotlib.")
hexcolor = hexcolor.lstrip('#')
lv = len(hexcolor)
return tuple(int(hexcolor[i:i + lv // 3], 16)/255. for i in range(0, lv, lv // 3))
def get_color(color)
Takes a string for a color name defined in matplotlib and returns a 3-tuple of RGB values. Will simply return the passed value if it's a tuple of length three.

Parameters
----------
color : str
    Name of matplotlib color to calculate RGB values for.
3.211849
3.15154
1.019136
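A quick usage sketch for the helper above (values follow matplotlib's named-color table):

>>> get_color('red')
(1.0, 0.0, 0.0)
>>> get_color((0.2, 0.4, 0.6))  # length-3 tuples pass through unchanged
(0.2, 0.4, 0.6)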
try:
    from matplotlib.collections import LineCollection
    from matplotlib.colors import LinearSegmentedColormap
    import numpy as np
except:
    raise ImportError("Error importing matplotlib and/or numpy. Plotting functions not available. If running from within a jupyter notebook, try calling '%matplotlib inline' beforehand.")

if glow:
    glow = False
    kwargs["lw"] = 1
    fl1 = fading_line(x, y, color, alpha_initial, alpha_final, glow=False, **kwargs)
    kwargs["lw"] = 2
    alpha_initial *= 0.5
    alpha_final *= 0.5
    fl2 = fading_line(x, y, color, alpha_initial, alpha_final, glow=False, **kwargs)
    kwargs["lw"] = 6
    alpha_initial *= 0.5
    alpha_final *= 0.5
    fl3 = fading_line(x, y, color, alpha_initial, alpha_final, glow=False, **kwargs)
    return [fl3, fl2, fl1]

color = get_color(color)
cdict = {'red': ((0., color[0], color[0]), (1., color[0], color[0])),
         'green': ((0., color[1], color[1]), (1., color[1], color[1])),
         'blue': ((0., color[2], color[2]), (1., color[2], color[2])),
         'alpha': ((0., alpha_initial, alpha_initial), (1., alpha_final, alpha_final))}

Npts = len(x)
if len(y) != Npts:
    raise AttributeError("x and y must have same dimension.")

segments = np.zeros((Npts-1, 2, 2))
segments[0][0] = [x[0], y[0]]
for i in range(1, Npts-1):
    pt = [x[i], y[i]]
    segments[i-1][1] = pt
    segments[i][0] = pt
segments[-1][1] = [x[-1], y[-1]]

individual_cm = LinearSegmentedColormap('indv1', cdict)
lc = LineCollection(segments, cmap=individual_cm, **kwargs)
lc.set_array(np.linspace(0., 1., len(segments)))
return lc
def fading_line(x, y, color='black', alpha_initial=1., alpha_final=0., glow=False, **kwargs)
Returns a matplotlib LineCollection connecting the points in the x and y lists, with a single color and alpha varying from alpha_initial to alpha_final along the line. Can pass any kwargs you can pass to LineCollection, like linewidth.

Parameters
----------
x : list or array of floats for the positions on the (plot's) x axis
y : list or array of floats for the positions on the (plot's) y axis
color : matplotlib color for the line. Can also pass a 3-tuple of RGB values (default: 'black')
alpha_initial : Limiting value of alpha to use at the beginning of the arrays.
alpha_final : Limiting value of alpha to use at the end of the arrays.
2.127546
2.159368
0.985263
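A minimal usage sketch, assuming matplotlib.pyplot is imported as plt and numpy as np:

>>> x = np.linspace(0., 10., 200)
>>> y = np.sin(x)
>>> lc = fading_line(x, y, color='blue')
>>> fig, ax = plt.subplots()
>>> ax.add_collection(lc)
>>> ax.set_xlim(0, 10); ax.set_ylim(-1, 1)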
if t > self.tmax or t < self.tmin:
    raise ValueError("Requested time outside of baseline stored in binary file.")
# Bisection method
l = 0
r = len(self)
while True:
    bi = l + (r-l)//2
    if self.t[bi] > t:
        r = bi
    else:
        l = bi
    if r-1 <= l:
        bi = l
        break
return bi, self.t[bi]
def _getSnapshotIndex(self, t)
Return the index for the snapshot just before t
4.552226
4.4973
1.012213
if mode not in ['snapshot', 'close', 'exact']:
    raise AttributeError("Unknown mode.")

bi, bt = self._getSnapshotIndex(t)
sim = Simulation()
w = c_int(0)
clibrebound.reb_create_simulation_from_simulationarchive_with_messages(byref(sim), byref(self), bi, byref(w))

# Restore function pointers and any additional setup required by the user/reboundx provided functions
if self.setup:
    self.setup(sim, *self.setup_args)
if self.rebxfilename:
    import reboundx
    rebx = reboundx.Extras.from_file(sim, self.rebxfilename)

if mode == 'snapshot':
    if sim.integrator == "whfast" and sim.ri_whfast.safe_mode == 1:
        keep_unsynchronized = 0
    sim.ri_whfast.keep_unsynchronized = keep_unsynchronized
    sim.integrator_synchronize()
    return sim
else:
    if mode == 'exact':
        keep_unsynchronized = 0
    if sim.integrator == "whfast" and sim.ri_whfast.safe_mode == 1:
        keep_unsynchronized = 0
    sim.ri_whfast.keep_unsynchronized = keep_unsynchronized
    exact_finish_time = 1 if mode == 'exact' else 0
    sim.integrate(t, exact_finish_time=exact_finish_time)
    return sim
def getSimulation(self, t, mode='snapshot', keep_unsynchronized=1)
This function returns a simulation object at (or close to) the requested time `t`. Every time this function is called a new simulation object is created.

Arguments
---------
t : float
    Requested time. Needs to be within tmin and tmax of this Simulation Archive.
mode : str
    This argument determines how close the simulation should be to the requested time. There are three options.
    - 'snapshot' This loads a nearby snapshot such that sim.t<t. This is the default.
    - 'close' This integrates the simulation to get to the time t but may overshoot by at most one timestep sim.dt.
    - 'exact' This integrates the simulation to exactly time t. This is not compatible with keep_unsynchronized=1.
keep_unsynchronized : int
    By default this argument is 1. This means that if the simulation had to be synchronized to generate this output, then it will nevertheless use the unsynchronized values if one integrates the simulation further in time. This is important for exact (bit-by-bit) reproducibility. If the value of this argument is 0, then one can modify the particles' coordinates and these changes are taken into account when integrating the simulation further in time.

Returns
-------
A rebound.Simulation object. Every time the function gets called a new object gets created.

Examples
--------
Here is a simple example on how to load a simulation from a Simulation Archive file with the `getSimulation` method. As the `mode` argument is set to `close`, the simulation will be integrated from the nearest snapshot to the requested time.

>>> sa = rebound.SimulationArchive("archive.bin")
>>> sim = sa.getSimulation(t=1e6, mode="close")
>>> print(sim.particles[1])
>>> for sim in sa:
>>>     print(sim.t, sim.particles[1].e)
5.395493
5.544243
0.97317
for t in times:
    yield self.getSimulation(t, **kwargs)
def getSimulations(self, times, **kwargs)
A generator to quickly access many simulations. The arguments are the same as for `getSimulation`.
4.173199
3.288859
1.268889
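A short usage sketch (the archive filename and times are illustrative):

>>> sa = rebound.SimulationArchive("archive.bin")
>>> for sim in sa.getSimulations([100., 200., 300.], mode="close"):
>>>     print(sim.t)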
import numpy as np
Npoints = len(self)*3 - 2
if len(self) <= 1:
    # original line was truncated ("raise Runtim"); the message here is a reconstruction
    raise RuntimeError("Need at least 2 snapshots to construct Bezier paths.")
Nparticles = self[0].N
verts = np.zeros((Npoints, Nparticles, 2))
xy = np.zeros((len(self), Nparticles, 2))
if origin == "com":
    origin = -2
elif origin is not None:
    try:
        origin = int(origin)
    except:
        raise AttributeError("Cannot parse origin")
    if origin < 0 or origin >= Nparticles:
        raise AttributeError("Origin index out of range")
for i, sim in enumerate(self):
    if origin is None:
        shift = (0, 0, 0, 0)
    elif origin == -2:
        sp = sim.calculate_com()
        shift = (sp.x, sp.y, sp.vx, sp.vy)
    else:
        sp = sim.particles[origin]
        shift = (sp.x, sp.y, sp.vx, sp.vy)
    for j in range(sim.N):
        p = sim.particles[j]
        if i == 0:
            verts[0, j] = p.x-shift[0], p.y-shift[1]
            verts[1, j] = p.vx-shift[2], p.vy-shift[3]
        else:
            dt = sim.t - tlast  # time since last snapshot
            verts[-2+i*3, j] = verts[-2+i*3, j]*dt/3. + verts[-3+i*3, j]
            verts[0+i*3, j] = p.x-shift[0], p.y-shift[1]
            verts[-1+i*3, j] = -p.vx+shift[2], -p.vy+shift[3]
            verts[-1+i*3, j] = verts[-1+i*3+0, j]*dt/3. + verts[0+i*3, j]
            if i != len(self)-1:
                verts[+1+i*3, j] = p.vx-shift[2], p.vy-shift[3]
        xy[i, j] = p.x, p.y
    tlast = sim.t
codes = np.full(Npoints, 4, dtype=np.uint8)  # Hardcoded 4 = matplotlib.path.Path.CURVE4
codes[0] = 1  # Hardcoded 1 = matplotlib.path.Path.MOVETO
return verts, codes
def getBezierPaths(self,origin=None)
This function returns arrays that can be used as a Cubic Bezier Path in matplotlib. The function returns two arrays: the first one contains the vertices for each particle and has the shape (Nvert, Nparticles, 2) where Nvert is the number of vertices. The second array returned describes the type of vertices to be used with matplotlib's Patch class.

Arguments
---------
origin : multiple, optional
    If `origin` is None (default), then none of the coordinates are shifted. If `origin` is an integer then the particle with that index is used as the origin. If `origin` is equal to `com`, then the centre of mass is used as the origin.

Examples
--------
The following example reads in a SimulationArchive and plots the trajectories as Cubic Bezier Curves. It also plots the actual datapoints stored in the SimulationArchive. Note that the SimulationArchive needs to have enough datapoints to allow for smooth and reasonable orbits.

>>> from matplotlib.path import Path
>>> import matplotlib.patches as patches
>>> sa = rebound.SimulationArchive("test.bin")
>>> verts, codes = sa.getBezierPaths(origin=0)
>>> fig, ax = plt.subplots()
>>> for j in range(sa[0].N):
>>>     path = Path(verts[:,j,:], codes)
>>>     patch = patches.PathPatch(path, facecolor='none')
>>>     ax.add_patch(patch)
>>>     ax.scatter(verts[::3,j,0],verts[::3,j,1])
>>> ax.set_aspect('equal')
>>> ax.autoscale_view()
2.932801
2.791596
1.050582
np = Particle()
memmove(byref(np), byref(self), sizeof(self))
return np
def copy(self)
Returns a deep copy of the particle. The particle is not added to any simulation by default.
9.116388
5.591738
1.630332
if not self._sim:
    # Particle not in a simulation
    if primary is None:
        raise ValueError("Particle does not belong to any simulation and no primary given. Cannot calculate orbit.")
    if G is None:
        raise ValueError("Particle does not belong to any simulation and G not given. Cannot calculate orbit.")
    else:
        G = c_double(G)
else:
    # First check whether this is particles[0]
    clibrebound.reb_get_particle_index.restype = c_int
    index = clibrebound.reb_get_particle_index(byref(self))  # first check this isn't particles[0]
    if index == 0 and primary is None:
        raise ValueError("Orbital elements for particle[0] not implemented unless primary is provided")
    if primary is None:
        # Use default, i.e., Jacobi coordinates
        clibrebound.reb_get_jacobi_com.restype = Particle  # now return jacobi center of mass
        primary = clibrebound.reb_get_jacobi_com(byref(self))
    G = c_double(self._sim.contents.G)

err = c_int()
clibrebound.reb_tools_particle_to_orbit_err.restype = rebound.Orbit
o = clibrebound.reb_tools_particle_to_orbit_err(G, self, primary, byref(err))

if err.value == 1:
    raise ValueError("Primary has no mass.")
if err.value == 2:
    raise ValueError("Particle and primary positions are the same.")

return o
def calculate_orbit(self, primary=None, G=None)
Returns a rebound.Orbit object with the Keplerian orbital elements corresponding to the particle around the passed primary (rebound.Particle). If no primary is passed, defaults to Jacobi coordinates (with mu = G*Minc, where Minc is the total mass from index 0 to the particle's index, inclusive).

Examples
--------
>>> sim = rebound.Simulation()
>>> sim.add(m=1.)
>>> sim.add(x=1.,vy=1.)
>>> orbit = sim.particles[1].calculate_orbit(sim.particles[0])
>>> print(orbit.e) # gives the eccentricity

Parameters
----------
primary : rebound.Particle
    Central body (Optional. Default uses Jacobi coordinates)
G : float
    Gravitational constant (Optional. Default takes G from the simulation the particle is in)

Returns
-------
A rebound.Orbit object
4.81449
4.313882
1.116046
pts = []
if primary is None:
    primary = self.jacobi_com
o = self.calculate_orbit(primary=primary)

if timespan is None:
    if o.a < 0.:  # hyperbolic orbit
        timespan = 2*math.pi*o.d/o.v  # rough time to cross display box
    else:
        timespan = o.P

lim_phase = abs(o.n)*timespan  # n is negative for hyperbolic orbits

if trailing is True:
    lim_phase *= -1  # sample phase backwards from current value
phase = [lim_phase*i/(Npts-1) for i in range(Npts)]

for i, ph in enumerate(phase):
    if useTrueAnomaly is True:
        newp = Particle(a=o.a, f=o.f+ph, inc=o.inc, omega=o.omega, Omega=o.Omega, e=o.e, m=self.m, primary=primary, simulation=self._sim.contents)
    else:
        newp = Particle(a=o.a, M=o.M+ph, inc=o.inc, omega=o.omega, Omega=o.Omega, e=o.e, m=self.m, primary=primary, simulation=self._sim.contents)
    pts.append(newp.xyz)
return pts
def sample_orbit(self, Npts=100, primary=None, trailing=True, timespan=None, useTrueAnomaly=True)
Returns a nested list of xyz positions along the osculating orbit of the particle. If primary is not passed, returns xyz positions along the Jacobi osculating orbit (with mu = G*Minc, where Minc is the total mass from index 0 to the particle's index, inclusive).

Parameters
----------
Npts : int, optional
    Number of points along the orbit to return (default: 100)
primary : rebound.Particle, optional
    Primary to use for the osculating orbit (default: Jacobi center of mass)
trailing : bool, optional
    Whether to return points stepping backwards in time (True) or forwards (False). (default: True)
timespan : float, optional
    Return points (for the osculating orbit) from the current position to timespan (forwards or backwards in time depending on the trailing keyword). Defaults to the orbital period for bound orbits, and to the rough time it takes the orbit to move by the current distance from the primary for a hyperbolic orbit. Implementation currently only supports this option if useTrueAnomaly=False.
useTrueAnomaly : bool, optional
    Will sample equally spaced points in true anomaly if True, otherwise in mean anomaly. The latter might be better for hyperbolic orbits, where true anomaly can stay near the limiting value for a long time, and then switch abruptly at pericenter. (default: True)
4.303813
3.911779
1.100219
if simp == None:
    simp = self.simp
if self.autorefresh == 0 and isauto == 1:
    return
sim = simp.contents
size_changed = clibrebound.reb_display_copy_data(simp)
clibrebound.reb_display_prepare_data(simp, c_int(self.orbits))
if sim.N > 0:
    self.particle_data = (c_char * (4*7*sim.N)).from_address(sim.display_data.contents.particle_data).raw
    if self.orbits:
        self.orbit_data = (c_char * (4*9*(sim.N-1))).from_address(sim.display_data.contents.orbit_data).raw
if size_changed:
    # TODO: Implement better GPU size change
    pass
if self.useroverlay == True:
    self.overlay = "REBOUND (%s), N=%d, t=%g" % (sim.integrator, sim.N, sim.t)
elif self.useroverlay is None or self.useroverlay == False:
    self.overlay = ""
else:
    self.overlay = self.useroverlay + ", N=%d, t=%g" % (sim.N, sim.t)
self.N = sim.N
self.t = sim.t
self.count += 1
def refresh(self, simp=None, isauto=0)
Manually refreshes a widget. Note that this function can also be called using the wrapper function of the Simulation object: sim.refreshWidgets().
4.456234
4.43067
1.00577
self.archive = archive
if resetCounter:
    self.screenshotcountall = 0
self.screenshotprefix = prefix
self.screenshotcount = 0
self.overlay = "REBOUND"
self.screenshot = ""
if archive is None:
    if times is None:
        times = self.simp.contents.t
    try:
        # List
        len(times)
    except:
        # Float:
        times = [times]
    self.times = times
    self.observe(savescreenshot, names="screenshot")
    self.simp.contents.integrate(times[0])
    self.screenshotcount += 1  # triggers first screenshot
else:
    if times is None:
        raise ValueError("Need times argument for archive mode.")
    try:
        len(times)
    except:
        raise ValueError("Need a list of times for archive mode.")
    self.times = times
    self.mode = mode
    self.observe(savescreenshot, names="screenshot")
    sim = archive.getSimulation(times[0], mode=mode)
    self.refresh(pointer(sim))
    self.screenshotcount += 1
def takeScreenshot(self, times=None, prefix="./screenshot", resetCounter=False, archive=None, mode="snapshot")
Take one or more screenshots of the widget and save the images to a file. The images can be used to create a video. This function cannot be called multiple times within one cell. Note: this is a new feature and might not work on all systems. It was tested on python 2.7.10 and 3.5.2 on MacOSX.

Parameters
----------
times : (float, list), optional
    If this argument is not given, a screenshot of the widget will be made as it is (without integrating the simulation). If a float is given, then the simulation will be integrated to that time and then a screenshot will be taken. If a list of floats is given, the simulation will be integrated to each time specified in the array. A separate screenshot for each time will be saved.
prefix : (str), optional
    This string will be part of the output filename for each image, followed by a five digit integer and the suffix .png. By default the prefix is './screenshot' which outputs images in the current directory with the filenames screenshot00000.png, screenshot00001.png... Note that the prefix can include a directory.
resetCounter : (bool), optional
    Resets the output counter to 0.
archive : (rebound.SimulationArchive), optional
    Use a REBOUND SimulationArchive. Thus, instead of integrating the Simulation from the current time, it will use the SimulationArchive to load a snapshot. See examples for usage.
mode : (string), optional
    Mode to use when querying the SimulationArchive. See SimulationArchive documentation for details. By default the value is "snapshot".

Examples
--------
First, create a simulation and widget. All of the following can go in one cell.

>>> sim = rebound.Simulation()
>>> sim.add(m=1.)
>>> sim.add(m=1.e-3,x=1.,vy=1.)
>>> w = sim.getWidget()
>>> w

The widget should show up. To take a screenshot, simply call

>>> w.takeScreenshot()

A new file with the name screenshot00000.png will appear in the current directory. Note that the takeScreenshot command needs to be in a separate cell, i.e. after you see the widget.

You can pass an array of times to the function. This allows you to take multiple screenshots, for example to create a movie,

>>> times = [0,10,100]
>>> w.takeScreenshot(times)
5.470926
5.624726
0.972657
i = self._coordinates
for name, _i in COORDINATES.items():
    if i == _i:
        return name
return i
def coordinates(self)
Get or set the internal coordinate system.

Available coordinate systems are:

- ``'jacobi'`` (default)
- ``'democraticheliocentric'``
- ``'whds'``
9.348881
10.450334
0.894601
warnings.warn("rebound.Simulation.from_archive(filename,snapshot) is deprecated and will be removed in the future. Use rebound.Simulation(filename,snapshot) instead", FutureWarning)
return cls(filename=filename, snapshot=snapshot)
def from_archive(cls, filename, snapshot=-1)
rebound.Simulation.from_archive(filename,snapshot) is deprecated and will be removed in the future. Use rebound.Simulation(filename,snapshot) instead.
3.988977
2.234042
1.785543
w = c_int(0)
sim = Simulation()
clibrebound._reb_copy_simulation_with_messages(byref(sim), byref(self), byref(w))
for majorerror, value, message in BINARY_WARNINGS:
    if w.value & value:
        if majorerror:
            raise RuntimeError(message)
        else:
            # Just a warning
            warnings.warn(message, RuntimeWarning)
return sim
def copy(self)
Returns a deep copy of a REBOUND simulation. You need to reset any function pointers on the copy.

Returns
-------
A rebound.Simulation object.
9.344072
8.416058
1.110267
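A minimal usage sketch (the two-body setup is illustrative):

>>> sim = rebound.Simulation()
>>> sim.add(m=1.)
>>> sim.add(m=1.e-3,x=1.,vy=1.)
>>> sim2 = sim.copy()    # independent deep copy
>>> sim2.integrate(10.)  # does not affect sim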
from .widget import Widget  # ondemand
from ipywidgets import DOMWidget
from IPython.display import display, HTML
if not hasattr(self, '_widgets'):
    self._widgets = []
    def display_heartbeat(simp):
        for w in self._widgets:
            w.refresh(simp, isauto=1)
    self.visualization = VISUALIZATIONS["webgl"]
    clibrebound.reb_display_init_data(byref(self))
    self._dhbf = AFF(display_heartbeat)
    self._display_heartbeat = self._dhbf
    display(HTML(Widget.getClientCode()))  # HACK! Javascript should go into custom.js
newWidget = Widget(self, **kwargs)
self._widgets.append(newWidget)
newWidget.refresh(isauto=0)
return newWidget
def getWidget(self,**kwargs)
Wrapper function that returns a new widget attached to this simulation. Widgets provide real-time 3D visualizations from within a Jupyter notebook. See the Widget class for more details on the possible arguments.

Arguments
---------
All arguments passed to this wrapper function will be passed to the Widget class.

Returns
-------
A rebound.Widget object.

Examples
--------
>>> sim = rebound.Simulation()
>>> sim.add(m=1.)
>>> sim.add(m=1.e-3,x=1.,vy=1.)
>>> sim.getWidget()
11.118963
11.472612
0.969174
if hasattr(self, '_widgets'):
    for w in self._widgets:
        w.refresh(isauto=0)
else:
    raise RuntimeError("No widgets found")
def refreshWidgets(self)
This function manually refreshes all widgets attached to this simulation. You want to call this function if any particle data has been manually changed.
6.784305
6.732412
1.007708
modes = sum(1 for i in [interval, walltime, step] if i != None)
if modes != 1:
    raise AttributeError("Need to specify either interval, walltime, or step")
if deletefile and os.path.isfile(filename):
    os.remove(filename)
if interval:
    clibrebound.reb_simulationarchive_automate_interval(byref(self), c_char_p(filename.encode("ascii")), c_double(interval))
if walltime:
    clibrebound.reb_simulationarchive_automate_walltime(byref(self), c_char_p(filename.encode("ascii")), c_double(walltime))
if step:
    clibrebound.reb_simulationarchive_automate_step(byref(self), c_char_p(filename.encode("ascii")), c_ulonglong(step))
self.process_messages()
def automateSimulationArchive(self, filename, interval=None, walltime=None, step=None, deletefile=False)
This function automates taking snapshots during a simulation using the Simulation Archive. Instead of using this function, one can also call simulationarchive_snapshot() manually to create snapshots.

Arguments
---------
filename : str
    Filename of the binary file.
interval : float
    Interval between outputs in code units.
walltime : float
    Interval between outputs in wall time (seconds). Useful when using IAS15 with adaptive timesteps.
step : int
    Interval between outputs in number of timesteps. Useful when outputs need to be spaced exactly.

Examples
--------
The following example creates a simulation, then initializes the Simulation Archive and integrates it forward in time.

>>> sim = rebound.Simulation()
>>> sim.add(m=1.)
>>> sim.add(m=1.e-3,x=1.,vy=1.)
>>> sim.automateSimulationArchive("sa.bin",interval=1000.)
>>> sim.integrate(1e8)

The SimulationArchive can later be read in using the following syntax:

>>> sa = rebound.SimulationArchive("sa.bin")
>>> sim = sa[0]   # get the first snapshot in the SA file (initial conditions)
>>> sim = sa[-1]  # get the last snapshot in the SA file
2.426007
2.971781
0.816348
clibrebound.reb_simulationarchive_snapshot(byref(self), c_char_p(filename.encode("ascii")))
def simulationarchive_snapshot(self, filename)
Take a snapshot and save it to a SimulationArchive file. If the file does not exist yet, a new one will be created. If the file does exist, a snapshot will be appended.

Arguments
---------
filename : str
    Filename of the binary file.
7.288963
10.762334
0.677266
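A manual-snapshot sketch (filename and times are illustrative):

>>> for t in [10., 20., 30.]:
>>>     sim.integrate(t)
>>>     sim.simulationarchive_snapshot("sa.bin")  # appends one snapshot per call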
from rebound import __version__, __build__
s = ""
s += "---------------------------------\n"
s += "REBOUND version: \t%s\n" % __version__
s += "REBOUND built on: \t%s\n" % __build__
s += "Number of particles: \t%d\n" % self.N
s += "Selected integrator: \t" + self.integrator + "\n"
s += "Simulation time: \t%.16e\n" % self.t
s += "Current timestep: \t%f\n" % self.dt
if self.N > 0:
    s += "---------------------------------\n"
    for p in self.particles:
        s += str(p) + "\n"
    s += "---------------------------------"
print(s)
def status(self)
Prints a summary of the current status of the simulation.
3.139601
2.988282
1.050637
i = self._integrator
for name, _i in INTEGRATORS.items():
    if i == _i:
        return name
return i
def integrator(self)
Get or set the integrator module.

Available integrators are:

- ``'ias15'`` (default)
- ``'whfast'``
- ``'sei'``
- ``'leapfrog'``
- ``'janus'``
- ``'mercurius'``
- ``'bs'``
- ``'none'``

Check the online documentation for a full description of each of the integrators.
7.266043
7.537865
0.963939
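A short usage sketch of the property above; it accepts a name and reads back a name:

>>> sim = rebound.Simulation()
>>> sim.integrator = "whfast"
>>> print(sim.integrator)
whfast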
i = self._boundary
for name, _i in BOUNDARIES.items():
    if i == _i:
        return name
return i
def boundary(self)
Get or set the boundary module.

Available boundary modules are:

- ``'none'`` (default)
- ``'open'``
- ``'periodic'``
- ``'shear'``

Check the online documentation for a full description of each of the modules.
9.508259
11.815009
0.804761
i = self._gravity
for name, _i in GRAVITIES.items():
    if i == _i:
        return name
return i
def gravity(self)
Get or set the gravity module.

Available gravity modules are:

- ``'none'``
- ``'basic'`` (default)
- ``'compensated'``
- ``'tree'``

Check the online documentation for a full description of each of the modules.
9.675379
10.675131
0.906348
i = self._collision
for name, _i in COLLISIONS.items():
    if i == _i:
        return name
return i
def collision(self)
Get or set the collision module.

Available collision modules are:

- ``'none'`` (default)
- ``'direct'``
- ``'tree'``
- ``'mercurius'``

Check the online documentation for a full description of each of the modules.
8.77113
10.134928
0.865436
return {'length': hash_to_unit(self.python_unit_l),
        'mass': hash_to_unit(self.python_unit_m),
        'time': hash_to_unit(self.python_unit_t)}
def units(self)
Tuple of the units for length, time and mass. Can be set in any order, and strings are not case-sensitive. See ipython_examples/Units.ipynb for more information. You can check the units' exact values and add additional units in rebound/rebound/units.py. Units should be set before adding particles to the simulation (will give an error otherwise).

Currently supported units
-------------------------

Times:
Hr          : Hours
Yr          : Julian years
Jyr         : Julian years
Sidereal_yr : Sidereal year
Yr2pi       : Year divided by 2pi, with year defined as orbital period of planet at 1AU around 1Msun star
Kyr         : Kiloyears (Julian)
Myr         : Megayears (Julian)
Gyr         : Gigayears (Julian)

Lengths:
M  : Meters
Cm : Centimeters
Km : Kilometers
AU : Astronomical Units

Masses:
Kg       : Kilograms
Msun     : Solar masses
Mmercury : Mercury masses
Mvenus   : Venus masses
Mearth   : Earth masses
Mmars    : Mars masses
Mjupiter : Jupiter masses
Msaturn  : Saturn masses
Muranus  : Uranus masses
Mpluto   : Pluto masses

Examples
--------
>>> sim = rebound.Simulation()
>>> sim.units = ('yr', 'AU', 'Msun')
5.048948
4.85325
1.040323
raise AttributeError("Must set sim.units before calling convert_particle_units in order to know what units to convert from.") new_l, new_t, new_m = check_units(args) for p in self.particles: units_convert_particle(p, hash_to_unit(self.python_unit_l), hash_to_unit(self.python_unit_t), hash_to_unit(self.python_unit_m), new_l, new_t, new_m) self.update_units((new_l, new_t, new_m))
def convert_particle_units(self, *args)
Will convert the units for the simulation (i.e. convert G) as well as the particles' cartesian elements. Must have set sim.units ahead of calling this function so REBOUND knows what units to convert from.

Parameters
----------
3 strings corresponding to units of time, length and mass. Can be in any order and aren't case sensitive. You can add new units to rebound/rebound/units.py
3.815821
3.421046
1.115396
cur_var_config_N = self.var_config_N
if order == 1:
    index = clibrebound.reb_add_var_1st_order(byref(self), c_int(testparticle))
elif order == 2:
    if first_order is None:
        raise AttributeError("Please specify corresponding first order variational equations when initializing second order variational equations.")
    if first_order_2 is None:
        first_order_2 = first_order
    index = clibrebound.reb_add_var_2nd_order(byref(self), c_int(testparticle), c_int(first_order.index), c_int(first_order_2.index))
else:
    raise AttributeError("Only variational equations of first and second order are supported.")

# Need a copy because location of original might shift if more variations added
s = Variation.from_buffer_copy(self.var_config[cur_var_config_N])
return s
def add_variation(self,order=1,first_order=None, first_order_2=None, testparticle=-1)
This function adds a set of variational particles to the simulation. If there are N real particles in the simulation, this function adds N additional variational particles. To see how many particles (real and variational) are in a simulation, use ``'sim.N'``. To see how many variational particles are in a simulation use ``'sim.N_var'``.

Currently Leapfrog, WHFast and IAS15 support first order variational equations. IAS15 also supports second order variational equations.

Parameters
----------
order : integer, optional
    By default the function adds a set of first order variational particles to the simulation. Set this flag to 2 for second order.
first_order : Variation, optional
    Second order variational equations depend on their corresponding first order variational equations. This parameter expects the Variation object corresponding to the first order variational equations.
first_order_2 : Variation, optional
    Same as first_order. But allows one to set two different indices to calculate off-diagonal elements. If omitted, then first_order will be used for both first order equations.
testparticle : int, optional
    If set to a value >= 0, then only one variational particle will be added and be treated as a test particle.

Returns
-------
Returns a Variation object (a copy--you can only modify it through its particles property or vary method).
4.868663
4.417874
1.102038
if seed is None:
    clibrebound.reb_tools_megno_init(byref(self))
else:
    clibrebound.reb_tools_megno_init_seed(byref(self), c_uint(seed))
def init_megno(self, seed=None)
This function initialises the chaos indicator MEGNO particles and enables their integration. MEGNO is short for Mean Exponential Growth of Nearby orbits. It can be used to test if a system is chaotic or not. In the backend, the integrator is integrating an additional set of particles using the variational equation. Note that variational equations are better suited for this than shadow particles. MEGNO is currently only supported in the IAS15 and WHFast integrators.

This function also needs to be called if you are interested in the Lyapunov exponent as it is calculated with the help of MEGNO. See Rein and Tamayo 2015 for details on the implementation.

For more information on MEGNO see e.g. http://dx.doi.org/10.1051/0004-6361:20011189
4.788415
4.614296
1.037735
if self._calculate_megno == 0:
    raise RuntimeError("MEGNO cannot be calculated. Make sure to call init_megno() after adding all particles but before integrating the simulation.")
clibrebound.reb_tools_calculate_megno.restype = c_double
return clibrebound.reb_tools_calculate_megno(byref(self))
def calculate_megno(self)
Return the current MEGNO value. Note that you need to call init_megno() before the start of the simulation.
7.901402
5.945397
1.328995
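A minimal MEGNO sketch combining init_megno() and calculate_megno() (the setup values are illustrative; MEGNO converges to 2 for regular orbits):

>>> sim = rebound.Simulation()
>>> sim.add(m=1.)
>>> sim.add(m=1.e-3,x=1.,vy=1.)
>>> sim.init_megno()
>>> sim.integrate(1000.)
>>> print(sim.calculate_megno())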
if self._calculate_megno == 0:
    raise RuntimeError("Lyapunov Characteristic Number cannot be calculated. Make sure to call init_megno() after adding all particles but before integrating the simulation.")
clibrebound.reb_tools_calculate_lyapunov.restype = c_double
return clibrebound.reb_tools_calculate_lyapunov(byref(self))
def calculate_lyapunov(self)
Return the current Lyapunov Characteristic Number (LCN). Note that you need to call init_megno() before the start of the simulation. To get a timescale (the Lyapunov timescale), take the inverse of this quantity.
11.080628
6.148474
1.802175
if particle is not None:
    if isinstance(particle, Particle):
        if (self.gravity == "tree" or self.collision == "tree") and self.root_size <= 0.:
            raise ValueError("The tree code for gravity and/or collision detection has been selected. However, the simulation box has not been configured yet. You cannot add particles until the simulation box has a finite size.")
        clibrebound.reb_add(byref(self), particle)
    elif isinstance(particle, list):
        for p in particle:
            self.add(p, **kwargs)
    elif isinstance(particle, str):
        if self.python_unit_l == 0 or self.python_unit_m == 0 or self.python_unit_t == 0:
            self.units = ('AU', 'yr2pi', 'Msun')
        self.add(horizons.getParticle(particle, **kwargs), hash=particle)
        units_convert_particle(self.particles[-1], 'km', 's', 'kg', hash_to_unit(self.python_unit_l), hash_to_unit(self.python_unit_t), hash_to_unit(self.python_unit_m))
    else:
        raise ValueError("Argument passed to add() not supported.")
else:
    self.add(Particle(simulation=self, **kwargs))
if hasattr(self, '_widgets'):
    self._display_heartbeat(pointer(self))
def add(self, particle=None, **kwargs)
Adds a particle to REBOUND. Accepts one of the following:

1) A single Particle structure.
2) The particle's mass and a set of cartesian coordinates: m,x,y,z,vx,vy,vz.
3) The primary as a Particle structure, the particle's mass and a set of orbital elements: primary,m,a,anom,e,omega,inc,Omega,MEAN (see :class:`.Orbit` for the definition of orbital elements).
4) A name of an object (uses NASA Horizons to look up coordinates)
5) A list of particles or names.
7.203328
6.749108
1.067301
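A sketch of the accepted argument types (the Horizons lookup requires network access):

>>> sim = rebound.Simulation()
>>> sim.add(m=1.)                  # from mass/cartesian keywords
>>> sim.add(m=1.e-3,x=1.,vy=1.)    # from coordinates
>>> sim.add("Mars")                # by name, via NASA Horizons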
if index is not None:
    clibrebound.reb_remove(byref(self), index, keepSorted)
if hash is not None:
    hash_types = c_uint32, c_uint, c_ulong
    PY3 = sys.version_info[0] == 3
    if PY3:
        string_types = str,
        int_types = int,
    else:
        string_types = basestring,
        int_types = int, long
    if isinstance(hash, string_types):
        clibrebound.reb_remove_by_hash(byref(self), rebhash(hash), keepSorted)
    elif isinstance(hash, int_types):
        clibrebound.reb_remove_by_hash(byref(self), c_uint32(hash), keepSorted)
    elif isinstance(hash, hash_types):
        clibrebound.reb_remove_by_hash(byref(self), hash, keepSorted)
if hasattr(self, '_widgets'):
    self._display_heartbeat(pointer(self))
self.process_messages()
def remove(self, index=None, hash=None, keepSorted=True)
Removes a particle from the simulation.

Parameters
----------
index : int, optional
    Specify particle to remove by index.
hash : c_uint32 or string, optional
    Specify particle to remove by hash (if a string is passed, the corresponding hash is calculated).
keepSorted : bool, optional
    By default, remove preserves the order of particles in the particles array. Might set it to zero in cases with many particles and many removals to speed things up.
2.880172
2.809786
1.02505
s = "" for p in self.particles: s += (("%%.%de "%prec) * 8)%(p.m, p.r, p.x, p.y, p.z, p.vx, p.vy, p.vz) + "\n" if len(s): s = s[:-1] return s
def particles_ascii(self, prec=8)
Returns an ASCII string with all particles' masses, radii, positions and velocities.

Parameters
----------
prec : int, optional
    Number of digits after decimal point. Default 8.
3.465431
3.341079
1.037219
for l in s.split("\n"): r = l.split() if len(r): try: r = [float(x) for x in r] p = Particle(simulation=self, m=r[0], r=r[1], x=r[2], y=r[3], z=r[4], vx=r[5], vy=r[6], vz=r[7]) self.add(p) except: raise AttributeError("Each line requires 8 floats corresponding to mass, radius, position (x,y,z) and velocity (x,y,z).")
def add_particles_ascii(self, s)
Adds particles from an ASCII string.

Parameters
----------
s : string
    One particle per line. Each line should include particle's mass, radius, position and velocity.
2.696072
2.520487
1.069663
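A round-trip sketch using the two ASCII helpers above; particles_ascii writes exactly the 8-float-per-line format that add_particles_ascii expects:

>>> txt = sim.particles_ascii(prec=12)
>>> sim2 = rebound.Simulation()
>>> sim2.add_particles_ascii(txt)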
orbits = []

if heliocentric is not None or barycentric is not None:
    raise AttributeError('heliocentric and barycentric keywords in calculate_orbits are deprecated. Pass primary keyword instead (sim.particles[0] for heliocentric and sim.calculate_com() for barycentric)')

if primary is None:
    jacobi = True
    primary = self.particles[0]
    clibrebound.reb_get_com_of_pair.restype = Particle
else:
    jacobi = False

for p in self.particles[1:self.N_real]:
    if jacobi_masses is True:
        interior_mass = primary.m
        # orbit conversion uses mu=G*(p.m+primary.m) so set prim.m=Mjac-m so mu=G*Mjac
        primary.m = self.particles[0].m*(p.m + interior_mass)/interior_mass - p.m
        orbits.append(p.calculate_orbit(primary=primary))
        primary.m = interior_mass  # back to total mass of interior bodies to update com
    else:
        orbits.append(p.calculate_orbit(primary=primary))
    if jacobi is True:
        # update com to include current particle for next iteration
        primary = clibrebound.reb_get_com_of_pair(primary, p)

return orbits
def calculate_orbits(self, primary=None, jacobi_masses=False, heliocentric=None, barycentric=None)
Calculate orbital parameters for all particles in the simulation. By default this function returns the orbits in Jacobi coordinates. If MEGNO is enabled, variational particles will be ignored.

Parameters
----------
primary : rebound.Particle, optional
    Set the primary against which to reference the osculating orbit. Default: Jacobi center of mass.
jacobi_masses : bool
    Whether to use jacobi primary mass in orbit calculation. (Default: False)
heliocentric : bool, DEPRECATED
    To calculate heliocentric elements, pass primary=sim.particles[0]
barycentric : bool, DEPRECATED
    To calculate barycentric elements, pass primary=sim.calculate_com()

Returns
-------
Returns an array of Orbits of length N-1.
5.380496
4.782621
1.12501
if last is None:
    last = self.N_real
clibrebound.reb_get_com_range.restype = Particle
return clibrebound.reb_get_com_range(byref(self), c_int(first), c_int(last))
def calculate_com(self, first=0, last=None)
Returns the center of momentum for all particles in the simulation.

Parameters
----------
first : int, optional
    If ``first`` is specified, only calculate the center of momentum starting from index=``first``.
last : int or None, optional
    If ``last`` is specified only calculate the center of momentum up to (but excluding) index=``last``. Same behavior as Python's range function.

Examples
--------
>>> sim = rebound.Simulation()
>>> sim.add(m=1, x=-20)
>>> sim.add(m=1, x=-10)
>>> sim.add(m=1, x=0)
>>> sim.add(m=1, x=10)
>>> sim.add(m=1, x=20)
>>> com = sim.calculate_com()
>>> com.x
0.0
>>> com = sim.calculate_com(first=2,last=4) # Considers indices 2,3
>>> com.x
5.0
5.302313
6.464428
0.820229
N = self.N
possible_keys = ["hash", "m", "r", "xyz", "vxvyvz", "xyzvxvyvz"]
d = {x: None for x in possible_keys}
for k, v in kwargs.items():
    if k in d:
        if k == "hash":
            if v.dtype != "uint32":
                raise AttributeError("Expected 'uint32' data type for '%s' array." % k)
            if v.size < N:
                raise AttributeError("Array '%s' is not large enough." % k)
            d[k] = v.ctypes.data_as(ctypes.POINTER(ctypes.c_uint32))
        else:
            if v.dtype != "float64":
                raise AttributeError("Expected 'float64' data type for %s array." % k)
            if k in ["xyz", "vxvyvz"]:
                minsize = 3*N
            elif k in ["xyzvxvyvz"]:
                minsize = 6*N
            else:
                minsize = N
            if v.size < minsize:
                raise AttributeError("Array '%s' is not large enough." % k)
            d[k] = v.ctypes.data_as(ctypes.POINTER(ctypes.c_double))
    else:
        raise AttributeError("Only '%s' are currently supported attributes for serialization." % "', '".join(d.keys()))
clibrebound.reb_serialize_particle_data(byref(self), d["hash"], d["m"], d["r"], d["xyz"], d["vxvyvz"], d["xyzvxvyvz"])
def serialize_particle_data(self,**kwargs)
Fast way to access serialized particle data via numpy arrays.

This function can directly set the values of numpy arrays to current particle data. This is significantly faster than accessing particle data via `sim.particles` as all the copying is done on the C side. No memory is allocated by this function. It expects correctly sized numpy arrays as arguments. The argument name indicates what kind of particle data is written to the array.

Possible argument names are "hash", "m", "r", "xyz", "vxvyvz", and "xyzvxvyvz". The datatype for the "hash" array needs to be uint32. The other arrays expect a datatype of float64. The lengths of "hash", "m", "r" arrays need to be at least sim.N. The lengths of xyz and vxvyvz need to be at least 3*sim.N. The length of "xyzvxvyvz" arrays need to be 6*sim.N. Exceptions are raised otherwise.

Note that this routine is only intended for special use cases where speed is an issue. For normal use, it is recommended to access particle data via the `sim.particles` array. Be aware of potential issues that arise by directly accessing the memory of numpy arrays (see numpy documentation for more details).

Examples
--------
This sets an array to the xyz positions of all particles:

>>> import numpy as np
>>> a = np.zeros((sim.N,3),dtype="float64")
>>> sim.serialize_particle_data(xyz=a)
>>> print(a)

To get all current radii of particles:

>>> a = np.zeros(sim.N,dtype="float64")
>>> sim.serialize_particle_data(r=a)
>>> print(a)

To get all current radii and hashes of particles:

>>> a = np.zeros(sim.N,dtype="float64")
>>> b = np.zeros(sim.N,dtype="uint32")
>>> sim.serialize_particle_data(r=a,hash=b)
>>> print(a,b)
2.614686
2.320778
1.126642
clibrebound.reb_tools_energy.restype = c_double
return clibrebound.reb_tools_energy(byref(self))
def calculate_energy(self)
Returns the sum of potential and kinetic energy of all particles in the simulation.
10.186749
7.862191
1.295663
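A common use is a conservation check (the integration time is illustrative):

>>> E0 = sim.calculate_energy()
>>> sim.integrate(100.)
>>> E1 = sim.calculate_energy()
>>> print(abs((E1-E0)/E0))  # relative energy error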
clibrebound.reb_tools_angular_momentum.restype = reb_vec3d
L = clibrebound.reb_tools_angular_momentum(byref(self))
return [L.x, L.y, L.z]
def calculate_angular_momentum(self)
Returns a list of the three (x,y,z) components of the total angular momentum of all particles in the simulation.
6.314867
5.430882
1.16277
clibrebound.reb_configure_box(byref(self), c_double(boxsize), c_int(root_nx), c_int(root_ny), c_int(root_nz))
return
def configure_box(self, boxsize, root_nx=1, root_ny=1, root_nz=1)
Initialize the simulation box.

This function only needs to be called if boundary conditions other than "none" are used. In such a case the boxsize must be known and is set with this function.

Parameters
----------
boxsize : float
    The size of one root box.
root_nx, root_ny, root_nz : int, optional
    The number of root boxes in each direction. The total size of the simulation box will be ``root_nx * boxsize``, ``root_ny * boxsize`` and ``root_nz * boxsize``. By default there will be exactly one root box in each direction.
3.617522
4.421557
0.818155
clibrebound.nghostx = c_int(nghostx)
clibrebound.nghosty = c_int(nghosty)
clibrebound.nghostz = c_int(nghostz)
return
def configure_ghostboxes(self, nghostx=0, nghosty=0, nghostz=0)
Initialize the ghost boxes.

This function only needs to be called if boundary conditions other than "none" or "open" are used. In such a case the number of ghostboxes must be known and is set with this function.

Parameters
----------
nghostx, nghosty, nghostz : int
    The number of ghost boxes in each direction. All values default to 0 (no ghost boxes).
3.024731
3.337265
0.90635
clibrebound.reb_output_binary(byref(self), c_char_p(filename.encode("ascii")))
def save(self, filename)
Save the entire REBOUND simulation to a binary file.
18.240425
7.856945
2.321567
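A save/reload sketch; reloading uses the rebound.Simulation(filename) constructor mentioned in the from_archive deprecation note above:

>>> sim.save("checkpoint.bin")
>>> sim2 = rebound.Simulation("checkpoint.bin")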
if debug.integrator_package == "REBOUND":
    self.exact_finish_time = c_int(exact_finish_time)
    ret_value = clibrebound.reb_integrate(byref(self), c_double(tmax))
    if ret_value == 1:
        self.process_messages()
        raise SimulationError("An error occurred during the integration.")
    if ret_value == 2:
        raise NoParticles("No more particles left in simulation.")
    if ret_value == 3:
        raise Encounter("Two particles had a close encounter (d<exit_min_distance).")
    if ret_value == 4:
        raise Escape("A particle escaped (r>exit_max_distance).")
    if ret_value == 5:
        raise Escape("User caused exit. Simulation did not finish.")  # should not occur in python
    if ret_value == 6:
        raise KeyboardInterrupt
    if ret_value == 7:
        raise Collision("Two particles collided (d < r1+r2)")
else:
    debug.integrate_other_package(tmax, exact_finish_time)
self.process_messages()
def integrate(self, tmax, exact_finish_time=1)
Main integration function. Call this function when you have setup your simulation and want to integrate it forward (or backward) in time. The function might be called many times to integrate the simulation in steps and create outputs in-between steps.

Parameters
----------
tmax : float
    The final time of your simulation. If the current time is 100, and tmax=200, then after calling the integrate routine, the time has advanced to t=200. If tmax is smaller than or equal to the current time, no integration will be performed.
exact_finish_time : int, optional
    This argument determines whether REBOUND should try to finish at the exact time (tmax) you give it or if it is allowed to overshoot. Overshooting could happen if one starts at t=0, has a timestep of dt=10 and wants to integrate to tmax=25. With ``exact_finish_time=1``, the integrator will choose the last timestep such that t is exactly 25 after the integration, otherwise t=30. Note that changing the timestep does affect the accuracy of symplectic integrators negatively.

Exceptions
----------
Exceptions are thrown when no more particles are left in the simulation or when a generic integration error occurred. If you specified exit_min_distance or exit_max_distance, then additional exceptions might be thrown for escaping particles or particles that undergo a close encounter.

Examples
--------
The typical usage is as follows. Note the use of ``np.linspace`` to create equally spaced outputs. ``np.logspace`` can be used to easily produce logarithmically spaced outputs.

>>> import numpy as np
>>> for time in np.linspace(0,100.,10):
>>>     sim.integrate(time)
>>>     perform_output(sim)
5.96017
5.004393
1.190988
if self.order == 2 and variation2 is None:
    variation2 = variation
if self._sim is not None:
    sim = self._sim.contents
    particles = sim.particles
else:
    raise RuntimeError("Something went wrong. Cannot seem to find simulation corresponding to variation.")
if self.testparticle >= 0:
    particles[self.index] = Particle(simulation=sim, particle=particles[particle_index], variation=variation, variation2=variation2, primary=primary)
else:
    particles[self.index + particle_index] = Particle(simulation=sim, particle=particles[particle_index], variation=variation, variation2=variation2, primary=primary)
def vary(self, particle_index, variation, variation2=None, primary=None)
This function can be used to initialize the variational particles that are part of a Variation.

Note that rather than using this convenience function, one can also directly manipulate the particles' coordinates using the following syntax:

>>> var = sim.add_variation()
>>> var.particles[0].x = 1.

The ``vary()`` function is useful for initializing variations corresponding to changes in one of the orbital parameters for a particle on a bound Keplerian orbit. The function supports both first and second order variations in the following classical orbital parameters: a, e, inc, omega, Omega, f, as well as the Pal (2009) coordinates: a, h, k, ix, iy, lambda, and in both cases the mass m of the particle. The advantage of the Pal coordinate system is that all derivatives are well behaved (infinitely differentiable). Classical orbital parameters on the other hand exhibit coordinate singularities, for example when e=0.

The following example initializes the variational particles corresponding to a change in the semi-major axis of the particle with index 1:

>>> var = sim.add_variation()
>>> var.vary(1,"a")

Parameters
----------
particle_index : int
    The index of the particle that should be varied. The index starts at 0 and runs through N-1. The first particle added to a simulation receives the index 0, the second 1, and so on.
variation : string
    This parameter determines which orbital parameter is varied.
variation2 : string, optional
    This is only used for second order variations which can depend on two varying parameters. If omitted, then it is assumed that the parameter variation is variation2.
primary : Particle, optional
    By default variational particles are created in the Heliocentric frame. Set this parameter to use any other particles as a primary (e.g. the center of mass).
4.356681
4.532327
0.961246
sim = self._sim.contents
ps = []
if self.testparticle >= 0:
    N = 1
else:
    N = sim.N - sim.N_var
ParticleList = Particle*N
ps = ParticleList.from_address(ctypes.addressof(sim._particles.contents) + self.index*ctypes.sizeof(Particle))
return ps
def particles(self)
Access the variational particles corresponding to this set of variational equations. The function returns a list of particles which are sorted in the same way as those in sim.particles. The particles are pointers and thus can be modified. If there are N real particles, this function will also return a list of N particles (all of which are variational particles).
8.399372
9.014688
0.931743
link_object = {}

if not getattr(instance, 'pk', None):
    # If instance doesn't have a `pk` field, we'll assume it doesn't
    # have a canonical resource URL to hang a link off of.
    # This generally only affects Ephemeral Objects.
    return data

link_fields = serializer.get_link_fields()
for name, field in six.iteritems(link_fields):
    # For included fields, omit link if there's no data.
    if name in data and not data[name]:
        continue

    link = getattr(field, 'link', None)
    if link is None:
        base_url = ''
        if settings.ENABLE_HOST_RELATIVE_LINKS:
            # if the resource isn't registered, this will default back to
            # using resource-relative urls for links.
            base_url = DynamicRouter.get_canonical_path(
                serializer.get_resource_key(),
                instance.pk
            ) or ''
        link = '%s%s/' % (base_url, name)
    # Default to DREST-generated relation endpoints.
    elif callable(link):
        link = link(name, field, data, instance)

    link_object[name] = link

if link_object:
    data['links'] = link_object
return data
def merge_link_object(serializer, data, instance)
Add a 'links' attribute to the data that maps field names to URLs. NOTE: This is the format that Ember Data supports, but alternative implementations are possible to support other formats.
6.034217
5.794843
1.041308
global POST_PROCESSORS
key = func.__name__
POST_PROCESSORS[key] = func
return func
def register_post_processor(func)
Register a post processor function to be run as the final step in serialization. The data passed in will already have gone through the sideloading processor.

Usage:

@register_post_processor
def my_post_processor(data):
    # do stuff with `data`
    return data
4.741318
5.278626
0.898211
if isinstance(obj, list):
    for key, o in enumerate(obj):
        # traverse into lists of objects
        self.process(o, parent=obj, parent_key=key, depth=depth)
elif isinstance(obj, dict):
    dynamic = self.is_dynamic(obj)
    returned = isinstance(obj, ReturnDict)
    if dynamic or returned:
        # recursively check all fields
        for key, o in six.iteritems(obj):
            if isinstance(o, list) or isinstance(o, dict):
                # lists or dicts indicate a relation
                self.process(
                    o,
                    parent=obj,
                    parent_key=key,
                    depth=depth + 1
                )

        if not dynamic or getattr(obj, 'embed', False):
            return

        serializer = obj.serializer
        name = serializer.get_plural_name()
        instance = getattr(obj, 'instance', serializer.instance)
        instance_pk = instance.pk if instance else None
        pk = getattr(obj, 'pk_value', instance_pk) or instance_pk

        # For polymorphic relations, `pk` can be a dict, so use the
        # string representation (dict isn't hashable).
        pk_key = repr(pk)

        # sideloading
        seen = True
        # if this object has not yet been seen
        if pk_key not in self.seen[name]:
            seen = False
            self.seen[name].add(pk_key)

        # prevent sideloading the primary objects
        if depth == 0:
            return

        # TODO: spec out the exact behavior for secondary instances of
        # the primary resource

        # if the primary resource is embedded, add it to a prefixed key
        if name == self.plural_name:
            name = '%s%s' % (
                settings.ADDITIONAL_PRIMARY_RESOURCE_PREFIX,
                name
            )

        if not seen:
            # allocate a top-level key in the data for this resource type
            if name not in self.data:
                self.data[name] = []
            # move the object into a new top-level bucket
            # and mark it as seen
            self.data[name].append(obj)
        else:
            # obj sideloaded, but maybe with other fields
            for o in self.data.get(name, []):
                if o.instance.pk == pk:
                    o.update(obj)
                    break

        # replace the object with a reference
        if parent is not None and parent_key is not None:
            parent[parent_key] = pk
def process(self, obj, parent=None, parent_key=None, depth=0)
Recursively process the data for sideloading. Converts the nested representation into a sideloaded representation.
4.745172
4.733169
1.002536
if not self.parent:
    return None
if not getattr(self.parent, 'request_fields', None):
    # default avoids an AttributeError when the parent has no request_fields
    return None
if not isinstance(self.parent.request_fields, dict):
    return None
return self.parent.request_fields.get(self.field_name)
def _get_request_fields_from_parent(self)
Get request fields from the parent serializer.
2.49554
2.113918
1.180528
import copy
import django

def prefetch_one_level(instances, prefetcher, lookup, level):
    rel_qs, rel_obj_attr, instance_attr, single, cache_name = (
        prefetcher.get_prefetch_queryset(
            instances, lookup.get_current_queryset(level)))

    additional_lookups = [
        copy.copy(additional_lookup) for additional_lookup
        in getattr(rel_qs, '_prefetch_related_lookups', [])
    ]
    if additional_lookups:
        rel_qs._prefetch_related_lookups = []

    all_related_objects = list(rel_qs)

    rel_obj_cache = {}
    for rel_obj in all_related_objects:
        rel_attr_val = rel_obj_attr(rel_obj)
        rel_obj_cache.setdefault(rel_attr_val, []).append(rel_obj)

    for obj in instances:
        instance_attr_val = instance_attr(obj)
        vals = rel_obj_cache.get(instance_attr_val, [])
        to_attr, as_attr = lookup.get_current_to_attr(level)
        if single:
            val = vals[0] if vals else None
            to_attr = to_attr if as_attr else cache_name
            setattr(obj, to_attr, val)
        else:
            if as_attr:
                setattr(obj, to_attr, vals)
            else:
                qs = getattr(obj, to_attr).all()
                qs._result_cache = vals
                qs._prefetch_done = True
                obj._prefetched_objects_cache[cache_name] = qs
    return all_related_objects, additional_lookups

# apply the patch
from django.db.models import query
if django.VERSION < (2, 0, 0):
    query.prefetch_one_level = prefetch_one_level
def patch_prefetch_one_level()
This patch addresses Django bug https://code.djangoproject.com/ticket/24873, which was merged into Django master in commit 025c6553771a09b80563baedb5b8300a8b01312f into django.db.models.query. The code that follows is identical to the code in the above commit, with all comments stripped out.
3.02306
2.921202
1.034868
metadata = super(DynamicMetadata, self).determine_metadata(request, view)
metadata['features'] = getattr(view, 'features', [])
if hasattr(view, 'get_serializer'):
    serializer = view.get_serializer(dynamic=False)
    if hasattr(serializer, 'get_name'):
        metadata['resource_name'] = serializer.get_name()
    if hasattr(serializer, 'get_plural_name'):
        metadata['resource_name_plural'] = serializer.get_plural_name()
    metadata['properties'] = self.get_serializer_info(serializer)
return metadata
def determine_metadata(self, request, view)
Adds `properties` and `features` to the metadata response.
2.767339
2.446041
1.131354
field_info = OrderedDict()
for attr in ('required', 'read_only', 'default', 'label'):
    field_info[attr] = getattr(field, attr)
if field_info['default'] is empty:
    field_info['default'] = None
if hasattr(field, 'immutable'):
    field_info['immutable'] = field.immutable
field_info['nullable'] = field.allow_null
if hasattr(field, 'choices'):
    field_info['choices'] = [
        {
            'value': choice_value,
            'display_name': force_text(choice_name, strings_only=True)
        }
        for choice_value, choice_name in field.choices.items()
    ]
many = False
if isinstance(field, DynamicRelationField):
    field = field.serializer
if isinstance(field, ListSerializer):
    field = field.child
    many = True
if isinstance(field, ModelSerializer):
    type = 'many' if many else 'one'
    field_info['related_to'] = field.get_plural_name()
else:
    type = self.label_lookup[field]

field_info['type'] = type
return field_info
def get_field_info(self, field)
Adds `related_to` and `nullable` to the metadata response.
2.644306
2.483678
1.064674
meta = model._meta
try:
    if DJANGO19:
        field = meta.get_field(field_name)
    else:
        field = meta.get_field_by_name(field_name)[0]
    return field
except:
    if DJANGO19:
        related_objs = (
            f for f in meta.get_fields()
            if (f.one_to_many or f.one_to_one)
            and f.auto_created and not f.concrete
        )
        related_m2m_objs = (
            f for f in meta.get_fields(include_hidden=True)
            if f.many_to_many and f.auto_created
        )
    else:
        related_objs = meta.get_all_related_objects()
        related_m2m_objs = meta.get_all_related_many_to_many_objects()

    related_objects = {
        o.get_accessor_name(): o
        for o in chain(related_objs, related_m2m_objs)
    }
    if field_name in related_objects:
        return related_objects[field_name]
    else:
        # check virtual fields (1.7)
        if hasattr(meta, 'virtual_fields'):
            for field in meta.virtual_fields:
                if field.name == field_name:
                    return field

        raise AttributeError(
            '%s is not a valid field for %s' % (field_name, model)
        )
def get_model_field(model, field_name)
Return a field given a model and field name. Arguments: model: a Django model field_name: the name of a field Returns: A Django field if `field_name` is a valid field for `model`; raises AttributeError otherwise.
1.871753
1.933026
0.968302
if not hasattr(model, '_meta'):
    # ephemeral model with no metaclass
    return False

model_field = get_model_field(model, field_name)
return isinstance(model_field, (ManyToManyField, RelatedObject))
def is_field_remote(model, field_name)
Check whether a given model field is a remote field. A remote field is the inverse of a one-to-many or a many-to-many relationship. Arguments: model: a Django model field_name: the name of a field Returns: True if `field_name` is a remote field, False otherwise.
4.755122
5.513414
0.862464
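A runnable sketch exercising the two helpers above against Django's bundled auth models; the bare-bones settings are just enough to load the models, and the specific field names are only illustrative:

import django
from django.conf import settings

settings.configure(INSTALLED_APPS=[
    'django.contrib.contenttypes',
    'django.contrib.auth',
])
django.setup()

from django.contrib.auth.models import User

get_model_field(User, 'username')   # concrete local field
get_model_field(User, 'groups')     # forward many-to-many
# get_model_field(User, 'bogus')    # would raise AttributeError

assert not is_field_remote(User, 'username')  # stored on the row itself
assert is_field_remote(User, 'groups')        # many-to-many counts as remote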
def wrapper(self):
    if not hasattr(self, '_resettable_cached_properties'):
        self._resettable_cached_properties = {}
    if func.__name__ not in self._resettable_cached_properties:
        self._resettable_cached_properties[func.__name__] = func(self)
    return self._resettable_cached_properties[func.__name__]

# Returns a property whose getter is the 'wrapper' function
return property(wrapper)
def resettable_cached_property(func)
Decorator to add cached computed properties to an object. Similar to Django's `cached_property` decorator, except stores all the data under a single well-known key so that it can easily be blown away.
2.223562
2.036888
1.091647
def reset(self):
    if hasattr(self, '_resettable_cached_properties'):
        self._resettable_cached_properties = {}

cls.reset = reset
return cls
def cacheable_object(cls)
Decorator to add a reset() method that clears data cached by the @resettable_cached_property decorator. Technically this could be a mixin...
5.523616
2.999158
1.841722
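A minimal runnable sketch combining the two decorators above; the class and property names are invented for illustration:

@cacheable_object
class Circle(object):
    def __init__(self, radius):
        self.radius = radius

    @resettable_cached_property
    def area(self):
        # "expensive" computation; runs once until reset() is called
        return 3.14159 * self.radius ** 2

c = Circle(2.0)
assert c.area == c.area            # second access hits the cache
c.radius = 3.0
assert c.area == 3.14159 * 4.0     # still the stale cached value
c.reset()                          # blow the cache away
assert c.area == 3.14159 * 9.0     # recomputed with the new radius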
setting, value = kwargs['setting'], kwargs['value']
if setting == self.name:
    self._reload(value)
def _settings_changed(self, *args, **kwargs)
Handle changes to core settings.
6.51578
4.766717
1.366932
if self.bound:  # Prevent double-binding
    return

super(DynamicRelationField, self).bind(*args, **kwargs)
self.bound = True

parent_model = getattr(self.parent.Meta, 'model', None)
remote = is_field_remote(parent_model, self.source)

try:
    model_field = get_model_field(parent_model, self.source)
except:
    # model field may not be available for m2o fields with no
    # related_name
    model_field = None

# Infer `required` and `allow_null`
if 'required' not in self.kwargs and (
    remote or (
        model_field and (
            model_field.has_default() or model_field.null
        )
    )
):
    self.required = False
if 'allow_null' not in self.kwargs and getattr(
    model_field, 'null', False
):
    self.allow_null = True

self.model_field = model_field
def bind(self, *args, **kwargs)
Bind to the parent serializer.
3.551992
3.317404
1.070714
if not self.parent:
    # Don't cache, so that we'd recompute if parent is set.
    return None

node = self
seen = set()
while True:
    seen.add(node)
    if getattr(node, 'parent', None):
        node = node.parent
        if node in seen:
            return None
    else:
        return node
def root_serializer(self)
Return the root serializer (serializer for the primary resource).
4.503519
4.159592
1.082683
if not self.parent or not self._is_dynamic:
    return kwargs

if 'request_fields' not in kwargs:
    # If 'request_fields' isn't explicitly set, pull it from the
    # parent serializer.
    request_fields = self._get_request_fields_from_parent()
    if request_fields is None:
        # Default to 'id_only' for nested serializers.
        request_fields = True
    kwargs['request_fields'] = request_fields

if self.embed and kwargs.get('request_fields') is True:
    # If 'embed' then make sure we fetch the full object.
    kwargs['request_fields'] = {}

if hasattr(self.parent, 'sideloading'):
    kwargs['sideloading'] = self.parent.sideloading

if hasattr(self.parent, 'debug'):
    kwargs['debug'] = self.parent.debug

return kwargs
def _inherit_parent_kwargs(self, kwargs)
Extract any necessary attributes from parent serializer to propagate down to child serializer.
3.637237
3.545919
1.025753
init_args = {
    k: v for k, v in six.iteritems(self.kwargs)
    if k in self.SERIALIZER_KWARGS
}
kwargs = self._inherit_parent_kwargs(kwargs)
init_args.update(kwargs)

if self.embed and self._is_dynamic:
    init_args['embed'] = True

return self._get_cached_serializer(args, init_args)
def get_serializer(self, *args, **kwargs)
Get an instance of the child serializer.
4.378504
4.245718
1.031275
serializer = self.serializer
model = serializer.get_model()
source = self.source
if not self.kwargs['many'] and serializer.id_only():
    # attempt to optimize by reading the related ID directly
    # from the current instance rather than from the related object
    source_id = '%s_id' % source
    # try the faster way first:
    if hasattr(instance, source_id):
        return getattr(instance, source_id)
    elif model is not None:
        # this is probably a one-to-one field, or a reverse related
        # lookup, so let's look it up the slow way and let the
        # serializer handle the id dereferencing
        try:
            instance = getattr(instance, source)
        except model.DoesNotExist:
            instance = None

# dereference ephemeral objects
if model is None:
    instance = getattr(instance, source)

if instance is None:
    return None

return serializer.to_representation(instance)
def to_representation(self, instance)
Represent the relationship, either as an ID or object.
5.611247
5.394092
1.040258
related_model = serializer.Meta.model
if isinstance(data, related_model):
    return data
try:
    instance = related_model.objects.get(pk=data)
except related_model.DoesNotExist:
    raise ValidationError(
        "Invalid value for '%s': %s object with ID=%s not found" %
        (self.field_name, related_model.__name__, data)
    )
return instance
def to_internal_value_single(self, data, serializer)
Return the underlying object, given the serialized form.
2.589294
2.492923
1.038658
if self.kwargs['many']:
    serializer = self.serializer.child
    if not isinstance(data, list):
        raise ParseError("'%s' value must be a list" % self.field_name)
    return [
        self.to_internal_value_single(
            instance, serializer
        ) for instance in data
    ]
return self.to_internal_value_single(data, self.serializer)
def to_internal_value(self, data)
Return the underlying object(s), given the serialized form.
3.427453
3.1882
1.075043
serializer_class = self._serializer_class
if not isinstance(serializer_class, six.string_types):
    return serializer_class

parts = serializer_class.split('.')
module_path = '.'.join(parts[:-1])
if not module_path:
    if getattr(self, 'parent', None) is None:
        raise Exception(
            "Can not load serializer '%s'" % serializer_class +
            ' before binding or without specifying full path'
        )

    # try the module of the parent class
    module_path = self.parent.__module__

module = importlib.import_module(module_path)
serializer_class = getattr(module, parts[-1])

self._serializer_class = serializer_class
return serializer_class
def serializer_class(self)
Get the class of the child serializer. Resolves string imports.
3.023305
2.948531
1.02536
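Illustrative usage of the lazy string resolution above; the serializer names and module path are hypothetical, and the Meta declarations are omitted for brevity:

class UserSerializer(DynamicModelSerializer):
    # a bare name is resolved against this module once the field is bound:
    groups = DynamicRelationField('GroupSerializer', many=True)
    # an absolute dotted path can be resolved even before binding:
    location = DynamicRelationField('myapp.serializers.LocationSerializer')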
prefetches = []
for field, fprefetch in self.prefetches.items():
    has_query = hasattr(fprefetch, 'query')
    qs = fprefetch.query.queryset if has_query else None
    prefetches.append(
        Prefetch(field, queryset=qs)
    )

queryset = self.queryset
if prefetches:
    queryset = queryset.prefetch_related(*prefetches)

return queryset
def _get_django_queryset(self)
Return Django QuerySet with prefetches properly configured.
3.609435
2.955045
1.221449
if isinstance(value, list):
    for val in value:
        self.appendlist(key, val)
else:
    self.appendlist(key, value)
def add(self, key, value)
Accept a list of values and append each one to a flat list. QueryDict.appendlist(), if given a list, appends the list itself, which creates nested lists. In most cases, we want to be able to pass in a list (for convenience) but have its values appended into a flattened list. TODO: Possibly throw an error if add() is used on a non-list param.
2.974802
2.039631
1.4585
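A standalone sketch of the flattening behavior, using a plain dict-of-lists as a stand-in for QueryDict (the real class needs a configured Django project):

class FakeQueryDict(dict):
    def appendlist(self, key, value):
        self.setdefault(key, []).append(value)

    def add(self, key, value):  # same logic as the method above
        if isinstance(value, list):
            for val in value:
                self.appendlist(key, val)
        else:
            self.appendlist(key, value)

qd = FakeQueryDict()
qd.add('include[]', ['user.', 'groups.'])  # list is flattened, not nested
qd.add('include[]', 'location.')           # single value is appended
assert qd['include[]'] == ['user.', 'groups.', 'location.']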
def handle_encodings(request):
    try:
        return QueryParams(request.GET)
    except UnicodeEncodeError:
        pass

    s = request.environ.get('QUERY_STRING', '')
    try:
        s = s.encode('utf-8')
    except UnicodeDecodeError:
        pass

    return QueryParams(s)

request.GET = handle_encodings(request)
request = super(WithDynamicViewSetMixin, self).initialize_request(
    request, *args, **kargs
)

try:
    # Django<1.9, DRF<3.2
    # MergeDict doesn't have the same API as dict.
    # Django has deprecated MergeDict and DRF is moving away from
    # using it - thus, we're comfortable replacing it with a QueryDict.
    # This will allow the data property to have normal dict methods.
    from django.utils.datastructures import MergeDict
    if isinstance(request._full_data, MergeDict):
        data_as_dict = request.data.dicts[0]
        for d in request.data.dicts[1:]:
            data_as_dict.update(d)
        request._full_data = data_as_dict
except:
    pass

return request
def initialize_request(self, request, *args, **kargs)
Override DRF's initialize_request() method to swap request.GET (which is aliased by request.query_params) with a mutable instance of QueryParams, and to convert the request's MergeDict to a subclass of dict for consistency (MergeDict is not a subclass of dict).
5.312441
4.68688
1.133471
renderers = super(WithDynamicViewSetMixin, self).get_renderers()
if settings.ENABLE_BROWSABLE_API is False:
    return [
        r for r in renderers
        if not isinstance(r, BrowsableAPIRenderer)
    ]
else:
    return renderers
def get_renderers(self)
Optionally block Browsable API rendering.
4.65143
3.411795
1.363338
if '[]' in name:
    # array-type
    return self.request.query_params.getlist(
        name) if name in self.features else None
elif '{}' in name:
    # object-type (keys are not consistent)
    return self._extract_object_params(
        name) if name in self.features else {}
else:
    # single-type
    return self.request.query_params.get(
        name) if name in self.features else None
def get_request_feature(self, name)
Parses the request for a particular feature. Arguments: name: A feature name. Returns: A feature parsed from the URL if the feature is supported, or None.
4.056142
4.377256
0.92664
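A standalone sketch of the three parameter styles the method distinguishes, with a plain dict-of-lists standing in for request.query_params (the feature names are illustrative, and the object-type branch is simplified; the full prefix-stripping is shown in the next record):

features = {'include[]', 'filter{}', 'per_page'}
query_params = {
    'include[]': ['user.', 'groups.'],
    'filter{name.contains}': ['fido'],
    'per_page': ['10'],
}

def get_feature(name):
    if name not in features:
        return {} if '{}' in name else None
    if '[]' in name:                       # array-type: every value
        return query_params.get(name, [])
    elif '{}' in name:                     # object-type: matching keys
        return {k: v for k, v in query_params.items()
                if k.startswith(name[:-1])}
    else:                                  # single-type: last value wins
        return query_params.get(name, [None])[-1]

assert get_feature('include[]') == ['user.', 'groups.']
assert get_feature('per_page') == '10'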
params = self.request.query_params.lists()
params_map = {}
prefix = name[:-1]
offset = len(prefix)
for name, value in params:
    if name.startswith(prefix):
        if name.endswith('}'):
            name = name[offset:-1]
        elif name.endswith('}[]'):
            # strip off trailing []
            # this fixes an Ember queryparams issue
            name = name[offset:-3]
        else:
            # malformed argument like:
            # filter{foo=bar
            raise exceptions.ParseError(
                '"%s" is not a well-formed filter key.' % name
            )
    else:
        continue
    params_map[name] = value

return params_map
def _extract_object_params(self, name)
Extract object params and return them as a dict.
5.437516
5.356097
1.015201
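A standalone sketch of the prefix-stripping above, with a plain list of (name, values) pairs standing in for request.query_params.lists():

def extract_object_params(name, params):
    params_map = {}
    prefix = name[:-1]            # e.g. 'filter{' from 'filter{}'
    offset = len(prefix)
    for key, value in params:
        if not key.startswith(prefix):
            continue
        if key.endswith('}'):
            key = key[offset:-1]
        elif key.endswith('}[]'):
            key = key[offset:-3]  # strip trailing '[]' (Ember-style)
        else:
            raise ValueError('"%s" is not a well-formed filter key.' % key)
        params_map[key] = value
    return params_map

params = [('filter{name.contains}', ['fido']), ('filter{owner}[]', ['1', '2'])]
assert extract_object_params('filter{}', params) == {
    'name.contains': ['fido'], 'owner': ['1', '2']}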
serializer = self.get_serializer()
return getattr(self, 'queryset', serializer.Meta.model.objects.all())
def get_queryset(self, queryset=None)
Returns a queryset for this request. Arguments: queryset: Optional root-level queryset.
5.398056
7.974971
0.676875
if hasattr(self, '_request_fields'):
    return self._request_fields

include_fields = self.get_request_feature(self.INCLUDE)
exclude_fields = self.get_request_feature(self.EXCLUDE)

request_fields = {}
for fields, include in (
        (include_fields, True),
        (exclude_fields, False)):
    if fields is None:
        continue
    for field in fields:
        field_segments = field.split('.')
        num_segments = len(field_segments)
        current_fields = request_fields
        for i, segment in enumerate(field_segments):
            last = i == num_segments - 1
            if segment:
                if last:
                    current_fields[segment] = include
                else:
                    if segment not in current_fields:
                        current_fields[segment] = {}
                    current_fields = current_fields[segment]
            elif not last:
                # empty segment must be the last segment
                raise exceptions.ParseError(
                    '"%s" is not a valid field.' % field
                )

self._request_fields = request_fields
return request_fields
def get_request_fields(self)
Parses the INCLUDE and EXCLUDE features. Extracts the dynamic field features from the request parameters into a field map that can be passed to a serializer. Returns: A nested dict mapping serializer keys to True (include) or False (exclude).
2.461994
2.237964
1.100104
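A standalone sketch of the dotted-path parsing above (simplified: it omits the trailing-dot form that the full method also accepts):

def parse_fields(include_fields, exclude_fields):
    request_fields = {}
    for fields, include in ((include_fields, True), (exclude_fields, False)):
        for field in fields:
            segments = field.split('.')
            current = request_fields
            for i, segment in enumerate(segments):
                if i == len(segments) - 1:
                    current[segment] = include       # leaf: True/False flag
                else:
                    current = current.setdefault(segment, {})  # descend
    return request_fields

assert parse_fields(['user.profile'], ['user.password']) == {
    'user': {'profile': True, 'password': False}}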
# Explicitly disable filtering support. Applying filters to this
# endpoint would require us to pass through sideload filters, which
# can have unintended consequences when applied asynchronously.
if self.get_request_feature(self.FILTER):
    raise ValidationError(
        'Filtering is not enabled on relation endpoints.'
    )

# Prefix include/exclude filters with field_name so it's scoped to
# the parent object.
field_prefix = field_name + '.'
self._prefix_inex_params(request, self.INCLUDE, field_prefix)
self._prefix_inex_params(request, self.EXCLUDE, field_prefix)

# Filter for parent object, include related field.
self.request.query_params.add('filter{pk}', pk)
self.request.query_params.add(self.INCLUDE, field_prefix)

# Get serializer and field.
serializer = self.get_serializer()
field = serializer.fields.get(field_name)
if field is None:
    raise ValidationError('Unknown field: "%s".' % field_name)

# Query for root object, with related field prefetched
queryset = self.get_queryset()
queryset = self.filter_queryset(queryset)
obj = queryset.first()

if not obj:
    return Response("Not found", status=404)

# Serialize the related data. Use the field's serializer to ensure
# it's configured identically to the sideload case.
serializer = field.get_serializer(envelope=True)
try:
    # TODO(ryo): Probably should use field.get_attribute() but that
    # seems to break a bunch of things. Investigate later.
    serializer.instance = getattr(obj, field.source)
except ObjectDoesNotExist:
    # See:
    # http://jsonapi.org/format/#fetching-relationships-responses-404
    # This is a case where the "link URL exists but the relationship
    # is empty" and therefore must return a 200.
    return Response({}, status=200)

return Response(serializer.data)
def list_related(self, request, pk=None, field_name=None)
Fetch related object(s), as if sideloaded (used to support link objects). This method gets mapped to `/<resource>/<pk>/<field_name>/` by DynamicRouter for all DynamicRelationField fields. Generally, this method probably shouldn't be overridden. An alternative implementation would be to generate reverse queries. For an exploration of that approach, see: https://gist.github.com/ryochiji/54687d675978c7d96503
5.930616
5.833632
1.016625
# noqa
if self.ENABLE_BULK_UPDATE:
    patch_all = self.get_request_patch_all()
    if self.ENABLE_PATCH_ALL and patch_all:
        # patch-all update
        data = request.data
        return self._patch_all(
            data,
            query=(patch_all == 'query')
        )
    else:
        # bulk payload update
        partial = 'partial' in kwargs
        bulk_payload = self._get_bulk_payload(request)
        if bulk_payload:
            return self._bulk_update(bulk_payload, partial)

# singular update
try:
    return super(DynamicModelViewSet, self).update(request, *args, **kwargs)
except AssertionError as e:
    err = str(e)
    if 'Fix your URL conf' in err:
        # this error is returned by DRF if a client
        # makes an update request (PUT or PATCH) without an ID
        # since DREST supports bulk updates with IDs contained in data,
        # we return a 400 instead of a 500 for this case,
        # as this is not considered a misconfiguration
        raise exceptions.ValidationError(err)
    else:
        raise
def update(self, request, *args, **kwargs)
Update one or more model instances. If ENABLE_BULK_UPDATE is set, multiple previously-fetched records may be updated in a single call, provided their IDs. If ENABLE_PATCH_ALL is set, multiple records may be updated in a single PATCH call, even without knowing their IDs. *WARNING*: ENABLE_PATCH_ALL should be considered an advanced feature and used with caution. This feature must be enabled at the viewset level and must also be requested explicitly by the client via the "patch-all" query parameter. This parameter can have one of the following values: true (or 1): records will be fetched and then updated in a transaction loop - The `Model.save` method will be called and model signals will run - This can be slow if there are too many signals or many records in the query - This is considered the more safe and default behavior query: records will be updated in a single query - The `QuerySet.update` method will be called and model signals will not run - This will be fast, but may break data constraints that are controlled by signals - This is considered unsafe but useful in certain situations The server's successful response to a patch-all request will NOT include any individual records. Instead, the response content will contain a "meta" object with an "updated" count of updated records. Examples: Update one dog: PATCH /dogs/1/ { 'fur': 'white' } Update many dogs by ID: PATCH /dogs/ [ {'id': 1, 'fur': 'white'}, {'id': 2, 'fur': 'black'}, {'id': 3, 'fur': 'yellow'} ] Update all dogs in a query: PATCH /dogs/?filter{fur.contains}=brown&patch-all=true { 'fur': 'gold' }
5.619061
5.585114
1.006078
bulk_payload = self._get_bulk_payload(request)
if bulk_payload:
    return self._create_many(bulk_payload)
return super(DynamicModelViewSet, self).create(
    request, *args, **kwargs)
def create(self, request, *args, **kwargs)
Either create a single or many model instances in bulk using the Serializer's many=True ability from Django REST Framework >= 2.2.5. The data can be represented by the serializer name (single or plural forms), dict or list. Examples: POST /dogs/ { "name": "Fido", "age": 2 } POST /dogs/ { "dog": { "name": "Lucky", "age": 3 } } POST /dogs/ { "dogs": [ {"name": "Fido", "age": 2}, {"name": "Lucky", "age": 3} ] } POST /dogs/ [ {"name": "Fido", "age": 2}, {"name": "Lucky", "age": 3} ]
3.742463
4.10638
0.911378
bulk_payload = self._get_bulk_payload(request)
if bulk_payload:
    return self._destroy_many(bulk_payload)

lookup_url_kwarg = self.lookup_url_kwarg or self.lookup_field
if lookup_url_kwarg not in kwargs:
    # assume that it is a poorly formatted bulk request
    return Response(status=status.HTTP_405_METHOD_NOT_ALLOWED)

return super(DynamicModelViewSet, self).destroy(
    request, *args, **kwargs
)
def destroy(self, request, *args, **kwargs)
Either delete a single or many model instances in bulk. DELETE /dogs/ { "dogs": [ {"id": 1}, {"id": 2} ] } DELETE /dogs/ [ {"id": 1}, {"id": 2} ]
3.251379
3.365566
0.966072
model = self.get_model()
if model:
    return get_model_table(model)
else:
    return self.get_name()
def get_resource_key(self)
Return canonical resource key, usually the DB table name.
5.450719
4.065629
1.340683
data = super(DynamicListSerializer, self).data
processed_data = ReturnDict(
    SideloadingProcessor(self, data).data,
    serializer=self
) if self.child.envelope else ReturnList(
    data,
    serializer=self
)
processed_data = post_process(processed_data)
return processed_data
def data(self)
Get the data, after performing post-processing if necessary.
9.832927
8.48591
1.158736
if not self.dynamic:
    return

if (isinstance(self.request_fields, dict) and
        self.request_fields.pop('*', None) is False):
    exclude_fields = '*'

only_fields = set(only_fields or [])
include_fields = include_fields or []
exclude_fields = exclude_fields or []

if only_fields:
    exclude_fields = '*'
    include_fields = only_fields

if exclude_fields == '*':
    # First exclude all, then add back in explicitly included fields.
    include_fields = set(
        list(include_fields) + [
            field for field, val in six.iteritems(self.request_fields)
            if val or val == {}
        ]
    )
    all_fields = set(self.get_all_fields().keys())  # this is slow
    exclude_fields = all_fields - include_fields
elif include_fields == '*':
    all_fields = set(self.get_all_fields().keys())  # this is slow
    include_fields = all_fields

for name in exclude_fields:
    self.request_fields[name] = False

for name in include_fields:
    if not isinstance(self.request_fields.get(name), dict):
        # not sideloading this field
        self.request_fields[name] = True
def _dynamic_init(self, only_fields, include_fields, exclude_fields)
Modifies `request_fields` via higher-level dynamic field interfaces. Arguments: only_fields: List of field names to render. All other fields will be deferred (respects sideloads). include_fields: List of field names to include. Adds to default field set, (respects sideloads). `*` means include all fields. exclude_fields: List of field names to exclude. Removes from default field set. If set to '*', all fields are removed, except for ones that are explicitly included.
2.900916
2.644125
1.097117
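Illustrative calls showing how the three kwargs interact; `DogSerializer` and its fields are hypothetical:

DogSerializer(only_fields=['id', 'name'])      # just id and name
DogSerializer(include_fields=['owner'])        # default field set plus owner
DogSerializer(include_fields=['*'])            # every defined field
DogSerializer(exclude_fields=['created'])      # default set minus created
DogSerializer(exclude_fields=['*'],
              include_fields=['id'])           # exclude all, add back id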
if not hasattr(cls.Meta, 'name'):
    class_name = getattr(cls.get_model(), '__name__', None)
    setattr(
        cls.Meta,
        'name',
        inflection.underscore(class_name) if class_name else None
    )

return cls.Meta.name
def get_name(cls)
Get the serializer name. The name can be defined on the Meta class or will be generated automatically from the model name.
3.45427
3.168839
1.090074
if not hasattr(cls.Meta, 'plural_name'):
    setattr(
        cls.Meta,
        'plural_name',
        inflection.pluralize(cls.get_name())
    )
return cls.Meta.plural_name
def get_plural_name(cls)
Get the serializer's plural name. The plural name may be defined on the Meta class. If the plural name is not defined, the pluralized form of the name will be returned.
2.83799
3.037679
0.934263
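The automatic naming in the two methods above leans on the `inflection` package; a runnable check of the two transformations involved:

import inflection

assert inflection.underscore('ChatRoom') == 'chat_room'    # get_name()
assert inflection.pluralize('chat_room') == 'chat_rooms'   # get_plural_name()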
if (
    not settings.ENABLE_FIELDS_CACHE or
    not self.ENABLE_FIELDS_CACHE or
    self.__class__ not in FIELDS_CACHE
):
    all_fields = super(
        WithDynamicSerializerMixin, self
    ).get_fields()

    if (
        settings.ENABLE_FIELDS_CACHE and
        self.ENABLE_FIELDS_CACHE
    ):
        FIELDS_CACHE[self.__class__] = all_fields
else:
    all_fields = copy.copy(FIELDS_CACHE[self.__class__])
    for k, field in six.iteritems(all_fields):
        if hasattr(field, 'reset'):
            field.reset()

for k, field in six.iteritems(all_fields):
    field.field_name = k
    field.parent = self

return all_fields
def _all_fields(self)
Returns the entire serializer field set. Does not respect dynamic field inclusions/exclusions.
2.853302
2.645564
1.078523
all_fields = self.get_all_fields()

if self.dynamic is False:
    return all_fields

if self.id_only():
    return {}

serializer_fields = copy.deepcopy(all_fields)
request_fields = self.request_fields
deferred = self._get_deferred_field_names(serializer_fields)

# apply request overrides
if request_fields:
    for name, include in six.iteritems(request_fields):
        if name not in serializer_fields:
            raise exceptions.ParseError(
                '"%s" is not a valid field name for "%s".' %
                (name, self.get_name())
            )
        if include is not False and name in deferred:
            deferred.remove(name)
        elif include is False:
            deferred.add(name)

for name in deferred:
    serializer_fields.pop(name)

# Set read_only flags based on read_only_fields meta list.
# Here to cover DynamicFields not covered by DRF.
ro_fields = getattr(self.Meta, 'read_only_fields', [])
self.flag_fields(serializer_fields, ro_fields, 'read_only', True)

pw_fields = getattr(self.Meta, 'untrimmed_fields', [])
self.flag_fields(
    serializer_fields,
    pw_fields,
    'trim_whitespace',
    False,
)

# Toggle read_only flags for immutable fields.
# Note: This overrides `read_only` if both are set, to allow
# inferred DRF fields to be made immutable.
immutable_field_names = self._get_flagged_field_names(
    serializer_fields,
    'immutable'
)
self.flag_fields(
    serializer_fields,
    immutable_field_names,
    'read_only',
    value=False if self.get_request_method() == 'POST' else True
)

return serializer_fields
def get_fields(self)
Returns the serializer's field set. If `dynamic` is True, respects field inclusions/exclusions. Otherwise, reverts back to standard DRF behavior.
3.679257
3.482058
1.056633
query_params = self.get_request_attribute('query_params', {})
if 'exclude_links' in query_params:
    return {}
else:
    all_fields = self.get_all_fields()
    return {
        name: field for name, field in six.iteritems(all_fields)
        if isinstance(field, DynamicRelationField) and
        getattr(field, 'link', True) and
        not (
            # Skip sideloaded fields
            name in self.fields and
            self.is_field_sideloaded(name)
        ) and not (
            # Skip included single relations
            # TODO: Use links, when we can generate canonical URLs
            name in self.fields and
            not getattr(field, 'many', False)
        )
    }
def _link_fields(self)
Construct dict of name:field for linkable fields.
5.152291
4.737242
1.087614
ret = {}
fields = self._readable_fields

is_fast = isinstance(instance, prefetch.FastObject)
id_fields = self._readable_id_fields

for field in fields:
    attribute = None

    # we exclude dynamic fields here because the proper fastquery
    # dereferencing happens in the `get_attribute` method now
    if is_fast and not isinstance(
        field,
        (DynamicGenericRelationField, DynamicRelationField)
    ):
        if field in id_fields and field.source not in instance:
            # TODO - make better.
            attribute = instance.get(field.source + '_id')
            ret[field.field_name] = attribute
            continue
        else:
            try:
                attribute = instance[field.source]
            except KeyError:
                # slower, but does more stuff
                # Also, some temp debugging
                if hasattr(instance, field.source):
                    attribute = getattr(instance, field.source)
                else:
                    # Fall back on DRF behavior
                    attribute = field.get_attribute(instance)
                    print(
                        'Missing %s from %s' % (
                            field.field_name,
                            self.__class__.__name__
                        )
                    )
    else:
        try:
            attribute = field.get_attribute(instance)
        except SkipField:
            continue

    if attribute is None:
        # We skip `to_representation` for `None` values so that
        # fields do not have to explicitly deal with that case.
        ret[field.field_name] = None
    else:
        ret[field.field_name] = field.to_representation(attribute)

return ret
def _faster_to_representation(self, instance)
Modified to_representation with optimizations. 1) Returns a plain old dict as opposed to OrderedDict. (Constructing ordered dict is ~100x slower than `{}`.) 2) Ensure we use a cached list of fields (this optimization exists in DRF 3.2 but not 3.1) Arguments: instance: a model instance or data object Returns: Dict of primitive datatypes.
4.624118
4.612341
1.002553
if self.enable_optimization:
    representation = self._faster_to_representation(instance)
else:
    representation = super(
        WithDynamicSerializerMixin, self
    ).to_representation(instance)

if settings.ENABLE_LINKS:
    # TODO: Make this function configurable to support other
    # formats like JSON API link objects.
    representation = merge_link_object(
        self, representation, instance
    )

if self.debug:
    representation['_meta'] = {
        'id': instance.pk,
        'type': self.get_plural_name()
    }

# tag the representation with the serializer and instance
return tag_dict(
    representation,
    serializer=self,
    instance=instance,
    embed=self.embed
)
def _to_representation(self, instance)
Uncached `to_representation`.
5.729294
5.374195
1.066075
if self.id_only():
    return instance.pk

pk = getattr(instance, 'pk', None)

if not settings.ENABLE_SERIALIZER_OBJECT_CACHE or pk is None:
    return self._to_representation(instance)
else:
    if pk not in self.obj_cache:
        self.obj_cache[pk] = self._to_representation(instance)
    return self.obj_cache[pk]
def to_representation(self, instance)
Modified to_representation method. Optionally may cache objects. Arguments: instance: A model instance or data object. Returns: Instance ID if the serializer is meant to represent its ID. Otherwise, a tagged data dict representation.
3.278243
2.926517
1.120186
update = getattr(self, 'instance', None) is not None
instance = super(
    WithDynamicSerializerMixin, self
).save(
    *args, **kwargs
)

view = self._context.get('view')
if view and update:
    if int(DRF_VERSION[0]) <= 3 and int(DRF_VERSION[1]) < 5:
        # Reload the object on update
        # to get around prefetch cache issues
        # Fixed in DRF in 3.5.0
        instance = self.instance = view.get_object()

return instance
def save(self, *args, **kwargs)
Serializer save that addresses prefetch issues.
5.630423
4.919737
1.144456
model = self.get_model()

out = [model._meta.pk.name]  # get PK field name

# If this is being called, it means it
# is a many-relation to its parent.
# Django wants the FK to the parent,
# but since accurately inferring the FK
# pointing back to the parent is less than trivial,
# we will just pull all ID fields.
# TODO: We also might need to return all non-nullable fields,
# or else it is possible Django will issue another request.
for field in model._meta.fields:
    if isinstance(field, models.ForeignKey):
        out.append(field.name + '_id')

return out
def get_id_fields(self)
Called to return a list of fields consisting of, at minimum, the PK field name. The output of this method is used to construct a Prefetch object with a .only() queryset when this field is not being sideloaded but we need to return a list of IDs.
10.095717
9.551258
1.057004
if not isinstance(instance, dict):
    data = super(
        DynamicEphemeralSerializer, self
    ).to_representation(instance)
else:
    data = instance
    instance = EphemeralObject(data)

if self.id_only():
    return data
else:
    return tag_dict(data, serializer=self, instance=instance)
def to_representation(self, instance)
Provides post processing. Sub-classes should implement their own to_representation method, but pass the resulting dict through this function to get tagging and field selection. Arguments: instance: Serialized dict, or object. If object, it will be serialized by the super class's to_representation() method.
6.478725
5.908912
1.096433
def get_url(url):
    return reverse(url, request=request) if url else url

def is_active_url(path, url):
    return path.startswith(url) if url and path else False

path = request.path
directory_list = []

def sort_key(r):
    return r[0]

# TODO(ant): support arbitrarily nested
# structure, for now it is capped at a single level
# for UX reasons
for group_name, endpoints in sorted(
    six.iteritems(directory),
    key=sort_key
):
    endpoints_list = []
    for endpoint_name, endpoint in sorted(
        six.iteritems(endpoints),
        key=sort_key
    ):
        if endpoint_name[:1] == '_':
            continue
        endpoint_url = get_url(endpoint.get('_url', None))
        active = is_active_url(path, endpoint_url)
        endpoints_list.append(
            (endpoint_name, endpoint_url, [], active)
        )

    url = get_url(endpoints.get('_url', None))
    active = is_active_url(path, url)
    directory_list.append(
        (group_name, url, endpoints_list, active)
    )
return directory_list
def get_directory(request)
Get API directory as a nested list of lists.
3.222152
3.166271
1.017649
class API(views.APIView):
    _ignore_model_permissions = True

    def get(self, request, *args, **kwargs):
        directory_list = get_directory(request)
        result = OrderedDict()
        for group_name, url, endpoints, _ in directory_list:
            if url:
                result[group_name] = url
            else:
                group = OrderedDict()
                for endpoint_name, url, _, _ in endpoints:
                    group[endpoint_name] = url
                result[group_name] = group
        return Response(result)

return API.as_view()
def get_api_root_view(self, **kwargs)
Return API root view, using the global directory.
3.139354
2.857528
1.098626
if base_name is None:
    base_name = prefix

super(DynamicRouter, self).register(prefix, viewset, base_name)

prefix_parts = prefix.split('/')
if len(prefix_parts) > 1:
    prefix = prefix_parts[0]
    endpoint = '/'.join(prefix_parts[1:])
else:
    endpoint = prefix
    prefix = None

if prefix and prefix not in directory:
    current = directory[prefix] = {}
else:
    current = directory.get(prefix, directory)

list_name = self.routes[0].name
url_name = list_name.format(basename=base_name)
if endpoint not in current:
    current[endpoint] = {}
current[endpoint]['_url'] = url_name
current[endpoint]['_viewset'] = viewset
def register(self, prefix, viewset, base_name=None)
Add any registered route into a global API directory. If the prefix includes a path separator, store the URL in the directory under the first path segment. Otherwise, store it as-is. For example, if there are two registered prefixes, 'v1/users' and 'groups', `directory` will look like: { 'v1': { 'users': { '_url': 'users-list' '_viewset': <class 'UserViewSet'> }, } 'groups': { '_url': 'groups-list' '_viewset': <class 'GroupViewSet'> } }
2.768459
2.612054
1.059878
# Try to extract resource name from viewset.
try:
    serializer = viewset.serializer_class()
    resource_key = serializer.get_resource_key()
    resource_name = serializer.get_name()
    path_name = serializer.get_plural_name()
except:
    import traceback
    traceback.print_exc()
    raise Exception(
        "Failed to extract resource name from viewset: '%s'."
        " It, or its serializer, may not be DREST-compatible." % (
            viewset
        )
    )

# Construct canonical path and register it.
if namespace:
    namespace = namespace.rstrip('/') + '/'
base_path = namespace or ''
base_path = r'%s' % base_path + path_name
self.register(base_path, viewset)

# Make sure resource isn't already registered.
if resource_key in resource_map:
    raise Exception(
        "The resource '%s' has already been mapped to '%s'."
        " Each resource can only be mapped to one canonical"
        " path. " % (
            resource_key,
            resource_map[resource_key]['path']
        )
    )

# Register resource in reverse map.
resource_map[resource_key] = {
    'path': base_path,
    'viewset': viewset
}

# Make sure the resource name isn't registered, either
# TODO: Think of a better way to clean this up, there's a lot of
# duplicated effort here, between `resource_name` and `resource_key`
# This resource name -> key mapping is currently only used by
# the DynamicGenericRelationField
if resource_name in resource_name_map:
    resource_key = resource_name_map[resource_name]
    raise Exception(
        "The resource name '%s' has already been mapped to '%s'."
        " A resource name can only be used once." % (
            resource_name,
            resource_map[resource_key]['path']
        )
    )

# map the resource name to the resource key for easier lookup
resource_name_map[resource_name] = resource_key
def register_resource(self, viewset, namespace=None)
Register a viewset that should be considered the canonical endpoint for a particular resource. In addition to generating and registering the route, it adds the route in a reverse map to allow DREST to build the canonical URL for a given resource. Arguments: viewset - viewset class, should have `serializer_class` attr. namespace - (optional) URL namespace, e.g. 'v3'.
3.486202
3.283626
1.061693
if resource_key not in resource_map:
    # Note: Maybe raise?
    return None

base_path = get_script_prefix() + resource_map[resource_key]['path']
if pk:
    return '%s/%s/' % (base_path, pk)
else:
    return base_path
def get_canonical_path(resource_key, pk=None)
Return canonical resource path. Arguments: resource_key - Canonical resource key i.e. Serializer.get_resource_key(). pk - (Optional) Object's primary key for a single-resource URL. Returns: Absolute URL as string.
4.018827
3.800351
1.057488
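An illustrative sketch, seeding the module-level resource_map directly so the path construction can be exercised without a full router; the 'auth_user' key and 'users' path are assumptions, and the default script prefix of '/' is assumed:

resource_map['auth_user'] = {'path': 'users', 'viewset': None}

assert get_canonical_path('auth_user') == '/users'          # no trailing slash
assert get_canonical_path('auth_user', pk=1) == '/users/1/'
assert get_canonical_path('unknown_key') is None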