code                      string
signature                 string
docstring                 string
loss_without_docstring    float64
loss_with_docstring       float64
factor                    float64
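Column semantics, inferred from the rows below rather than from an authoritative schema: `factor` appears to be the ratio `loss_without_docstring / loss_with_docstring`, so a factor above 1.0 means the docstring lowered the model's loss on the code. A minimal sketch under exactly that assumption:

def docstring_factor(loss_without_docstring, loss_with_docstring):
    # Ratio > 1.0 means the docstring helped (lowered the loss).
    return loss_without_docstring / loss_with_docstring

# First record below: 3.680486 / 3.823936 ~= 0.962486
assert round(docstring_factor(3.680486, 3.823936), 6) == 0.962486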
if size is not None:
    data = self.rfile.readline(size)
    self.bytes_read += len(data)
    self._check_length()
    return data

# User didn't specify a size ...
# We read the line in chunks to make sure it's not a 100MB line !
res = []
while True:
    data = self.rfile.readline(256)
    self.bytes_read += len(data)
    self._check_length()
    res.append(data)
    # See https://github.com/cherrypy/cherrypy/issues/421
    if len(data) < 256 or data[-1:] == LF:
        return EMPTY.join(res)
def readline(self, size=None)
Read a single line from rfile buffer and return it.

Args:
    size (int): minimum amount of data to read

Returns:
    bytes: One line from rfile.
3.680486
3.823936
0.962486
data = EMPTY
if size == 0:
    return data

while True:
    if size and len(data) >= size:
        return data

    if not self.buffer:
        self._fetch()
        if not self.buffer:
            # EOF
            return data

    if size:
        remaining = size - len(data)
        data += self.buffer[:remaining]
        self.buffer = self.buffer[remaining:]
    else:
        data += self.buffer
        self.buffer = EMPTY
def read(self, size=None)
Read a chunk from rfile buffer and return it.

Args:
    size (int): amount of data to read

Returns:
    bytes: Chunk from rfile, limited by size if specified.
2.467748
2.890005
0.853891
data = EMPTY
if size == 0:
    return data

while True:
    if size and len(data) >= size:
        return data

    if not self.buffer:
        self._fetch()
        if not self.buffer:
            # EOF
            return data

    newline_pos = self.buffer.find(LF)
    if size:
        if newline_pos == -1:
            remaining = size - len(data)
            data += self.buffer[:remaining]
            self.buffer = self.buffer[remaining:]
        else:
            remaining = min(size - len(data), newline_pos)
            data += self.buffer[:remaining]
            self.buffer = self.buffer[remaining:]
    else:
        if newline_pos == -1:
            data += self.buffer
            self.buffer = EMPTY
        else:
            data += self.buffer[:newline_pos]
            self.buffer = self.buffer[newline_pos:]
def readline(self, size=None)
Read a single line from rfile buffer and return it.

Args:
    size (int): minimum amount of data to read

Returns:
    bytes: One line from rfile.
1.966955
2.147692
0.915846
if not self.closed:
    raise ValueError(
        'Cannot read trailers until the request body has been read.',
    )

while True:
    line = self.rfile.readline()
    if not line:
        # No more data--illegal end of headers
        raise ValueError('Illegal end of headers.')

    self.bytes_read += len(line)
    if self.maxlen and self.bytes_read > self.maxlen:
        raise IOError('Request Entity Too Large')

    if line == CRLF:
        # Normal end of headers
        break
    if not line.endswith(CRLF):
        raise ValueError('HTTP requires CRLF terminators')

    yield line
def read_trailer_lines(self)
Read HTTP headers and yield them.

Returns:
    Generator: yields CRLF separated lines.
4.444986
4.164444
1.067366
# then all the http headers
try:
    self.header_reader(self.rfile, self.inheaders)
except ValueError as ex:
    self.simple_response('400 Bad Request', ex.args[0])
    return False

mrbs = self.server.max_request_body_size
try:
    cl = int(self.inheaders.get(b'Content-Length', 0))
except ValueError:
    self.simple_response(
        '400 Bad Request',
        'Malformed Content-Length Header.',
    )
    return False

if mrbs and cl > mrbs:
    self.simple_response(
        '413 Request Entity Too Large',
        'The entity sent with the request exceeds the maximum '
        'allowed bytes.',
    )
    return False

# Persistent connection support
if self.response_protocol == 'HTTP/1.1':
    # Both server and client are HTTP/1.1
    if self.inheaders.get(b'Connection', b'') == b'close':
        self.close_connection = True
else:
    # Either the server or client (or both) are HTTP/1.0
    if self.inheaders.get(b'Connection', b'') != b'Keep-Alive':
        self.close_connection = True

# Transfer-Encoding support
te = None
if self.response_protocol == 'HTTP/1.1':
    te = self.inheaders.get(b'Transfer-Encoding')
    if te:
        te = [x.strip().lower() for x in te.split(b',') if x.strip()]

self.chunked_read = False

if te:
    for enc in te:
        if enc == b'chunked':
            self.chunked_read = True
        else:
            # Note that, even if we see "chunked", we must reject
            # if there is an extension we don't recognize.
            self.simple_response('501 Unimplemented')
            self.close_connection = True
            return False

# From PEP 333:
# "Servers and gateways that implement HTTP 1.1 must provide
# transparent support for HTTP 1.1's "expect/continue" mechanism.
# This may be done in any of several ways:
#   1. Respond to requests containing an Expect: 100-continue request
#      with an immediate "100 Continue" response, and proceed normally.
#   2. Proceed with the request normally, but provide the application
#      with a wsgi.input stream that will send the "100 Continue"
#      response if/when the application first attempts to read from
#      the input stream. The read request must then remain blocked
#      until the client responds.
#   3. Wait until the client decides that the server does not support
#      expect/continue, and sends the request body on its own.
#      (This is suboptimal, and is not recommended.)
#
# We used to do 3, but are now doing 1. Maybe we'll do 2 someday,
# but it seems like it would be a big slowdown for such a rare case.
if self.inheaders.get(b'Expect', b'') == b'100-continue':
    # Don't use simple_response here, because it emits headers
    # we don't want. See
    # https://github.com/cherrypy/cherrypy/issues/951
    msg = self.server.protocol.encode('ascii')
    msg += b' 100 Continue\r\n\r\n'
    try:
        self.conn.wfile.write(msg)
    except socket.error as ex:
        if ex.args[0] not in errors.socket_errors_to_ignore:
            raise
return True
def read_request_headers(self)
Read self.rfile into self.inheaders. Return success.
3.669054
3.578032
1.025439
mrbs = self.server.max_request_body_size
if self.chunked_read:
    self.rfile = ChunkedRFile(self.conn.rfile, mrbs)
else:
    cl = int(self.inheaders.get(b'Content-Length', 0))
    if mrbs and mrbs < cl:
        if not self.sent_headers:
            self.simple_response(
                '413 Request Entity Too Large',
                'The entity sent with the request exceeds the '
                'maximum allowed bytes.',
            )
        return
    self.rfile = KnownLengthRFile(self.conn.rfile, cl)

self.server.gateway(self).respond()
self.ready and self.ensure_headers_sent()

if self.chunked_write:
    self.conn.wfile.write(b'0\r\n\r\n')
def respond(self)
Call the gateway and write its iterable output.
4.622083
4.387043
1.053576
status = str(status)
proto_status = '%s %s\r\n' % (self.server.protocol, status)
content_length = 'Content-Length: %s\r\n' % len(msg)
content_type = 'Content-Type: text/plain\r\n'
buf = [
    proto_status.encode('ISO-8859-1'),
    content_length.encode('ISO-8859-1'),
    content_type.encode('ISO-8859-1'),
]

if status[:3] in ('413', '414'):
    # Request Entity Too Large / Request-URI Too Long
    self.close_connection = True
    if self.response_protocol == 'HTTP/1.1':
        # This will not be true for 414, since read_request_line
        # usually raises 414 before reading the whole line, and we
        # therefore cannot know the proper response_protocol.
        buf.append(b'Connection: close\r\n')
    else:
        # HTTP/1.0 had no 413/414 status nor Connection header.
        # Emit 400 instead and trust the message body is enough.
        status = '400 Bad Request'

buf.append(CRLF)
if msg:
    if isinstance(msg, six.text_type):
        msg = msg.encode('ISO-8859-1')
    buf.append(msg)

try:
    self.conn.wfile.write(EMPTY.join(buf))
except socket.error as ex:
    if ex.args[0] not in errors.socket_errors_to_ignore:
        raise
def simple_response(self, status, msg='')
Write a simple response back to the client.
3.424922
3.406885
1.005294
if self.chunked_write and chunk:
    chunk_size_hex = hex(len(chunk))[2:].encode('ascii')
    buf = [chunk_size_hex, CRLF, chunk, CRLF]
    self.conn.wfile.write(EMPTY.join(buf))
else:
    self.conn.wfile.write(chunk)
def write(self, chunk)
Write unbuffered data to the client.
3.687797
3.41799
1.078937
hkeys = [key.lower() for key, value in self.outheaders]
status = int(self.status[:3])

if status == 413:
    # Request Entity Too Large. Close conn to avoid garbage.
    self.close_connection = True
elif b'content-length' not in hkeys:
    # "All 1xx (informational), 204 (no content),
    # and 304 (not modified) responses MUST NOT
    # include a message-body." So no point chunking.
    if status < 200 or status in (204, 205, 304):
        pass
    else:
        needs_chunked = (
            self.response_protocol == 'HTTP/1.1'
            and self.method != b'HEAD'
        )
        if needs_chunked:
            # Use the chunked transfer-coding
            self.chunked_write = True
            self.outheaders.append((b'Transfer-Encoding', b'chunked'))
        else:
            # Closing the conn is the only way to determine len.
            self.close_connection = True

if b'connection' not in hkeys:
    if self.response_protocol == 'HTTP/1.1':
        # Both server and client are HTTP/1.1 or better
        if self.close_connection:
            self.outheaders.append((b'Connection', b'close'))
    else:
        # Server and/or client are HTTP/1.0
        if not self.close_connection:
            self.outheaders.append((b'Connection', b'Keep-Alive'))

if (not self.close_connection) and (not self.chunked_read):
    # Read any remaining request body data on the socket.
    # "If an origin server receives a request that does not include an
    # Expect request-header field with the "100-continue" expectation,
    # the request includes a request body, and the server responds
    # with a final status code before reading the entire request body
    # from the transport connection, then the server SHOULD NOT close
    # the transport connection until it has read the entire request,
    # or until the client closes the connection. Otherwise, the client
    # might not reliably receive the response message. However, this
    # requirement is not be construed as preventing a server from
    # defending itself against denial-of-service attacks, or from
    # badly broken client implementations."
    remaining = getattr(self.rfile, 'remaining', 0)
    if remaining > 0:
        self.rfile.read(remaining)

if b'date' not in hkeys:
    self.outheaders.append((
        b'Date',
        email.utils.formatdate(usegmt=True).encode('ISO-8859-1'),
    ))

if b'server' not in hkeys:
    self.outheaders.append((
        b'Server',
        self.server.server_name.encode('ISO-8859-1'),
    ))

proto = self.server.protocol.encode('ascii')
buf = [proto + SPACE + self.status + CRLF]
for k, v in self.outheaders:
    buf.append(k + COLON + SPACE + v + CRLF)
buf.append(CRLF)
self.conn.wfile.write(EMPTY.join(buf))
def send_headers(self)
Assert, process, and send the HTTP response message-headers. You must set self.status and self.outheaders before calling this.
3.292131
3.2307
1.019015
request_seen = False
try:
    while True:
        # (re)set req to None so that if something goes wrong in
        # the RequestHandlerClass constructor, the error doesn't
        # get written to the previous request.
        req = None
        req = self.RequestHandlerClass(self.server, self)

        # This order of operations should guarantee correct pipelining.
        req.parse_request()
        if self.server.stats['Enabled']:
            self.requests_seen += 1
        if not req.ready:
            # Something went wrong in the parsing (and the server has
            # probably already made a simple_response). Return and
            # let the conn close.
            return

        request_seen = True
        req.respond()
        if req.close_connection:
            return
except socket.error as ex:
    errnum = ex.args[0]
    # sadly SSL sockets return a different (longer) time out string
    timeout_errs = 'timed out', 'The read operation timed out'
    if errnum in timeout_errs:
        # Don't error if we're between requests; only error
        # if 1) no request has been started at all, or 2) we're
        # in the middle of a request.
        # See https://github.com/cherrypy/cherrypy/issues/853
        if (not request_seen) or (req and req.started_request):
            self._conditional_error(req, '408 Request Timeout')
    elif errnum not in errors.socket_errors_to_ignore:
        self.server.error_log(
            'socket.error %s' % repr(errnum),
            level=logging.WARNING, traceback=True,
        )
        self._conditional_error(req, '500 Internal Server Error')
except (KeyboardInterrupt, SystemExit):
    raise
except errors.FatalSSLAlert:
    pass
except errors.NoSSLError:
    self._handle_no_ssl(req)
except Exception as ex:
    self.server.error_log(
        repr(ex), level=logging.ERROR, traceback=True,
    )
    self._conditional_error(req, '500 Internal Server Error')
def communicate(self)
Read each request and respond appropriately.
5.345315
5.149406
1.038045
if not req or req.sent_headers:
    return

try:
    req.simple_response(response)
except errors.FatalSSLAlert:
    pass
except errors.NoSSLError:
    self._handle_no_ssl(req)
def _conditional_error(self, req, response)
Respond with an error. Don't bother writing if a response has already started being written.
8.234344
7.781406
1.058208
self.rfile.close()

if not self.linger:
    self._close_kernel_socket()
    self.socket.close()
else:
    # On the other hand, sometimes we want to hang around for a bit
    # to make sure the client has a chance to read our entire
    # response. Skipping the close() calls here delays the FIN
    # packet until the socket object is garbage-collected later.
    # Someday, perhaps, we'll do the full lingering_close that
    # Apache does, but not today.
    pass
def close(self)
Close the socket underlying this connection.
12.056458
11.346565
1.062565
# LRU cached on per-instance basis, see __init__
PEERCRED_STRUCT_DEF = '3i'

if IS_WINDOWS or self.socket.family != socket.AF_UNIX:
    raise NotImplementedError(
        'SO_PEERCRED is only supported in Linux kernel and WSL',
    )
elif not self.peercreds_enabled:
    raise RuntimeError(
        'Peer creds lookup is disabled within this server',
    )

try:
    peer_creds = self.socket.getsockopt(
        # FIXME: Use LOCAL_CREDS for BSD-like OSs
        # Ref: https://gist.github.com/LucaFilipozzi/e4f1e118202aff27af6aadebda1b5d91  # noqa
        socket.SOL_SOCKET, socket.SO_PEERCRED,
        struct.calcsize(PEERCRED_STRUCT_DEF),
    )
except socket.error as socket_err:
    six.raise_from(  # 3.6+: raise RuntimeError from socket_err
        RuntimeError,
        socket_err,
    )
else:
    pid, uid, gid = struct.unpack(PEERCRED_STRUCT_DEF, peer_creds)
    return pid, uid, gid
def get_peer_creds(self)
Return the PID/UID/GID tuple of the peer socket for UNIX sockets.

This function uses SO_PEERCRED to query the UNIX PID, UID, GID of the peer, which is only available if the bind address is a UNIX domain socket.

Raises:
    NotImplementedError: in case of unsupported socket type
    RuntimeError: in case of SO_PEERCRED lookup unsupported or disabled
7.415126
6.554342
1.13133
# LRU cached on per-instance basis
if not IS_UID_GID_RESOLVABLE:
    raise NotImplementedError(
        'UID/GID lookup is unavailable under current platform. '
        'It can only be done under UNIX-like OS '
        'but not under the Google App Engine',
    )
elif not self.peercreds_resolve_enabled:
    raise RuntimeError(
        'UID/GID lookup is disabled within this server',
    )

user = pwd.getpwuid(self.peer_uid).pw_name  # [0]
group = grp.getgrgid(self.peer_gid).gr_name  # [0]

return user, group
def resolve_peer_creds(self)
Return the username and group tuple of the peercreds if available.

Raises:
    NotImplementedError: in case of unsupported OS
    RuntimeError: in case of UID/GID lookup unsupported or disabled
5.875578
4.698075
1.250635
if six.PY2 and hasattr(self.socket, '_sock'):
    self.socket._sock.close()
def _close_kernel_socket(self)
Close kernel socket in outdated Python versions. On old Python versions, Python's socket module does NOT call close on the kernel socket when you call socket.close(). We do so manually here because we want this server to send a FIN TCP segment immediately. Note this must be called *before* calling socket.close(), because the latter drops its reference to the kernel socket.
5.496287
4.974021
1.104999
self._start_time = None
self._run_time = 0
self.stats = {
    'Enabled': False,
    'Bind Address': lambda s: repr(self.bind_addr),
    'Run time': lambda s: (not s['Enabled']) and -1 or self.runtime(),
    'Accepts': 0,
    'Accepts/sec': lambda s: s['Accepts'] / self.runtime(),
    'Queue': lambda s: getattr(self.requests, 'qsize', None),
    'Threads': lambda s: len(getattr(self.requests, '_threads', [])),
    'Threads Idle': lambda s: getattr(self.requests, 'idle', None),
    'Socket Errors': 0,
    'Requests': lambda s: (not s['Enabled']) and -1 or sum(
        [w['Requests'](w) for w in s['Worker Threads'].values()], 0,
    ),
    'Bytes Read': lambda s: (not s['Enabled']) and -1 or sum(
        [w['Bytes Read'](w) for w in s['Worker Threads'].values()], 0,
    ),
    'Bytes Written': lambda s: (not s['Enabled']) and -1 or sum(
        [w['Bytes Written'](w) for w in s['Worker Threads'].values()], 0,
    ),
    'Work Time': lambda s: (not s['Enabled']) and -1 or sum(
        [w['Work Time'](w) for w in s['Worker Threads'].values()], 0,
    ),
    'Read Throughput': lambda s: (not s['Enabled']) and -1 or sum(
        [w['Bytes Read'](w) / (w['Work Time'](w) or 1e-6)
         for w in s['Worker Threads'].values()], 0,
    ),
    'Write Throughput': lambda s: (not s['Enabled']) and -1 or sum(
        [w['Bytes Written'](w) / (w['Work Time'](w) or 1e-6)
         for w in s['Worker Threads'].values()], 0,
    ),
    'Worker Threads': {},
}
logging.statistics['Cheroot HTTPServer %d' % id(self)] = self.stats
def clear_stats(self)
Reset server stat counters.
2.411875
2.356169
1.023643
if self._start_time is None:
    return self._run_time
else:
    return self._run_time + (time.time() - self._start_time)
def runtime(self)
Return server uptime.
3.272002
3.007063
1.088106
if isinstance(value, tuple) and value[0] in ('', None):
    # Despite the socket module docs, using '' does not
    # allow AI_PASSIVE to work. Passing None instead
    # returns '0.0.0.0' like we want. In other words:
    #     host    AI_PASSIVE     result
    #      ''         Y         192.168.x.y
    #      ''         N         192.168.x.y
    #     None        Y         0.0.0.0
    #     None        N         127.0.0.1
    # But since you can get the same effect with an explicit
    # '0.0.0.0', we deny both the empty string and None as values.
    raise ValueError(
        "Host values of '' or None are not allowed. "
        "Use '0.0.0.0' (IPv4) or '::' (IPv6) instead "
        'to listen on all active interfaces.',
    )
self._bind_addr = value
def bind_addr(self, value)
Set the interface on which to listen for connections.
4.996041
4.882638
1.023226
try:
    self.start()
except (KeyboardInterrupt, IOError):
    # The time.sleep call might raise
    # "IOError: [Errno 4] Interrupted function call" on KBInt.
    self.error_log('Keyboard Interrupt: shutting down')
    self.stop()
    raise
except SystemExit:
    self.error_log('SystemExit raised: shutting down')
    self.stop()
    raise
def safe_start(self)
Run the server forever, and stop it cleanly on exit.
5.175201
4.95408
1.044634
self._interrupt = None

if self.software is None:
    self.software = '%s Server' % self.version

# Select the appropriate socket
self.socket = None
msg = 'No socket could be created'
if os.getenv('LISTEN_PID', None):
    # systemd socket activation
    self.socket = socket.fromfd(3, socket.AF_INET, socket.SOCK_STREAM)
elif isinstance(self.bind_addr, six.string_types):
    # AF_UNIX socket
    try:
        self.bind_unix_socket(self.bind_addr)
    except socket.error as serr:
        msg = '%s -- (%s: %s)' % (msg, self.bind_addr, serr)
        six.raise_from(socket.error(msg), serr)
else:
    # AF_INET or AF_INET6 socket
    # Get the correct address family for our host (allows IPv6
    # addresses)
    host, port = self.bind_addr
    try:
        info = socket.getaddrinfo(
            host, port, socket.AF_UNSPEC,
            socket.SOCK_STREAM, 0, socket.AI_PASSIVE,
        )
    except socket.gaierror:
        sock_type = socket.AF_INET
        bind_addr = self.bind_addr

        if ':' in host:
            sock_type = socket.AF_INET6
            bind_addr = bind_addr + (0, 0)

        info = [(sock_type, socket.SOCK_STREAM, 0, '', bind_addr)]

    for res in info:
        af, socktype, proto, canonname, sa = res
        try:
            self.bind(af, socktype, proto)
            break
        except socket.error as serr:
            msg = '%s -- (%s: %s)' % (msg, sa, serr)
            if self.socket:
                self.socket.close()
            self.socket = None

    if not self.socket:
        raise socket.error(msg)

# Timeout so KeyboardInterrupt can be caught on Win32
self.socket.settimeout(1)
self.socket.listen(self.request_queue_size)

# Create worker threads
self.requests.start()

self.ready = True
self._start_time = time.time()
def prepare(self)
Prepare server for serving requests. It binds the socket's port, sets up the socket to ``listen()``, and does other preparatory work.
2.982574
2.873788
1.037855
while self.ready:
    try:
        self.tick()
    except (KeyboardInterrupt, SystemExit):
        raise
    except Exception:
        self.error_log(
            'Error in HTTPServer.tick', level=logging.ERROR,
            traceback=True,
        )

    if self.interrupt:
        while self.interrupt is True:
            # Wait for self.stop() to complete. See _set_interrupt.
            time.sleep(0.1)
        if self.interrupt:
            raise self.interrupt
def serve(self)
Serve requests, after invoking :func:`prepare()`.
5.559775
5.483487
1.013912
# Override this in subclasses as desired
sys.stderr.write(msg + '\n')
sys.stderr.flush()

if traceback:
    tblines = traceback_.format_exc()
    sys.stderr.write(tblines)
    sys.stderr.flush()
def error_log(self, msg='', level=20, traceback=False)
Write error message to log.

Args:
    msg (str): error message
    level (int): logging level
    traceback (bool): add traceback to output or not
4.386037
5.999669
0.731047
sock = self.prepare_socket(
    self.bind_addr,
    family, type, proto,
    self.nodelay, self.ssl_adapter,
)
sock = self.socket = self.bind_socket(sock, self.bind_addr)
self.bind_addr = self.resolve_real_bind_addr(sock)
return sock
def bind(self, family, type, proto=0)
Create (or recreate) the actual socket object.
5.316588
5.404225
0.983784
if IS_WINDOWS:
    raise ValueError(  # or RuntimeError?
        'AF_UNIX sockets are not supported under Windows.',
    )

fs_permissions = 0o777  # TODO: allow changing mode

try:
    # Make possible reusing the socket...
    os.unlink(self.bind_addr)
except OSError:
    # The socket file does not exist yet, which is the goal anyway.
    pass

sock = self.prepare_socket(
    bind_addr=bind_addr,
    family=socket.AF_UNIX, type=socket.SOCK_STREAM, proto=0,
    nodelay=self.nodelay, ssl_adapter=self.ssl_adapter,
)

try:
    # Allow everyone access the socket...
    os.fchmod(sock.fileno(), fs_permissions)
    FS_PERMS_SET = True
except OSError:
    FS_PERMS_SET = False

try:
    sock = self.bind_socket(sock, bind_addr)
except socket.error:
    sock.close()
    raise

bind_addr = self.resolve_real_bind_addr(sock)

try:
    if not FS_PERMS_SET:
        try:
            os.lchmod(bind_addr, fs_permissions)
        except AttributeError:
            os.chmod(bind_addr, fs_permissions, follow_symlinks=False)
        FS_PERMS_SET = True
except OSError:
    pass

if not FS_PERMS_SET:
    self.error_log(
        'Failed to set socket fs mode permissions',
        level=logging.WARNING,
    )

self.bind_addr = bind_addr
self.socket = sock
return sock
def bind_unix_socket(self, bind_addr)
Create (or recreate) a UNIX socket object.
3.94514
3.932322
1.00326
sock = socket.socket(family, type, proto)
prevent_socket_inheritance(sock)

host, port = bind_addr[:2]
IS_EPHEMERAL_PORT = port == 0

if not (IS_WINDOWS or IS_EPHEMERAL_PORT):
    sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
if nodelay and not isinstance(bind_addr, str):
    sock.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)

if ssl_adapter is not None:
    sock = ssl_adapter.bind(sock)

# If listening on the IPV6 any address ('::' = IN6ADDR_ANY),
# activate dual-stack. See
# https://github.com/cherrypy/cherrypy/issues/871.
listening_ipv6 = (
    hasattr(socket, 'AF_INET6')
    and family == socket.AF_INET6
    and host in ('::', '::0', '::0.0.0.0')
)
if listening_ipv6:
    try:
        sock.setsockopt(
            socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0,
        )
    except (AttributeError, socket.error):
        # Apparently, the socket option is not available in
        # this machine's TCP stack
        pass

return sock
def prepare_socket(bind_addr, family, type, proto, nodelay, ssl_adapter)
Create and prepare the socket object.
3.370959
3.41372
0.987474
# FIXME: keep requested bind_addr separate real bound_addr (port
# is different in case of ephemeral port 0)
bind_addr = socket_.getsockname()
if socket_.family in (
    # Windows doesn't have socket.AF_UNIX, so not using it in check
    socket.AF_INET,
    socket.AF_INET6,
):
    return bind_addr[:2]
return bind_addr
def resolve_real_bind_addr(socket_)
Retrieve actual bind addr from bound socket.
8.557335
8.00664
1.06878
try:
    s, addr = self.socket.accept()
    if self.stats['Enabled']:
        self.stats['Accepts'] += 1
    if not self.ready:
        return

    prevent_socket_inheritance(s)
    if hasattr(s, 'settimeout'):
        s.settimeout(self.timeout)

    mf = MakeFile
    ssl_env = {}
    # if ssl cert and key are set, we try to be a secure HTTP server
    if self.ssl_adapter is not None:
        try:
            s, ssl_env = self.ssl_adapter.wrap(s)
        except errors.NoSSLError:
            msg = (
                'The client sent a plain HTTP request, but '
                'this server only speaks HTTPS on this port.'
            )
            buf = [
                '%s 400 Bad Request\r\n' % self.protocol,
                'Content-Length: %s\r\n' % len(msg),
                'Content-Type: text/plain\r\n\r\n',
                msg,
            ]

            sock_to_make = s if six.PY3 else s._sock
            wfile = mf(sock_to_make, 'wb', io.DEFAULT_BUFFER_SIZE)
            try:
                wfile.write(''.join(buf).encode('ISO-8859-1'))
            except socket.error as ex:
                if ex.args[0] not in errors.socket_errors_to_ignore:
                    raise
            return
        if not s:
            return
        mf = self.ssl_adapter.makefile
        # Re-apply our timeout since we may have a new socket object
        if hasattr(s, 'settimeout'):
            s.settimeout(self.timeout)

    conn = self.ConnectionClass(self, s, mf)

    if not isinstance(self.bind_addr, six.string_types):
        # optional values
        # Until we do DNS lookups, omit REMOTE_HOST
        if addr is None:  # sometimes this can happen
            # figure out if AF_INET or AF_INET6.
            if len(s.getsockname()) == 2:
                # AF_INET
                addr = ('0.0.0.0', 0)
            else:
                # AF_INET6
                addr = ('::', 0)
        conn.remote_addr = addr[0]
        conn.remote_port = addr[1]

    conn.ssl_env = ssl_env

    try:
        self.requests.put(conn)
    except queue.Full:
        # Just drop the conn. TODO: write 503 back?
        conn.close()
        return
except socket.timeout:
    # The only reason for the timeout in start() is so we can
    # notice keyboard interrupts on Win32, which don't interrupt
    # accept() by default
    return
except socket.error as ex:
    if self.stats['Enabled']:
        self.stats['Socket Errors'] += 1
    if ex.args[0] in errors.socket_error_eintr:
        # I *think* this is right. EINTR should occur when a signal
        # is received during the accept() call; all docs say retry
        # the call, and I *think* I'm reading it right that Python
        # will then go ahead and poll for and handle the signal
        # elsewhere. See
        # https://github.com/cherrypy/cherrypy/issues/707.
        return
    if ex.args[0] in errors.socket_errors_nonblocking:
        # Just try again. See
        # https://github.com/cherrypy/cherrypy/issues/479.
        return
    if ex.args[0] in errors.socket_errors_to_ignore:
        # Our socket was closed.
        # See https://github.com/cherrypy/cherrypy/issues/686.
        return
    raise
def tick(self)
Accept a new connection and put it on the Queue.
4.098606
3.998963
1.024917
self._interrupt = True
self.stop()
self._interrupt = interrupt
def interrupt(self, interrupt)
Perform the shutdown of this server and save the exception.
8.970497
7.755051
1.15673
self.ready = False
if self._start_time is not None:
    self._run_time += (time.time() - self._start_time)
self._start_time = None

sock = getattr(self, 'socket', None)
if sock:
    if not isinstance(self.bind_addr, six.string_types):
        # Touch our own socket to make accept() return immediately.
        try:
            host, port = sock.getsockname()[:2]
        except socket.error as ex:
            if ex.args[0] not in errors.socket_errors_to_ignore:
                # Changed to use error code and not message
                # See
                # https://github.com/cherrypy/cherrypy/issues/860.
                raise
        else:
            # Note that we're explicitly NOT using AI_PASSIVE,
            # here, because we want an actual IP to touch.
            # localhost won't work if we've bound to a public IP,
            # but it will if we bound to '0.0.0.0' (INADDR_ANY).
            for res in socket.getaddrinfo(
                host, port, socket.AF_UNSPEC,
                socket.SOCK_STREAM,
            ):
                af, socktype, proto, canonname, sa = res
                s = None
                try:
                    s = socket.socket(af, socktype, proto)
                    # See
                    # https://groups.google.com/group/cherrypy-users/
                    # browse_frm/thread/bbfe5eb39c904fe0
                    s.settimeout(1.0)
                    s.connect((host, port))
                    s.close()
                except socket.error:
                    if s:
                        s.close()
    if hasattr(sock, 'close'):
        sock.close()
    self.socket = None

self.requests.stop(self.shutdown_timeout)
def stop(self)
Gracefully shut down a server that is serving forever.
3.884644
3.794996
1.023623
self.server.stats['Worker Threads'][self.getName()] = self.stats
try:
    self.ready = True
    while True:
        conn = self.server.requests.get()
        if conn is _SHUTDOWNREQUEST:
            return

        self.conn = conn
        if self.server.stats['Enabled']:
            self.start_time = time.time()
        try:
            conn.communicate()
        finally:
            conn.close()
            if self.server.stats['Enabled']:
                self.requests_seen += self.conn.requests_seen
                self.bytes_read += self.conn.rfile.bytes_read
                self.bytes_written += self.conn.wfile.bytes_written
                self.work_time += time.time() - self.start_time
                self.start_time = None
            self.conn = None
except (KeyboardInterrupt, SystemExit) as ex:
    self.server.interrupt = ex
def run(self)
Process incoming HTTP connections. Retrieves incoming connections from thread pool.
3.534935
3.344212
1.057031
self._queue.put(obj, block=True, timeout=self._queue_put_timeout)
if obj is _SHUTDOWNREQUEST:
    return
def put(self, obj)
Put request into queue.

Args:
    obj (cheroot.server.HTTPConnection): HTTP connection
        waiting to be processed
6.656611
8.126086
0.819166
if self.max > 0:
    budget = max(self.max - len(self._threads), 0)
else:
    # self.max <= 0 indicates no maximum
    budget = float('inf')

n_new = min(amount, budget)

workers = [self._spawn_worker() for i in range(n_new)]
while not all(worker.ready for worker in workers):
    time.sleep(.1)
self._threads.extend(workers)
def grow(self, amount)
Spawn new worker threads (not above self.max).
4.12277
3.386707
1.217339
# Grow/shrink the pool if necessary.
# Remove any dead threads from our list
for t in self._threads:
    if not t.isAlive():
        self._threads.remove(t)
        amount -= 1

# calculate the number of threads above the minimum
n_extra = max(len(self._threads) - self.min, 0)

# don't remove more than amount
n_to_remove = min(amount, n_extra)

# put shutdown requests on the queue equal to the number of threads
# to remove. As each request is processed by a worker, that worker
# will terminate and be culled from the list.
for n in range(n_to_remove):
    self._queue.put(_SHUTDOWNREQUEST)
def shrink(self, amount)
Kill off worker threads (not below self.min).
5.515808
5.114064
1.078557
# Must shut down threads here so the code that calls
# this method can know when all threads are stopped.
for worker in self._threads:
    self._queue.put(_SHUTDOWNREQUEST)

# Don't join currentThread (when stop is called inside a request).
current = threading.currentThread()
if timeout is not None and timeout >= 0:
    endtime = time.time() + timeout
while self._threads:
    worker = self._threads.pop()
    if worker is not current and worker.isAlive():
        try:
            if timeout is None or timeout < 0:
                worker.join()
            else:
                remaining_time = endtime - time.time()
                if remaining_time > 0:
                    worker.join(remaining_time)
                if worker.isAlive():
                    # We exhausted the timeout.
                    # Forcibly shut down the socket.
                    c = worker.conn
                    if c and not c.rfile.closed:
                        try:
                            c.socket.shutdown(socket.SHUT_RD)
                        except TypeError:
                            # pyOpenSSL sockets don't take an arg
                            c.socket.shutdown()
                    worker.join()
        except (
            AssertionError,
            # Ignore repeated Ctrl-C.
            # See
            # https://github.com/cherrypy/cherrypy/issues/691.
            KeyboardInterrupt,
        ):
            pass
def stop(self, timeout=5)
Terminate all worker threads.

Args:
    timeout (int): time to wait for threads to stop gracefully
4.856451
4.918696
0.987345
ignored_fields = set(['ctx', 'decorator_list', 'names', 'returns'])
fields = node._fields
# The fields of ast.Call are in the wrong order.
if isinstance(node, ast.Call):
    fields = ('func', 'args', 'starargs', 'keywords', 'kwargs')
for name in reversed(fields):
    if name in ignored_fields:
        continue

    try:
        last_field = getattr(node, name)
    except AttributeError:
        continue

    # Ignore non-AST objects like "is_async", "level" and "nl".
    if isinstance(last_field, ast.AST):
        return last_field
    elif isinstance(last_field, list) and last_field:
        return last_field[-1]
return None
def _get_last_child_with_lineno(node)
Return the last direct child of `node` that has a lineno attribute, or None if `node` has no such children. Almost all node._field lists are sorted by the order in which they appear in source code. For some nodes however, we have to skip some fields that either don't have line numbers (e.g., "ctx" and "names") or that are in the wrong position (e.g., "decorator_list" and "returns"). Then we choose the first field (i.e., the field with the highest line number) that actually contains a node. If it contains a list of nodes, we return the last one.
3.920639
3.402422
1.152308
max_lineno = node.lineno
while True:
    last_child = _get_last_child_with_lineno(node)
    if last_child is None:
        return max_lineno
    else:
        try:
            max_lineno = max(max_lineno, last_child.lineno)
        except AttributeError:
            pass
    node = last_child
def get_last_line_number(node)
Estimate last line number of the given AST node. The estimate is based on the line number of the last descendant of `node` that has a lineno attribute. Therefore, it underestimates the size of code ending with, e.g., multiline strings and comments. When traversing the tree, we may see a mix of nodes with line numbers and nodes without line numbers. We therefore store the maximum line number seen so far and report it at the end. A more accurate (but also slower to compute) estimate would traverse all children, instead of just the last one, since choosing the last one may lead to a path that ends with a node without line number.
2.625901
2.769073
0.948296
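The two vulture helpers above lean on the fact that, in source order, an AST node's last child carries the largest line number. A quick stdlib-only illustration of that assumption, independent of vulture itself:

import ast

tree = ast.parse("def f():\n    x = 1\n    return x\n")
func = tree.body[0]
# The last element of the function body is the `return` on line 3;
# that is the child the helpers above would walk into.
print(func.body[-1].lineno)  # -> 3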
if isinstance(node, ast.BoolOp):
    results = [_safe_eval(value, default) for value in node.values]
    if isinstance(node.op, ast.And):
        return all(results)
    else:
        return any(results)
elif isinstance(node, ast.UnaryOp) and isinstance(node.op, ast.Not):
    return not _safe_eval(node.operand, not default)
else:
    try:
        return ast.literal_eval(node)
    except ValueError:
        return default
def _safe_eval(node, default)
Safely evaluate the Boolean expression under the given AST node. Substitute `default` for all sub-expressions that cannot be evaluated (because variables or functions are undefined). We could use eval() to evaluate more sub-expressions. However, this function is not safe for arbitrary Python code. Even after overwriting the "__builtins__" dictionary, the original dictionary can be restored (https://nedbatchelder.com/blog/201206/eval_really_is_dangerous.html).
1.764357
1.914756
0.921453
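A runnable sketch of the `_safe_eval` record above; the function body is pasted verbatim so the example is self-contained, and the sample expression is hypothetical:

import ast

def _safe_eval(node, default):
    if isinstance(node, ast.BoolOp):
        results = [_safe_eval(value, default) for value in node.values]
        if isinstance(node.op, ast.And):
            return all(results)
        else:
            return any(results)
    elif isinstance(node, ast.UnaryOp) and isinstance(node.op, ast.Not):
        return not _safe_eval(node.operand, not default)
    else:
        try:
            return ast.literal_eval(node)
        except ValueError:
            return default

# `some_flag` cannot be evaluated, so it falls back to the default
# (True); the literal False then decides the conjunction.
node = ast.parse('some_flag and False', mode='eval').body
print(_safe_eval(node, True))  # -> False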
modules = []
for path in paths:
    path = os.path.abspath(path)
    if toplevel and path.endswith('.pyc'):
        sys.exit('.pyc files are not supported: {0}'.format(path))
    if os.path.isfile(path) and (path.endswith('.py') or toplevel):
        modules.append(path)
    elif os.path.isdir(path):
        subpaths = [
            os.path.join(path, filename)
            for filename in sorted(os.listdir(path))]
        modules.extend(get_modules(subpaths, toplevel=False))
    elif toplevel:
        sys.exit('Error: {0} could not be found.'.format(path))
return modules
def get_modules(paths, toplevel=True)
Take files from the command line even if they don't end with .py.
2.195931
2.060853
1.065545
return ( varname in IGNORED_VARIABLE_NAMES or (varname.startswith('_') and not varname.startswith('__')) or _is_special_name(varname))
def _ignore_variable(filename, varname)
Ignore _ (Python idiom), _x (pylint convention) and __x__ (special variable or method), but not __x.
4.280729
4.288848
0.998107
if not 0 <= min_confidence <= 100:
    raise ValueError('min_confidence must be between 0 and 100.')

def by_name(item):
    return (item.filename.lower(), item.first_lineno)

def by_size(item):
    return (item.size,) + by_name(item)

unused_code = (self.unused_attrs + self.unused_classes +
               self.unused_funcs + self.unused_imports +
               self.unused_props + self.unused_vars +
               self.unreachable_code)

confidently_unused = [obj for obj in unused_code
                      if obj.confidence >= min_confidence]

return sorted(confidently_unused,
              key=by_size if sort_by_size else by_name)
def get_unused_code(self, min_confidence=0, sort_by_size=False)
Return ordered list of unused Item objects.
3.132479
2.940758
1.065194
for item in self.get_unused_code(
        min_confidence=min_confidence, sort_by_size=sort_by_size):
    print(item.get_whitelist_string() if make_whitelist
          else item.get_report(add_size=sort_by_size))
    self.found_dead_code_or_error = True
return self.found_dead_code_or_error
def report(self, min_confidence=0, sort_by_size=False, make_whitelist=False)
Print ordered list of Item objects to stdout.
3.983023
3.794623
1.049649
assert isinstance(node, (ast.Import, ast.ImportFrom))
for name_and_alias in node.names:
    # Store only top-level module name ("os.path" -> "os").
    # We can't easily detect when "os.path" is used.
    name = name_and_alias.name.partition('.')[0]
    alias = name_and_alias.asname
    self._define(
        self.defined_imports, alias or name, node,
        confidence=90, ignore=_ignore_import)
    if alias is not None:
        self.used_names.add(name_and_alias.name)
def _add_aliases(self, node)
We delegate to this method instead of using visit_alias() to have access to line numbers and to filter imports from __future__.
5.813469
5.563484
1.044933
# Old format strings.
self.used_names |= set(re.findall(r'\%\((\w+)\)', node.s))

def is_identifier(s):
    return bool(re.match(r'[a-zA-Z_][a-zA-Z0-9_]*', s))

# New format strings.
parser = string.Formatter()
try:
    names = [name for _, name, _, _ in parser.parse(node.s) if name]
except ValueError:
    # Invalid format string.
    names = []

for field_name in names:
    # Remove brackets and contents: "a[0][b].c[d].e" -> "a.c.e".
    # "a.b.c" -> name = "a", attributes = ["b", "c"]
    name_and_attrs = re.sub(r'\[\w*\]', '', field_name).split('.')
    name = name_and_attrs[0]
    if is_identifier(name):
        self.used_names.add(name)
    for attr in name_and_attrs[1:]:
        if is_identifier(attr):
            self.used_attrs.add(attr)
def visit_Str(self, node)
Parse variable names in format strings:

'%(my_var)s' % locals()
'{my_var}'.format(**locals())
3.299778
3.086992
1.06893
for index, node in enumerate(ast_list):
    if isinstance(node, (ast.Break, ast.Continue, ast.Raise,
                         ast.Return)):
        try:
            first_unreachable_node = ast_list[index + 1]
        except IndexError:
            continue
        class_name = node.__class__.__name__.lower()
        self._define(
            self.unreachable_code,
            class_name,
            first_unreachable_node,
            last_node=ast_list[-1],
            message="unreachable code after '{class_name}'".format(
                **locals()),
            confidence=100)
        return
def _handle_ast_list(self, ast_list)
Find unreachable nodes in the given sequence of ast nodes.
4.311241
3.839388
1.122898
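What the `_handle_ast_list` record above detects, shown on a plain stdlib AST; the snippet is a hypothetical input, not vulture code:

import ast

tree = ast.parse("def f():\n    return 1\n    print('never')\n")
body = tree.body[0].body
# body[0] is the Return node; everything after it is unreachable,
# which is exactly the pattern the visitor above reports.
print(type(body[0]).__name__, '->', type(body[1]).__name__)
# Return -> Expr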
for _, value in ast.iter_fields(node):
    if isinstance(value, list):
        self._handle_ast_list(value)
        for item in value:
            if isinstance(item, ast.AST):
                self.visit(item)
    elif isinstance(value, ast.AST):
        self.visit(value)
def generic_visit(self, node)
Called if no explicit visitor function exists for a node.
2.294183
2.34431
0.978617
name = worder.get_name_at(resource, offset)
this_pymodule = project.get_pymodule(resource)
primary, pyname = rope.base.evaluate.eval_location2(
    this_pymodule, offset)

def is_match(occurrence):
    return unsure

finder = occurrences.create_finder(
    project, name, pyname, unsure=is_match,
    in_hierarchy=in_hierarchy, instance=primary)
if resources is None:
    resources = project.get_python_files()
job_set = task_handle.create_jobset('Finding Occurrences',
                                    count=len(resources))
return _find_locations(finder, resources, job_set)
def find_occurrences(project, resource, offset, unsure=False, resources=None, in_hierarchy=False, task_handle=taskhandle.NullTaskHandle())
Return a list of `Location`\s. If `unsure` is `True`, possible matches are returned, too. You can use `Location.unsure` to see which are unsure occurrences. `resources` can be a list of `rope.base.resource.File`\s that should be searched for occurrences; if `None`, all python files in the project are searched.
6.928961
6.887112
1.006076
name = worder.get_name_at(resource, offset)
this_pymodule = project.get_pymodule(resource)
pyname = rope.base.evaluate.eval_location(this_pymodule, offset)
if pyname is not None:
    pyobject = pyname.get_object()
    if not isinstance(pyobject, rope.base.pyobjects.PyFunction) or \
            pyobject.get_kind() != 'method':
        raise exceptions.BadIdentifierError('Not a method!')
else:
    raise exceptions.BadIdentifierError('Cannot resolve the identifier!')

def is_defined(occurrence):
    if not occurrence.is_defined():
        return False

def not_self(occurrence):
    if occurrence.get_pyname().get_object() == pyname.get_object():
        return False

filters = [is_defined, not_self,
           occurrences.InHierarchyFilter(pyname, True)]
finder = occurrences.Finder(project, name, filters=filters)
if resources is None:
    resources = project.get_python_files()
job_set = task_handle.create_jobset('Finding Implementations',
                                    count=len(resources))
return _find_locations(finder, resources, job_set)
def find_implementations(project, resource, offset, resources=None, task_handle=taskhandle.NullTaskHandle())
Find the places a given method is overridden or implemented. Returns a list of `Location`\s.
4.543427
4.390358
1.034865
fixer = fixsyntax.FixSyntax(project, code, resource, maxfixes)
pyname = fixer.pyname_at(offset)
if pyname is not None:
    module, lineno = pyname.get_definition_location()
    name = rope.base.worder.Worder(code).get_word_at(offset)
    if lineno is not None:
        start = module.lines.get_line_start(lineno)

        def check_offset(occurrence):
            if occurrence.offset < start:
                return False

        pyname_filter = occurrences.PyNameFilter(pyname)
        finder = occurrences.Finder(project, name,
                                    [check_offset, pyname_filter])
        for occurrence in finder.find_occurrences(pymodule=module):
            return Location(occurrence)
def find_definition(project, code, offset, resource=None, maxfixes=1)
Return the definition location of the python name at `offset`. A `Location` object is returned if the definition location can be determined; otherwise ``None`` is returned.
5.776089
5.46106
1.057686
pymodule = project.get_pymodule(resource)
finder = _BadAccessFinder(pymodule)
ast.walk(pymodule.get_ast(), finder)
return finder.errors
def find_errors(project, resource)
Find possible bad name and attribute accesses. It returns a list of `Error`\s.
7.703657
7.222857
1.066566
if pyobject is None:
    return ('none',)
object_type = type(pyobject)
try:
    method = getattr(self, object_type.__name__ + '_to_textual')
    return method(pyobject)
except AttributeError:
    return ('unknown',)
def transform(self, pyobject)
Transform a `PyObject` to textual form
3.35851
2.87961
1.166307
if textual is None:
    return None
type = textual[0]
try:
    method = getattr(self, type + '_to_pyobject')
    return method(textual)
except AttributeError:
    return None
def transform(self, textual)
Transform an object from textual form to `PyObject`
4.271918
3.21376
1.329258
return patch_ast(ast.parse(source), source, sorted_children)
def get_patched_ast(source, sorted_children=False)
Adds ``region`` and ``sorted_children`` fields to nodes. Adds the ``sorted_children`` field only if `sorted_children` is True.
5.187381
9.526042
0.544547
if hasattr(node, 'region'):
    return node
walker = _PatchingASTWalker(source, children=sorted_children)
ast.call_for_nodes(node, walker)
return node
def patch_ast(node, source, sorted_children=False)
Patches the given node. After calling, each node in `node` will have a new field named `region` that is a tuple containing the start and end offsets of the code that generated it. If `sorted_children` is true, a `sorted_children` field will be created for each node, too. It is a list containing child nodes as well as whitespaces and comments that occur between them.
7.663242
8.324743
0.920538
result = []
for child in patched_ast_node.sorted_children:
    if isinstance(child, ast.AST):
        result.append(write_ast(child))
    else:
        result.append(child)
return ''.join(result)
def write_ast(patched_ast_node)
Extract source from a patched AST node with a `sorted_children` field. If the node was patched with `sorted_children` turned off, you can use the `node_region` function for obtaining code using module source code.
2.433344
2.072146
1.174311
opens, closes = self._count_needed_parens(formats)
old_end = self.source.offset
new_end = None
for i in range(closes):
    new_end = self.source.consume(')')[1]
if new_end is not None:
    if self.children:
        children.append(self.source[old_end:new_end])
new_start = start
for i in range(opens):
    new_start = self.source.rfind_token('(', 0, new_start)
if new_start != start:
    if self.children:
        children.appendleft(self.source[new_start:start])
    start = new_start
return start
def _handle_parens(self, children, start, formats)
Changes `children` and returns new start
3.454901
3.368849
1.025543
if start is None:
    start = self.offset
try:
    comment_index = self.source.rindex('#', start, offset)
except ValueError:
    return True
try:
    new_line_index = self.source.rindex('\n', start, offset)
except ValueError:
    return False
return comment_index < new_line_index
def _good_token(self, token, offset, start=None)
Checks whether consumed token is in comments
2.590503
2.354866
1.100064
pattern = get_block_start_patterns()
for i in range(lineno, 0, -1):
    match = pattern.search(lines.get_line(i))
    if match is not None and \
            count_line_indents(lines.get_line(i)) <= maximum_indents:
        striped = match.string.lstrip()
        # Maybe we're in a list comprehension or generator expression
        if i > 1 and striped.startswith('if') or \
                striped.startswith('for'):
            bracs = 0
            for j in range(i, min(i + 5, lines.length() + 1)):
                for c in lines.get_line(j):
                    if c == '#':
                        break
                    if c in '[(':
                        bracs += 1
                    if c in ')]':
                        bracs -= 1
                        if bracs < 0:
                            break
                if bracs < 0:
                    break
            if bracs < 0:
                continue
        return i
return 1
def get_block_start(lines, lineno, maximum_indents=80)
Approximate block start
3.336843
3.25586
1.024873
size = self.lines.length() + 1
self._starts = [None] * size
self._ends = [None] * size
for start, end in self._generate(self.lines):
    self._starts[start] = True
    self._ends[end] = True
def _init_logicals(self)
Should initialize _starts and _ends attributes
4.37719
3.308455
1.323031
if offset is None:
    return MoveModule(project, resource)
this_pymodule = project.get_pymodule(resource)
pyname = evaluate.eval_location(this_pymodule, offset)
if pyname is not None:
    pyobject = pyname.get_object()
    if isinstance(pyobject, pyobjects.PyModule) or \
            isinstance(pyobject, pyobjects.PyPackage):
        return MoveModule(project, pyobject.get_resource())
    if isinstance(pyobject, pyobjects.PyFunction) and \
            isinstance(pyobject.parent, pyobjects.PyClass):
        return MoveMethod(project, resource, offset)
    if isinstance(pyobject, pyobjects.PyDefinedObject) and \
            isinstance(pyobject.parent, pyobjects.PyModule) or \
            isinstance(pyname, pynames.AssignedName):
        return MoveGlobal(project, resource, offset)
raise exceptions.RefactoringError(
    'Move only works on global classes/functions/variables, '
    'modules and methods.')
def create_move(project, resource, offset=None)
A factory for creating Move objects. Based on `resource` and `offset`, return one of `MoveModule`, `MoveGlobal` or `MoveMethod` for performing move refactoring.
3.292477
2.922634
1.126544
changes = ChangeSet('Moving method <%s>' % self.method_name)
if resources is None:
    resources = self.project.get_python_files()
if new_name is None:
    new_name = self.get_method_name()
resource1, start1, end1, new_content1 = \
    self._get_changes_made_by_old_class(dest_attr, new_name)
collector1 = codeanalyze.ChangeCollector(resource1.read())
collector1.add_change(start1, end1, new_content1)

resource2, start2, end2, new_content2 = \
    self._get_changes_made_by_new_class(dest_attr, new_name)
if resource1 == resource2:
    collector1.add_change(start2, end2, new_content2)
else:
    collector2 = codeanalyze.ChangeCollector(resource2.read())
    collector2.add_change(start2, end2, new_content2)
    result = collector2.get_changed()
    import_tools = importutils.ImportTools(self.project)
    new_imports = self._get_used_imports(import_tools)
    if new_imports:
        goal_pymodule = libutils.get_string_module(
            self.project, result, resource2)
        result = _add_imports_to_module(
            import_tools, goal_pymodule, new_imports)
    if resource2 in resources:
        changes.add_change(ChangeContents(resource2, result))

if resource1 in resources:
    changes.add_change(ChangeContents(resource1,
                                      collector1.get_changed()))
return changes
def get_changes(self, dest_attr, new_name=None, resources=None, task_handle=taskhandle.NullTaskHandle())
Return the changes needed for this refactoring.

Parameters:

- `dest_attr`: the name of the destination attribute
- `new_name`: the name of the new method; if `None` uses the old name
- `resources` can be a list of `rope.base.resources.File`\s to apply this refactoring on. If `None`, the restructuring will be applied to all python files.
3.356292
3.202459
1.048036
scope = self.parent.get_scope()
if isinstance(self.parent, PyClass):
    for decorator in self.decorators:
        pyname = rope.base.evaluate.eval_node(scope, decorator)
        if pyname == rope.base.builtins.builtins['staticmethod']:
            return 'staticmethod'
        if pyname == rope.base.builtins.builtins['classmethod']:
            return 'classmethod'
    return 'method'
return 'function'
def get_kind(self)
Get function type. It returns one of 'function', 'method', 'staticmethod' or 'classmethod' strs.
4.062504
3.649673
1.113115
method_name = '_' + node.__class__.__name__
method = getattr(walker, method_name, None)
if method is not None:
    if isinstance(node, _ast.ImportFrom) and node.module is None:
        # In python < 2.7 ``node.module == ''`` for relative imports
        # but for python 2.7 it is None. Generalizing it to ''.
        node.module = ''
    return method(node)
for child in get_child_nodes(node):
    walk(child, walker)
def walk(node, walker)
Walk the syntax tree
4.095876
4.114665
0.995434
result = callback(node)
if recursive and not result:
    for child in get_child_nodes(node):
        call_for_nodes(child, callback, recursive)
def call_for_nodes(node, callback, recursive=False)
If callback returns `True` the child nodes are skipped
2.80617
2.880458
0.974209
# there is a bug in cygwin for os.path.abspath() for abs paths
if sys.platform == 'cygwin':
    if path[1:3] == ':\\':
        return path
    elif path[1:3] == ':/':
        path = "/cygdrive/" + path[0] + path[2:]
    return os.path.abspath(os.path.expanduser(path))
return os.path.realpath(os.path.abspath(os.path.expanduser(path)))
def _realpath(path)
Return the real path of `path`. Is equivalent to ``realpath(abspath(expanduser(path)))``. Of particular notice is the hack dealing with the unfortunate situation of running native-Windows python (os.name == 'nt') inside of Cygwin (abspath starts with '/'), which apparently normal os.path.realpath completely messes up.
3.082964
3.139811
0.981895
path = self._get_resource_path(resource_name)
if not os.path.exists(path):
    raise exceptions.ResourceNotFoundError(
        'Resource <%s> does not exist' % resource_name)
elif os.path.isfile(path):
    return File(self, resource_name)
elif os.path.isdir(path):
    return Folder(self, resource_name)
else:
    raise exceptions.ResourceNotFoundError('Unknown resource '
                                           + resource_name)
def get_resource(self, resource_name)
Get a resource in a project. `resource_name` is the path of a resource in a project. It is the path of a resource relative to project root. Project root folder address is an empty string. If the resource does not exist, an `exceptions.ResourceNotFoundError` exception is raised. Use `get_file()` and `get_folder()` when you need to get nonexistent `Resource`\s.
2.38893
2.18631
1.092677
# check if this is a builtin module
pymod = self.pycore.builtin_module(name)
if pymod is not None:
    return pymod
module = self.find_module(name, folder)
if module is None:
    raise ModuleNotFoundError('Module %s not found' % name)
return self.pycore.resource_to_pyobject(module)
def get_module(self, name, folder=None)
Returns a `PyObject` if the module was found.
3.538385
3.3013
1.071816
if self.root is None:
    return []
result = list(self._custom_source_folders)
result.extend(self.pycore._find_source_folders(self.root))
return result
def get_source_folders(self)
Returns project source folders
5.750186
5.253496
1.094545
for observer in list(self.observers):
    observer.validate(folder)
def validate(self, folder)
Validate files and folders contained in this folder It validates all of the files and folders contained in this folder if some observers are interested in them.
8.953312
5.965432
1.500866
self.history.do(changes, task_handle=task_handle)
def do(self, changes, task_handle=taskhandle.NullTaskHandle())
Apply the changes in a `ChangeSet` Most of the time you call this function for committing the changes for a refactoring.
5.527226
7.614398
0.725891
for src in self.get_source_folders():
    module = _find_module_in_folder(src, modname)
    if module is not None:
        return module
for src in self.get_python_path_folders():
    module = _find_module_in_folder(src, modname)
    if module is not None:
        return module
if folder is not None:
    module = _find_module_in_folder(folder, modname)
    if module is not None:
        return module
return None
def find_module(self, modname, folder=None)
Returns a resource corresponding to the given module; returns None if it cannot be found.
1.930735
1.925003
1.002978
return [resource for resource in self.get_files() if self.pycore.is_python_file(resource)]
def get_python_files(self)
Returns all python files available in the project
8.691216
7.512955
1.156831
if self.lineno is None and self.assignments:
    self.lineno = self.assignments[0].get_lineno()
return (self.module, self.lineno)
def get_definition_location(self)
Returns a (module, lineno) tuple
5.428199
3.476004
1.56162
return rope.base.oi.soi.get_passed_objects( self.pyfunction, self.index)
def get_objects(self)
Returns the list of objects passed as this parameter
39.343033
33.201466
1.184979
result = []
for project, refactoring in zip(self.projects, self.refactorings):
    args, kwds = self._resources_for_args(project, args, kwds)
    result.append((project, refactoring.get_changes(*args, **kwds)))
return result
def get_all_changes(self, *args, **kwds)
Return a list of (project, changes) tuples.
4.211558
4.107675
1.02529
if resources is None:
    resources = self.project.get_python_files()
changes = ChangeSet('Encapsulate field <%s>' % self.name)
job_set = task_handle.create_jobset('Collecting Changes',
                                    len(resources))
if getter is None:
    getter = 'get_' + self.name
if setter is None:
    setter = 'set_' + self.name
renamer = GetterSetterRenameInModule(
    self.project, self.name, self.pyname, getter, setter)
for file in resources:
    job_set.started_job(file.path)
    if file == self.resource:
        result = self._change_holding_module(changes, renamer,
                                             getter, setter)
        changes.add_change(ChangeContents(self.resource, result))
    else:
        result = renamer.get_changed_module(file)
        if result is not None:
            changes.add_change(ChangeContents(file, result))
    job_set.finished_job()
return changes
def get_changes(self, getter=None, setter=None, resources=None, task_handle=taskhandle.NullTaskHandle())
Get the changes this refactoring makes. If `getter` is not `None`, that will be the name of the getter; otherwise ``get_${field_name}`` will be used. The same is true for `setter`: if it is `None`, ``set_${field_name}`` is used. `resources` can be a list of `rope.base.resource.File`\s that the refactoring should be applied on; if `None`, all python files in the project are searched.
4.125227
3.709591
1.112044
name = '_' + func.__name__

def _wrapper(self, *args, **kwds):
    if not hasattr(self, name):
        setattr(self, name, func(self, *args, **kwds))
    return getattr(self, name)
return _wrapper
def saveit(func)
A decorator that caches the return value of a function
2.28031
2.178375
1.046794
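A runnable usage sketch of the `saveit` record above, with the decorator pasted verbatim. Note that it caches a single value per instance and ignores the call arguments, so it only suits argument-free accessors:

def saveit(func):
    name = '_' + func.__name__

    def _wrapper(self, *args, **kwds):
        if not hasattr(self, name):
            setattr(self, name, func(self, *args, **kwds))
        return getattr(self, name)
    return _wrapper

class Expensive(object):
    @saveit
    def compute(self):
        print('computing...')
        return 42

e = Expensive()
print(e.compute())  # prints 'computing...' then 42
print(e.compute())  # prints only 42; the cached value is reused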
def decorator(func):
    name = '_calling_%s_' % func.__name__

    def newfunc(self, *args, **kwds):
        if getattr(self, name, False):
            return default()
        setattr(self, name, True)
        try:
            return func(self, *args, **kwds)
        finally:
            setattr(self, name, False)

    return newfunc
return decorator
def prevent_recursion(default)
A decorator that returns the return value of `default` in recursions
2.318051
2.354192
0.984648
def _decorator(func):
    def newfunc(*args, **kwds):
        try:
            return func(*args, **kwds)
        except exception_class:
            pass
    return newfunc
return _decorator
def ignore_exception(exception_class)
A decorator that ignores `exception_class` exceptions
2.359818
2.396544
0.984676
def _decorator(func, message=message):
    if message is None:
        message = '%s is deprecated' % func.__name__

    def newfunc(*args, **kwds):
        warnings.warn(message, DeprecationWarning, stacklevel=2)
        return func(*args, **kwds)
    return newfunc
return _decorator
def deprecated(message=None)
A decorator for deprecated functions
2.301294
2.231344
1.031349
def decorator(func):
    cached_func = _Cached(func, size)
    return lambda *a, **kw: cached_func(*a, **kw)
return decorator
def cached(size)
A caching decorator based on parameter objects
3.175643
2.945475
1.078143
from rope.base.utils.pycompat import string_types
if not isinstance(str_or_obj, string_types):
    return str_or_obj
if '.' not in str_or_obj:
    str_or_obj += '.'
mod_name, obj_name = str_or_obj.rsplit('.', 1)
__import__(mod_name)
mod = sys.modules[mod_name]
return getattr(mod, obj_name) if obj_name else mod
def resolve(str_or_obj)
Returns object from string
2.103806
2.063489
1.019538
last_atom = offset
offset = self._find_last_non_space_char(last_atom)
while offset > 0 and self.code[offset] in ')]':
    last_atom = self._find_parens_start(offset)
    offset = self._find_last_non_space_char(last_atom - 1)
if offset >= 0 and (self.code[offset] in '"\'})]' or
                    self._is_id_char(offset)):
    atom_start = self._find_atom_start(offset)
    if not keyword.iskeyword(self.code[atom_start:offset + 1]):
        return atom_start
return last_atom
def _find_primary_without_dot_start(self, offset)
It tries to find the undotted primary start It is different from `self._get_atom_start()` in that it follows function calls, too; such as in ``f(x)``.
3.46831
3.398316
1.020597
if offset == 0:
    return ('', '', 0)
end = offset - 1
word_start = self._find_atom_start(end)
real_start = self._find_primary_start(end)
if self.code[word_start:offset].strip() == '':
    word_start = end
if self.code[end].isspace():
    word_start = end
if self.code[real_start:word_start].strip() == '':
    real_start = word_start
if real_start == word_start == end and not self._is_id_char(end):
    return ('', '', offset)
if real_start == word_start:
    return ('', self.raw[word_start:offset], word_start)
else:
    if self.code[end] == '.':
        return (self.raw[real_start:end], '', offset)
    last_dot_position = word_start
    if self.code[word_start] != '.':
        last_dot_position = \
            self._find_last_non_space_char(word_start - 1)
    last_char_position = \
        self._find_last_non_space_char(last_dot_position - 1)
    if self.code[word_start].isspace():
        word_start = offset
    return (self.raw[real_start:last_char_position + 1],
            self.raw[word_start:offset], word_start)
def get_splitted_primary_before(self, offset)
Return ``(expression, starting, starting_offset)``

This function is used by the `rope.codeassist.assist` function.
2.570659
2.521506
1.019493
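A worked sketch of the return value (positions are for this exact string)::

    from rope.base import worder

    code = 'result = obj.attr.met'
    w = worder.Worder(code, True)
    w.get_splitted_primary_before(len(code))
    # -> ('obj.attr', 'met', 18): the primary before the last dot, the
    #    partial word being completed, and the word's start offset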
pyname = _get_pyname(project, resource, offset)
    message = 'Inline refactoring should be performed on ' \
              'a method, local variable or parameter.'
    if pyname is None:
        raise rope.base.exceptions.RefactoringError(message)
    if isinstance(pyname, pynames.ImportedName):
        pyname = pyname._get_imported_pyname()
    if isinstance(pyname, pynames.AssignedName):
        return InlineVariable(project, resource, offset)
    if isinstance(pyname, pynames.ParameterName):
        return InlineParameter(project, resource, offset)
    if isinstance(pyname.get_object(), pyobjects.PyFunction):
        return InlineMethod(project, resource, offset)
    else:
        raise rope.base.exceptions.RefactoringError(message)
def create_inline(project, resource, offset)
Create a refactoring object for inlining

Based on `resource` and `offset` it returns an instance of `InlineMethod`, `InlineVariable` or `InlineParameter`.
3.385924
2.823276
1.199289
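A minimal sketch (hypothetical project and module; `create_inline` lives in `rope.refactor.inline`)::

    from rope.base.project import Project
    from rope.refactor.inline import create_inline

    project = Project('/path/to/project')      # hypothetical path
    mod = project.get_resource('pkg/mod.py')   # hypothetical module
    offset = mod.read().index('helper')        # offset of the name to inline
    inliner = create_inline(project, mod, offset)
    # get_changes(remove=False, only_current=True) would restrict the edit
    project.do(inliner.get_changes())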
changes = ChangeSet('Inline method <%s>' % self.name)
        if resources is None:
            resources = self.project.get_python_files()
        if only_current:
            resources = [self.original]
            if remove:
                resources.append(self.resource)
        job_set = task_handle.create_jobset('Collecting Changes',
                                            len(resources))
        for file in resources:
            job_set.started_job(file.path)
            if file == self.resource:
                changes.add_change(self._defining_file_changes(
                    changes, remove=remove, only_current=only_current))
            else:
                aim = None
                if only_current and self.original == file:
                    aim = self.offset
                handle = _InlineFunctionCallsForModuleHandle(
                    self.project, file, self.others_generator, aim)
                result = move.ModuleSkipRenamer(
                    self.occurrence_finder, file,
                    handle).get_changed_module()
                if result is not None:
                    result = _add_imports(self.project, result,
                                          file, self.imports)
                    if remove:
                        result = _remove_from(self.project, self.pyname,
                                              result, file)
                    changes.add_change(ChangeContents(file, result))
            job_set.finished_job()
        return changes
def get_changes(self, remove=True, only_current=False, resources=None, task_handle=taskhandle.NullTaskHandle())
Get the changes this refactoring makes

If `remove` is `False`, the definition will not be removed.  If `only_current` is `True`, only the current occurrence will be inlined.
5.686137
5.619081
1.011934
if self.level == 0:
            return context.project.find_module(
                self.module_name, folder=context.folder)
        else:
            return context.project.find_relative_module(
                self.module_name, context.folder, self.level)
def get_imported_resource(self, context)
Get the imported resource

Returns `None` if the module was not found.
4.380127
3.858695
1.135132
if self.level == 0:
            return context.project.get_module(
                self.module_name, context.folder)
        else:
            return context.project.get_relative_module(
                self.module_name, context.folder, self.level)
def get_imported_module(self, context)
Get the imported `PyModule`

Raises `rope.base.exceptions.ModuleNotFoundError` if the module could not be found.
3.735352
3.290571
1.135168
if templates is not None:
        warnings.warn('Codeassist no longer supports templates',
                      DeprecationWarning, stacklevel=2)
    assist = _PythonCodeAssist(
        project, source_code, offset, resource=resource,
        maxfixes=maxfixes, later_locals=later_locals)
    return assist()
def code_assist(project, source_code, offset, resource=None, templates=None, maxfixes=1, later_locals=True)
Return Python code completions as a list of `CodeAssistProposal`\s

`resource` is a `rope.base.resources.Resource` object.  If provided, relative imports are handled.

`maxfixes` is the maximum number of errors to fix if the code has errors in it.

If `later_locals` is `False`, names defined in this scope and after this line are ignored.
3.396779
4.625729
0.734323
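A minimal sketch (hypothetical project path)::

    from rope.base.project import Project
    from rope.contrib.codeassist import code_assist

    project = Project('/path/to/project')   # hypothetical path
    source = 'import os\nos.pa'
    proposals = code_assist(project, source, len(source))
    print([p.name for p in proposals])      # e.g. ['path', 'pardir', ...]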
word_finder = worder.Worder(source_code, True)
    expression, starting, starting_offset = \
        word_finder.get_splitted_primary_before(offset)
    return starting_offset
def starting_offset(source_code, offset)
Return the offset at which the completion should be inserted

Usually code assist proposals should be inserted like::

    completion = proposal.name
    result = (source_code[:starting_offset] +
              completion +
              source_code[offset:])

where `starting_offset` is the offset returned by this function.
12.919349
15.98604
0.808164
fixer = fixsyntax.FixSyntax(project, source_code, resource, maxfixes)
    pyname = fixer.pyname_at(offset)
    if pyname is None:
        return None
    pyobject = pyname.get_object()
    return PyDocExtractor().get_doc(pyobject)
def get_doc(project, source_code, offset, resource=None, maxfixes=1)
Get the pydoc
5.111558
5.010873
1.020093
fixer = fixsyntax.FixSyntax(project, source_code, resource, maxfixes)
    pyname = fixer.pyname_at(offset)
    if pyname is None:
        return None
    pyobject = pyname.get_object()
    return PyDocExtractor().get_calltip(pyobject, ignore_unknown,
                                        remove_self)
def get_calltip(project, source_code, offset, resource=None, maxfixes=1, ignore_unknown=False, remove_self=False)
Get the calltip of a function

The format of the returned string is ``module_name.holding_scope_names.function_name(arguments)``.  For classes, `__init__()` is used; for normal objects, `__call__()` is used.

Note that the offset is on the function itself, *not* after its open parenthesis.  (It used to be the other way around, but that was easily confused when string literals were involved, so it is better not to try to be too clever when it cannot be clever enough.)  You can use a simple search like::

    offset = source_code.rindex('(', 0, offset) - 1

to handle simple situations.

If `ignore_unknown` is `True`, `None` is returned for functions without source code, like builtins and extensions.

If `remove_self` is `True`, the first parameter will be removed for methods when its name is `self`.
4.53531
6.646644
0.682346
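A minimal sketch using the search suggested above (reusing the hypothetical `project` from the `code_assist` sketch)::

    from rope.contrib.codeassist import get_calltip

    source = 'import os\nos.path.join()'
    # put the offset on the function name itself, not after the '('
    offset = source.rindex('(', 0, len(source)) - 1
    print(get_calltip(project, source, offset))
    # e.g. something like 'os.path.join(a, *p)'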
fixer = fixsyntax.FixSyntax(project, source_code, resource, maxfixes)
    pyname = fixer.pyname_at(offset)
    if pyname is not None:
        module, lineno = pyname.get_definition_location()
        if module is not None:
            return module.get_module().get_resource(), lineno
    return (None, None)
def get_definition_location(project, source_code, offset, resource=None, maxfixes=1)
Return the definition location of the Python name at `offset`

Return a (`rope.base.resources.Resource`, lineno) tuple.  If no `resource` is given and the definition is inside the same module, the first element of the returned tuple will be `None`.  If the location cannot be determined, ``(None, None)`` is returned.
4.196427
3.685237
1.138713
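A minimal sketch with an unsaved module (reusing the hypothetical `project`)::

    from rope.contrib.codeassist import get_definition_location

    source = 'def func():\n    pass\nfunc()'
    resource, lineno = get_definition_location(
        project, source, source.rindex('func'))
    # -> (None, 1): no resource was given and the definition is in
    #    the same module, on line 1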
# Retrieve the PyName.
    pymod = project.get_pymodule(resource)
    pyname = rope.base.evaluate.eval_location(pymod, offset)

    # Now get the location of the definition and its containing scope.
    defmod, lineno = pyname.get_definition_location()
    if not defmod:
        return None
    scope = defmod.get_scope().get_inner_scope_for_line(lineno)

    # Start with the name of the object we're interested in.
    names = []
    if isinstance(pyname, pynamesdef.ParameterName):
        names = [(worder.get_name_at(pymod.get_resource(), offset),
                  'PARAMETER')]
    elif isinstance(pyname, pynamesdef.AssignedName):
        names = [(worder.get_name_at(pymod.get_resource(), offset),
                  'VARIABLE')]

    # Collect scope names.
    while scope.parent:
        if isinstance(scope, pyscopes.FunctionScope):
            scope_type = 'FUNCTION'
        elif isinstance(scope, pyscopes.ClassScope):
            scope_type = 'CLASS'
        else:
            scope_type = None
        names.append((scope.pyobject.get_name(), scope_type))
        scope = scope.parent

    names.append((defmod.get_resource().real_path, 'MODULE'))
    names.reverse()
    return names
def get_canonical_path(project, resource, offset)
Get the canonical path to an object.

Given the offset of the object, this returns a list of (name, name_type) tuples representing the canonical path to the object.  For example, for the 'x' in the following code::

    class Foo(object):
        def bar(self):
            class Qux(object):
                def mux(self, x):
                    pass

we will return::

    [('Foo', 'CLASS'), ('bar', 'FUNCTION'), ('Qux', 'CLASS'),
     ('mux', 'FUNCTION'), ('x', 'PARAMETER')]

`resource` is a `rope.base.resources.Resource` object.  `offset` is the offset of the pyname you want the path to.
3.857073
3.548854
1.08685
sorter = _ProposalSorter(proposals, scopepref, typepref)
    return sorter.get_sorted_proposal_list()
def sorted_proposals(proposals, scopepref=None, typepref=None)
Sort a list of proposals

Return a sorted list of the given `CodeAssistProposal`\s.

`scopepref` can be a list of proposal scopes.  Defaults to ``['parameter_keyword', 'local', 'global', 'imported', 'attribute', 'builtin', 'keyword']``.

`typepref` can be a list of proposal types.  Defaults to ``['class', 'function', 'instance', 'module', None]``.  (`None` stands for completions with no type, like keywords.)
3.89703
5.724115
0.680809
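A minimal sketch (reusing the hypothetical `project` and `source`); note that proposals whose scope or type is missing from the preference lists are filtered out::

    from rope.contrib.codeassist import code_assist, sorted_proposals

    proposals = code_assist(project, source, len(source))
    ordered = sorted_proposals(proposals,
                               scopepref=['local', 'global', 'builtin'],
                               typepref=['class', 'function'])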
word_finder = worder.Worder(source_code, True)
    expression, starting, starting_offset = \
        word_finder.get_splitted_primary_before(offset)
    if expression:
        return expression + '.' + starting
    return starting
def starting_expression(source_code, offset)
Return the expression to complete
10.23343
10.164968
1.006735
pyname = self.pyname
        if isinstance(pyname, pynames.ImportedName):
            pyname = pyname._get_imported_pyname()
        if isinstance(pyname, pynames.DefinedName):
            pyobject = pyname.get_object()
            if isinstance(pyobject, pyobjects.AbstractFunction):
                return pyobject.get_param_names()
def parameters(self)
The names of the parameters the function takes. Returns None if this completion is not a function.
4.228269
4.178563
1.011895
if not self.pyname:
            return None
        pyobject = self.pyname.get_object()
        if not hasattr(pyobject, 'get_doc'):
            return None
        return self.pyname.get_object().get_doc()
def get_doc(self)
Get the proposed object's docstring.

Returns `None` if it cannot be retrieved.
3.75011
3.352297
1.118669
definfo = functionutils.DefinitionInfo.read(self._function)
        for arg, default in definfo.args_with_defaults:
            if self.argname == arg:
                return default
        return None
def get_default(self)
Get a string representation of a param's default value. Returns None if there is no default value for this param.
9.654617
10.189676
0.94749
proposals = {}
        for proposal in self.proposals:
            proposals.setdefault(proposal.scope, []).append(proposal)
        result = []
        for scope in self.scopepref:
            scope_proposals = proposals.get(scope, [])
            scope_proposals = [proposal for proposal in scope_proposals
                               if proposal.type in self.typerank]
            scope_proposals.sort(key=self._proposal_key)
            result.extend(scope_proposals)
        return result
def get_sorted_proposal_list(self)
Return a list of `CodeAssistProposal`
3.031667
2.833944
1.069769
if self.moved is not None:
            self.moved(resource, new_resource)
def resource_moved(self, resource, new_resource)
It is called when a resource is moved
4.264326
4.104096
1.039042