sentence1 (string, lengths 52 to 3.87M)
sentence2 (string, lengths 1 to 47.2k)
label (string, 1 class: entailment)
def async_refresh(self, *args, **kwargs): """ Trigger an asynchronous job to refresh the cache """ # We trigger the task with the class path to import as well as the # (a) args and kwargs for instantiating the class # (b) args and kwargs for calling the 'refresh' method try: enqueue_task( dict( klass_str=self.class_path, obj_args=self.get_init_args(), obj_kwargs=self.get_init_kwargs(), call_args=args, call_kwargs=kwargs ), task_options=self.task_options ) except Exception: # Handle exceptions from talking to RabbitMQ - eg connection # refused. When this happens, we try to run the task # synchronously. logger.error("Unable to trigger task asynchronously - failing " "over to synchronous refresh", exc_info=True) try: return self.refresh(*args, **kwargs) except Exception as e: # Something went wrong while running the task logger.error("Unable to refresh data synchronously: %s", e, exc_info=True) else: logger.debug("Failover synchronous refresh completed successfully")
Trigger an asynchronous job to refresh the cache
entailment
def should_stale_item_be_fetched_synchronously(self, delta, *args, **kwargs): """ Return whether to refresh an item synchronously when it is found in the cache but stale """ if self.fetch_on_stale_threshold is None: return False return delta > (self.fetch_on_stale_threshold - self.lifetime)
Return whether to refresh an item synchronously when it is found in the cache but stale
entailment
def key(self, *args, **kwargs): """ Return the cache key to use. If you're passing anything but primitive types to the ``get`` method, it's likely that you'll need to override this method. """ if not args and not kwargs: return self.class_path try: if args and not kwargs: return "%s:%s" % (self.class_path, self.hash(args)) # The line might break if your passed values are un-hashable. If # it does, you need to override this method and implement your own # key algorithm. return "%s:%s:%s:%s" % (self.class_path, self.hash(args), self.hash([k for k in sorted(kwargs)]), self.hash([kwargs[k] for k in sorted(kwargs)])) except TypeError: raise RuntimeError( "Unable to generate cache key due to unhashable " "args or kwargs - you need to implement your own " "key generation method to avoid this problem")
Return the cache key to use. If you're passing anything but primitive types to the ``get`` method, it's likely that you'll need to override this method.
entailment
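A hedged sketch of the key layout this method produces, assuming a Job subclass whose class_path is 'myapp.jobs.TweetJob' (a hypothetical name; the hash placeholders below stand in for real md5 digests)::

    job = TweetJob()              # hypothetical Job subclass
    job.key()                     # 'myapp.jobs.TweetJob'
    job.key('django')             # 'myapp.jobs.TweetJob:<md5 of args>'
    job.key('django', count=10)   # 'myapp.jobs.TweetJob:<md5 of args>:<md5 of kwarg names>:<md5 of kwarg values>'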
def hash(self, value): """ Generate a hash of the given iterable. This is for use in a cache key. """ if is_iterable(value): value = tuple(to_bytestring(v) for v in value) return hashlib.md5(six.b(':').join(value)).hexdigest()
Generate a hash of the given iterable. This is for use in a cache key.
entailment
def perform_async_refresh(cls, klass_str, obj_args, obj_kwargs, call_args, call_kwargs): """ Re-populate cache using the given job class. The job class is instantiated with the passed constructor args and the refresh method is called with the passed call args. That is:: data = klass(*obj_args, **obj_kwargs).refresh( *call_args, **call_kwargs) :klass_str: String repr of class (eg 'apps.twitter.jobs.FetchTweetsJob') :obj_args: Constructor args :obj_kwargs: Constructor kwargs :call_args: Refresh args :call_kwargs: Refresh kwargs """ klass = get_job_class(klass_str) if klass is None: logger.error("Unable to construct %s with args %r and kwargs %r", klass_str, obj_args, obj_kwargs) return logger.info("Using %s with constructor args %r and kwargs %r", klass_str, obj_args, obj_kwargs) logger.info("Calling refresh with args %r and kwargs %r", call_args, call_kwargs) start = time.time() try: klass(*obj_args, **obj_kwargs).refresh( *call_args, **call_kwargs) except Exception as e: logger.exception("Error running job: '%s'", e) else: duration = time.time() - start logger.info("Refreshed cache in %.6f seconds", duration)
Re-populate cache using the given job class. The job class is instantiated with the passed constructor args and the refresh method is called with the passed call args. That is:: data = klass(*obj_args, **obj_kwargs).refresh( *call_args, **call_kwargs) :klass_str: String repr of class (eg 'apps.twitter.jobs.FetchTweetsJob') :obj_args: Constructor args :obj_kwargs: Constructor kwargs :call_args: Refresh args :call_kwargs: Refresh kwargs
entailment
def cacheback(lifetime=None, fetch_on_miss=None, cache_alias=None, job_class=None, task_options=None, **job_class_kwargs): """ Decorate function to cache its return value. :lifetime: How long to cache items for :fetch_on_miss: Whether to perform a synchronous fetch when no cached result is found :cache_alias: The Django cache alias to store the result into. :job_class: The class to use for running the cache refresh job. Defaults using the FunctionJob. :job_class_kwargs: Any extra kwargs to pass to job_class constructor. Useful with custom job_class implementations. """ if job_class is None: job_class = FunctionJob job = job_class(lifetime=lifetime, fetch_on_miss=fetch_on_miss, cache_alias=cache_alias, task_options=task_options, **job_class_kwargs) def _wrapper(fn): # using available_attrs to work around http://bugs.python.org/issue3445 @wraps(fn, assigned=available_attrs(fn)) def __wrapper(*args, **kwargs): return job.get(fn, *args, **kwargs) # Assign reference to unwrapped function so that we can access it # later without descending into infinite regress. __wrapper.fn = fn # Assign reference to job so we can use the full Job API __wrapper.job = job return __wrapper return _wrapper
Decorate function to cache its return value. :lifetime: How long to cache items for :fetch_on_miss: Whether to perform a synchronous fetch when no cached result is found :cache_alias: The Django cache alias to store the result into. :job_class: The class to use for running the cache refresh job. Defaults using the FunctionJob. :job_class_kwargs: Any extra kwargs to pass to job_class constructor. Useful with custom job_class implementations.
entailment
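A minimal usage sketch of the decorator above; the wrapped function and its slow call are hypothetical::

    @cacheback(lifetime=600, fetch_on_miss=True)
    def fetch_tweets(username):
        # stand-in for an expensive remote call
        return expensive_api_call(username)

    fetch_tweets('django')   # routed through job.get(fn, 'django'), so the result is cached
    fetch_tweets.fn          # the original, unwrapped function
    fetch_tweets.job         # the FunctionJob instance, exposing the full Job API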
def angle(v1, v2): """Return the angle in radians between vectors 'v1' and 'v2'.""" v1_u = unit_vector(v1) v2_u = unit_vector(v2) return np.arccos(np.clip(np.dot(v1_u, v2_u), -1.0, 1.0))
Return the angle in radians between vectors 'v1' and 'v2'.
entailment
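A quick worked example, assuming unit_vector simply normalises its argument::

    import numpy as np

    angle(np.array([1.0, 0.0]), np.array([0.0, 1.0]))    # pi/2, about 1.5708
    angle(np.array([1.0, 0.0]), np.array([1.0, 0.0]))    # 0.0
    angle(np.array([1.0, 0.0]), np.array([-1.0, 0.0]))   # pi, about 3.1416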
def keep_high_angle(vertices, min_angle_deg): """Keep vertices with angles higher than the given minimum.""" accepted = [] v = vertices v1 = v[1] - v[0] accepted.append((v[0][0], v[0][1])) for i in range(1, len(v) - 2): v2 = v[i + 1] - v[i - 1] diff_angle = np.fabs(angle(v1, v2) * 180.0 / np.pi) if diff_angle > min_angle_deg: accepted.append((v[i][0], v[i][1])) v1 = v[i] - v[i - 1] accepted.append((v[-1][0], v[-1][1])) return np.array(accepted, dtype=vertices.dtype)
Keep vertices with angles higher than the given minimum.
entailment
def set_contourf_properties(stroke_width, fcolor, fill_opacity, contour_levels, contourf_idx, unit): """Set property values for Polygon.""" return { "stroke": fcolor, "stroke-width": stroke_width, "stroke-opacity": 1, "fill": fcolor, "fill-opacity": fill_opacity, "title": "%.2f" % contour_levels[contourf_idx] + ' ' + unit }
Set property values for Polygon.
entailment
def contour_to_geojson(contour, geojson_filepath=None, min_angle_deg=None, ndigits=5, unit='', stroke_width=1, geojson_properties=None, strdump=False, serialize=True): """Transform matplotlib.contour to geojson.""" collections = contour.collections contour_index = 0 line_features = [] for collection in collections: color = collection.get_edgecolor() for path in collection.get_paths(): v = path.vertices if len(v) < 3: continue coordinates = keep_high_angle(v, min_angle_deg) if ndigits: coordinates = np.around(coordinates, ndigits) line = LineString(coordinates.tolist()) properties = { "stroke-width": stroke_width, "stroke": rgb2hex(color[0]), "title": "%.2f" % contour.levels[contour_index] + ' ' + unit, "level-value": float("%.6f" % contour.levels[contour_index]), "level-index": contour_index } if geojson_properties: properties.update(geojson_properties) line_features.append(Feature(geometry=line, properties=properties)) contour_index += 1 feature_collection = FeatureCollection(line_features) return _render_feature_collection(feature_collection, geojson_filepath, strdump, serialize)
Transform matplotlib.contour to geojson.
entailment
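A hedged usage sketch: build a matplotlib contour set over a made-up grid and convert it; min_angle_deg is passed explicitly because the vertex filter above compares against it::

    import numpy as np
    import matplotlib.pyplot as plt

    lats = lons = np.linspace(-10.0, 10.0, 50)
    data = np.sqrt(lons[None, :] ** 2 + lats[:, None] ** 2)
    contours = plt.contour(lons, lats, data)
    geojson_str = contour_to_geojson(contours, min_angle_deg=2,
                                     ndigits=3, unit='m')
    # with serialize=True (the default) this presumably returns the
    # FeatureCollection serialized as a GeoJSON string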
def contourf_to_geojson_overlap(contourf, geojson_filepath=None, min_angle_deg=None, ndigits=5, unit='', stroke_width=1, fill_opacity=.9, geojson_properties=None, strdump=False, serialize=True): """Transform matplotlib.contourf to geojson with overlapping filled contours.""" polygon_features = [] contourf_idx = 0 for collection in contourf.collections: color = collection.get_facecolor() for path in collection.get_paths(): for coord in path.to_polygons(): if min_angle_deg: coord = keep_high_angle(coord, min_angle_deg) coord = np.around(coord, ndigits) if ndigits else coord polygon = Polygon(coordinates=[coord.tolist()]) fcolor = rgb2hex(color[0]) properties = set_contourf_properties(stroke_width, fcolor, fill_opacity, contourf.levels, contourf_idx, unit) if geojson_properties: properties.update(geojson_properties) feature = Feature(geometry=polygon, properties=properties) polygon_features.append(feature) contourf_idx += 1 feature_collection = FeatureCollection(polygon_features) return _render_feature_collection(feature_collection, geojson_filepath, strdump, serialize)
Transform matplotlib.contourf to geojson with overlapping filled contours.
entailment
def contourf_to_geojson(contourf, geojson_filepath=None, min_angle_deg=None, ndigits=5, unit='', stroke_width=1, fill_opacity=.9, geojson_properties=None, strdump=False, serialize=True): """Transform matplotlib.contourf to geojson with MultiPolygons.""" polygon_features = [] mps = [] contourf_idx = 0 for coll in contourf.collections: color = coll.get_facecolor() for path in coll.get_paths(): for coord in path.to_polygons(): if min_angle_deg: coord = keep_high_angle(coord, min_angle_deg) coord = np.around(coord, ndigits) if ndigits else coord op = MP(contourf.levels[contourf_idx], rgb2hex(color[0])) if op in mps: for i, k in enumerate(mps): if k == op: mps[i].add_coords(coord.tolist()) else: op.add_coords(coord.tolist()) mps.append(op) contourf_idx += 1 # starting here the multipolys will be extracted contourf_idx = 0 for muli in mps: polygon = muli.mpoly() fcolor = muli.color properties = set_contourf_properties(stroke_width, fcolor, fill_opacity, contourf.levels, contourf_idx, unit) if geojson_properties: properties.update(geojson_properties) feature = Feature(geometry=polygon, properties=properties) polygon_features.append(feature) contourf_idx += 1 feature_collection = FeatureCollection(polygon_features) return _render_feature_collection(feature_collection, geojson_filepath, strdump, serialize)
Transform matplotlib.contourf to geojson with MultiPolygons.
entailment
def get_authorize_callback(endpoint, provider_id): """Get a qualified URL for the provider to return to upon authorization. :param endpoint: Absolute path to append to the application's host """ endpoint_prefix = config_value('BLUEPRINT_NAME') url = url_for(endpoint_prefix + '.' + endpoint, provider_id=provider_id) return request.url_root[:-1] + url
Get a qualified URL for the provider to return to upon authorization. :param endpoint: Absolute path to append to the application's host
entailment
def delete_connection(self, **kwargs): """Remove a single connection to a provider for the specified user.""" conn = self.find_connection(**kwargs) if not conn: return False self.delete(conn) return True
Remove a single connection to a provider for the specified user.
entailment
def delete_connections(self, **kwargs): """Remove all connections to a provider for the specified user.""" rv = False for c in self.find_connections(**kwargs): self.delete(c) rv = True return rv
Remove all connections to a provider for the specified user.
entailment
def login(provider_id): """Starts the provider login OAuth flow""" provider = get_provider_or_404(provider_id) callback_url = get_authorize_callback('login', provider_id) post_login = request.form.get('next', get_post_login_redirect()) session[config_value('POST_OAUTH_LOGIN_SESSION_KEY')] = post_login return provider.authorize(callback_url)
Starts the provider login OAuth flow
entailment
def connect(provider_id): """Starts the provider connection OAuth flow""" provider = get_provider_or_404(provider_id) callback_url = get_authorize_callback('connect', provider_id) allow_view = get_url(config_value('CONNECT_ALLOW_VIEW')) pc = request.form.get('next', allow_view) session[config_value('POST_OAUTH_CONNECT_SESSION_KEY')] = pc return provider.authorize(callback_url)
Starts the provider connection OAuth flow
entailment
def remove_all_connections(provider_id): """Remove all connections for the authenticated user to the specified provider """ provider = get_provider_or_404(provider_id) ctx = dict(provider=provider.name, user=current_user) deleted = _datastore.delete_connections(user_id=current_user.get_id(), provider_id=provider_id) if deleted: after_this_request(_commit) msg = ('All connections to %s removed' % provider.name, 'info') connection_removed.send(current_app._get_current_object(), user=current_user._get_current_object(), provider_id=provider_id) else: msg = ('Unable to remove connection to %(provider)s' % ctx, 'error') do_flash(*msg) return redirect(request.referrer)
Remove all connections for the authenticated user to the specified provider
entailment
def remove_connection(provider_id, provider_user_id): """Remove a specific connection for the authenticated user to the specified provider """ provider = get_provider_or_404(provider_id) ctx = dict(provider=provider.name, user=current_user, provider_user_id=provider_user_id) deleted = _datastore.delete_connection(user_id=current_user.get_id(), provider_id=provider_id, provider_user_id=provider_user_id) if deleted: after_this_request(_commit) msg = ('Connection to %(provider)s removed' % ctx, 'info') connection_removed.send(current_app._get_current_object(), user=current_user._get_current_object(), provider_id=provider_id) else: msg = ('Unable to remove connection to %(provider)s' % ctx, 'error') do_flash(*msg) return redirect(request.referrer or get_post_login_redirect())
Remove a specific connection for the authenticated user to the specified provider
entailment
def connect_handler(cv, provider): """Shared method to handle the connection process :param cv: A dictionary containing the connection values :param provider: The provider the connection should be made to """ cv.setdefault('user_id', current_user.get_id()) connection = _datastore.find_connection( provider_id=cv['provider_id'], provider_user_id=cv['provider_user_id']) if connection is None: after_this_request(_commit) connection = _datastore.create_connection(**cv) msg = ('Connection established to %s' % provider.name, 'success') connection_created.send(current_app._get_current_object(), user=current_user._get_current_object(), connection=connection) else: msg = ('A connection is already established with %s ' 'to your account' % provider.name, 'notice') connection_failed.send(current_app._get_current_object(), user=current_user._get_current_object()) redirect_url = session.pop(config_value('POST_OAUTH_CONNECT_SESSION_KEY'), get_url(config_value('CONNECT_ALLOW_VIEW'))) do_flash(*msg) return redirect(redirect_url)
Shared method to handle the connection process :param cv: A dictionary containing the connection values :param provider: The provider the connection should be made to
entailment
def login_handler(response, provider, query): """Shared method to handle the signin process""" connection = _datastore.find_connection(**query) if connection: after_this_request(_commit) token_pair = get_token_pair_from_oauth_response(provider, response) if (token_pair['access_token'] != connection.access_token or token_pair['secret'] != connection.secret): connection.access_token = token_pair['access_token'] connection.secret = token_pair['secret'] _datastore.put(connection) user = connection.user login_user(user) key = _social.post_oauth_login_session_key redirect_url = session.pop(key, get_post_login_redirect()) login_completed.send(current_app._get_current_object(), provider=provider, user=user) return redirect(redirect_url) login_failed.send(current_app._get_current_object(), provider=provider, oauth_response=response) next = get_url(_security.login_manager.login_view) msg = '%s account not associated with an existing user' % provider.name do_flash(msg, 'error') return redirect(next)
Shared method to handle the signin process
entailment
def init_app(self, app, datastore=None): """Initialize the application with the Social extension :param app: The Flask application :param datastore: Connection datastore instance """ datastore = datastore or self.datastore for key, value in default_config.items(): app.config.setdefault(key, value) providers = dict() for key, config in app.config.items(): if not key.startswith('SOCIAL_') or config is None or key in default_config: continue suffix = key.lower().replace('social_', '') default_module_name = 'flask_social.providers.%s' % suffix module_name = config.get('module', default_module_name) module = import_module(module_name) config = update_recursive(module.config, config) providers[config['id']] = OAuthRemoteApp(**config) providers[config['id']].tokengetter(_get_token) state = _get_state(app, datastore, providers) app.register_blueprint(create_blueprint(state, __name__)) app.extensions['social'] = state return state
Initialize the application with the Social extension :param app: The Flask application :param datastore: Connection datastore instance
entailment
def guess(filename, fallback='application/octet-stream'): """ Using the mimetypes library, guess the mimetype and encoding for a given *filename*. If the mimetype cannot be guessed, *fallback* is assumed instead. :param filename: Filename; can be an absolute path. :param fallback: A fallback mimetype. """ guessed, encoding = mimetypes.guess_type(filename, strict=False) if guessed is None: return fallback, encoding return guessed, encoding
Using the mimetypes library, guess the mimetype and encoding for a given *filename*. If the mimetype cannot be guessed, *fallback* is assumed instead. :param filename: Filename; can be an absolute path. :param fallback: A fallback mimetype.
entailment
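For example, with the standard mimetypes tables::

    guess('report.pdf')       # ('application/pdf', None)
    guess('archive.tar.gz')   # ('application/x-tar', 'gzip')
    guess('unknown.blob')     # ('application/octet-stream', None) -- fallback applied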
def format_addresses(addrs): """ Given an iterable of addresses or name-address tuples *addrs*, return a header value that joins all of them together with a space and a comma. """ return ', '.join( formataddr(item) if isinstance(item, tuple) else item for item in addrs )
Given an iterable of addresses or name-address tuples *addrs*, return a header value that joins all of them together with a space and a comma.
entailment
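For example::

    format_addresses([('Alice', 'alice@example.com'), 'bob@example.com'])
    # 'Alice <alice@example.com>, bob@example.com'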
def stringify_address(addr, encoding='utf-8'): """ Given an email address *addr*, try to encode it with ASCII. If it's not possible, encode the *local-part* with the *encoding* and the *domain* with IDNA. The result is a unicode string with the domain encoded as idna. """ if isinstance(addr, bytes_type): return addr try: addr = addr.encode('ascii') except UnicodeEncodeError: if '@' in addr: localpart, domain = addr.split('@', 1) addr = b'@'.join([ localpart.encode(encoding), domain.encode('idna'), ]) else: addr = addr.encode(encoding) return addr.decode('utf-8')
Given an email address *addr*, try to encode it with ASCII. If it's not possible, encode the *local-part* with the *encoding* and the *domain* with IDNA. The result is a unicode string with the domain encoded as idna.
entailment
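A hedged example; the exact IDNA form of the domain is illustrative::

    stringify_address('user@example.com')
    # 'user@example.com' -- plain ASCII passes through unchanged

    stringify_address(u'user@ex\xe4mple.com')
    # local part encoded with *encoding*, domain with IDNA,
    # roughly 'user@xn--exmple-cua.com'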
def email(sender=None, receivers=(), cc=(), bcc=(), subject=None, content=None, encoding='utf8', attachments=()): """ Creates a Collection object with a HTML *content*, and *attachments*. :param content: HTML content. :param encoding: Encoding of the email. :param attachments: List of filenames to attach to the email. """ enclosure = [HTML(content, encoding)] enclosure.extend(Attachment(k) for k in attachments) return Collection( *enclosure, headers=[ headers.subject(subject), headers.sender(sender), headers.to(*receivers), headers.cc(*cc), headers.bcc(*bcc), headers.date(), headers.message_id(), ] )
Creates a Collection object with a HTML *content*, and *attachments*. :param content: HTML content. :param encoding: Encoding of the email. :param attachments: List of filenames to attach to the email.
entailment
def postman(host, port=587, auth=(None, None), force_tls=False, options=None): """ Creates a Postman object with TLS and Auth middleware. TLS is placed before authentication because usually authentication happens and is accepted only after TLS is enabled. :param auth: Tuple of (username, password) to be used to ``login`` to the server. :param force_tls: Whether TLS should be forced. :param options: Dictionary of keyword arguments to be used when the SMTP class is called. """ return Postman( host=host, port=port, middlewares=[ middleware.tls(force=force_tls), middleware.auth(*auth), ], **(options or {}) )
Creates a Postman object with TLS and Auth middleware. TLS is placed before authentication because usually authentication happens and is accepted only after TLS is enabled. :param auth: Tuple of (username, password) to be used to ``login`` to the server. :param force_tls: Whether TLS should be forced. :param options: Dictionary of keyword arguments to be used when the SMTP class is called.
entailment
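A hedged end-to-end sketch combining the two helpers above; the host, credentials and addresses are placeholders, and it assumes the returned Postman exposes a send() entry point like the send() method shown further below::

    p = postman(host='smtp.example.com', port=587,
                auth=('user', 'secret'), force_tls=True, options={})
    envelope = email(sender='Me <me@example.com>',
                     receivers=['you@example.com'],
                     subject='Monthly report',
                     content='<p>See attached.</p>',
                     attachments=['report.pdf'])
    response = p.send(envelope)   # a SendmailResponse-like object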
def mime(self): """ Returns the finalised mime object, after applying the internal headers. Usually this is not meant to be overridden. """ mime = self.mime_object() self.headers.prepare(mime) return mime
Returns the finalised mime object, after applying the internal headers. Usually this is not meant to be overridden.
entailment
def send(self, envelope): """ Send an *envelope* which may be an envelope or an enclosure-like object, see :class:`~mailthon.enclosure.Enclosure` and :class:`~mailthon.envelope.Envelope`, and returns a :class:`~mailthon.response.SendmailResponse` object. """ rejected = self.conn.sendmail( stringify_address(envelope.sender), [stringify_address(k) for k in envelope.receivers], envelope.string(), ) status_code, reason = self.conn.noop() return SendmailResponse( status_code, reason, rejected, )
Send an *envelope* which may be an envelope or an enclosure-like object, see :class:`~mailthon.enclosure.Enclosure` and :class:`~mailthon.envelope.Envelope`, and returns a :class:`~mailthon.response.SendmailResponse` object.
entailment
def connection(self): """ A context manager that returns a connection to the server using some *session*. """ conn = self.session(**self.options) try: for item in self.middlewares: item(conn) yield conn finally: conn.teardown()
A context manager that returns a connection to the server using some *session*.
entailment
def sender(self): """ Returns the sender, respecting the Resent-* headers. In any case, prefer Sender over From, meaning that if Sender is present then From is ignored, as per the RFC. """ to_fetch = ( ['Resent-Sender', 'Resent-From'] if self.resent else ['Sender', 'From'] ) for item in to_fetch: if item in self: _, addr = getaddresses([self[item]])[0] return addr
Returns the sender, respecting the Resent-* headers. In any case, prefer Sender over From, meaning that if Sender is present then From is ignored, as per the RFC.
entailment
def receivers(self): """ Returns a list of receivers, obtained from the To, Cc, and Bcc headers, respecting the Resent-* headers if the email was resent. """ attrs = ( ['Resent-To', 'Resent-Cc', 'Resent-Bcc'] if self.resent else ['To', 'Cc', 'Bcc'] ) addrs = (v for v in (self.get(k) for k in attrs) if v) return [addr for _, addr in getaddresses(addrs)]
Returns a list of receivers, obtained from the To, Cc, and Bcc headers, respecting the Resent-* headers if the email was resent.
entailment
def prepare(self, mime): """ Prepares a MIME object by applying the headers to the *mime* object. Ignores any Bcc or Resent-Bcc headers. """ for key in self: if key == 'Bcc' or key == 'Resent-Bcc': continue del mime[key] # Python 3.* email's compatibility layer will handle # unicode field values in proper way but Python 2 # won't (it will encode not only additional field # values but also all header values) parsed_header, additional_fields = parse_header( self[key] if IS_PY3 else self[key].encode("utf-8") ) mime.add_header(key, parsed_header, **additional_fields)
Prepares a MIME object by applying the headers to the *mime* object. Ignores any Bcc or Resent-Bcc headers.
entailment
def tls(force=False): """ Middleware implementing TLS for SMTP connections. By default this is not forced; TLS is only used if STARTTLS is available. If the *force* parameter is set to True, it will not query the server for TLS features before upgrading to TLS. """ def middleware(conn): if force or conn.has_extn('STARTTLS'): conn.starttls() conn.ehlo() return middleware
Middleware implementing TLS for SMTP connections. By default this is not forced; TLS is only used if STARTTLS is available. If the *force* parameter is set to True, it will not query the server for TLS features before upgrading to TLS.
entailment
def auth(username, password): """ Middleware implementing authentication via LOGIN. Most of the time this middleware needs to be placed *after* TLS. :param username: Username to login with. :param password: Password of the user. """ def middleware(conn): conn.login(username, password) return middleware
Middleware implementing authentication via LOGIN. Most of the time this middleware needs to be placed *after* TLS. :param username: Username to login with. :param password: Password of the user.
entailment
def get_existing_model(model_name): """ Try to find existing model class named `model_name`. :param model_name: String name of the model class. """ try: model_cls = engine.get_document_cls(model_name) log.debug('Model `{}` already exists. Using existing one'.format( model_name)) return model_cls except ValueError: log.debug('Model `{}` does not exist'.format(model_name))
Try to find existing model class named `model_name`. :param model_name: String name of the model class.
entailment
def prepare_relationship(config, model_name, raml_resource): """ Create referenced model if it doesn't exist. When preparing a relationship, we check to see if the model that will be referenced already exists. If not, it is created so that it will be possible to use it in a relationship. Thus the first usage of this model in RAML file must provide its schema in POST method resource body schema. :param model_name: Name of model which should be generated. :param raml_resource: Instance of ramlfications.raml.ResourceNode for which :model_name: will be defined. """ if get_existing_model(model_name) is None: plural_route = '/' + pluralize(model_name.lower()) route = '/' + model_name.lower() for res in raml_resource.root.resources: if res.method.upper() != 'POST': continue if res.path.endswith(plural_route) or res.path.endswith(route): break else: raise ValueError('Model `{}` used in relationship is not ' 'defined'.format(model_name)) setup_data_model(config, res, model_name)
Create referenced model if it doesn't exist. When preparing a relationship, we check to see if the model that will be referenced already exists. If not, it is created so that it will be possible to use it in a relationship. Thus the first usage of this model in RAML file must provide its schema in POST method resource body schema. :param model_name: Name of model which should be generated. :param raml_resource: Instance of ramlfications.raml.ResourceNode for which :model_name: will be defined.
entailment
def generate_model_cls(config, schema, model_name, raml_resource, es_based=True): """ Generate model class. Engine DB field types are determined using `type_fields` and only those types may be used. :param schema: Model schema dict parsed from RAML. :param model_name: String that is used as new model's name. :param raml_resource: Instance of ramlfications.raml.ResourceNode. :param es_based: Boolean indicating if generated model should be a subclass of Elasticsearch-based document class or not. It True, ESBaseDocument is used; BaseDocument is used otherwise. Defaults to True. """ from nefertari.authentication.models import AuthModelMethodsMixin base_cls = engine.ESBaseDocument if es_based else engine.BaseDocument model_name = str(model_name) metaclass = type(base_cls) auth_model = schema.get('_auth_model', False) bases = [] if config.registry.database_acls: from nefertari_guards import engine as guards_engine bases.append(guards_engine.DocumentACLMixin) if auth_model: bases.append(AuthModelMethodsMixin) bases.append(base_cls) attrs = { '__tablename__': model_name.lower(), '_public_fields': schema.get('_public_fields') or [], '_auth_fields': schema.get('_auth_fields') or [], '_hidden_fields': schema.get('_hidden_fields') or [], '_nested_relationships': schema.get('_nested_relationships') or [], } if '_nesting_depth' in schema: attrs['_nesting_depth'] = schema.get('_nesting_depth') # Generate fields from properties properties = schema.get('properties', {}) for field_name, props in properties.items(): if field_name in attrs: continue db_settings = props.get('_db_settings') if db_settings is None: continue field_kwargs = db_settings.copy() field_kwargs['required'] = bool(field_kwargs.get('required')) for default_attr_key in ('default', 'onupdate'): value = field_kwargs.get(default_attr_key) if is_callable_tag(value): field_kwargs[default_attr_key] = resolve_to_callable(value) type_name = ( field_kwargs.pop('type', 'string') or 'string').lower() if type_name not in type_fields: raise ValueError('Unknown type: {}'.format(type_name)) field_cls = type_fields[type_name] if field_cls is engine.Relationship: prepare_relationship( config, field_kwargs['document'], raml_resource) if field_cls is engine.ForeignKeyField: key = 'ref_column_type' field_kwargs[key] = type_fields[field_kwargs[key]] if field_cls is engine.ListField: key = 'item_type' field_kwargs[key] = type_fields[field_kwargs[key]] attrs[field_name] = field_cls(**field_kwargs) # Update model definition with methods and variables defined in registry attrs.update(registry.mget(model_name)) # Generate new model class model_cls = metaclass(model_name, tuple(bases), attrs) setup_model_event_subscribers(config, model_cls, schema) setup_fields_processors(config, model_cls, schema) return model_cls, auth_model
Generate model class. Engine DB field types are determined using `type_fields` and only those types may be used. :param schema: Model schema dict parsed from RAML. :param model_name: String that is used as new model's name. :param raml_resource: Instance of ramlfications.raml.ResourceNode. :param es_based: Boolean indicating if generated model should be a subclass of Elasticsearch-based document class or not. It True, ESBaseDocument is used; BaseDocument is used otherwise. Defaults to True.
entailment
def setup_data_model(config, raml_resource, model_name): """ Setup storage/data model and return generated model class. Process follows these steps: * Resource schema is found and restructured by `resource_schema`. * Model class is generated from properties dict using util function `generate_model_cls`. :param raml_resource: Instance of ramlfications.raml.ResourceNode. :param model_name: String representing model name. """ model_cls = get_existing_model(model_name) schema = resource_schema(raml_resource) if not schema: raise Exception('Missing schema for model `{}`'.format(model_name)) if model_cls is not None: return model_cls, schema.get('_auth_model', False) log.info('Generating model class `{}`'.format(model_name)) return generate_model_cls( config, schema=schema, model_name=model_name, raml_resource=raml_resource, )
Setup storage/data model and return generated model class. Process follows these steps: * Resource schema is found and restructured by `resource_schema`. * Model class is generated from properties dict using util function `generate_model_cls`. :param raml_resource: Instance of ramlfications.raml.ResourceNode. :param model_name: String representing model name.
entailment
def handle_model_generation(config, raml_resource): """ Generates model name and runs `setup_data_model` to get or generate actual model class. :param raml_resource: Instance of ramlfications.raml.ResourceNode. """ model_name = generate_model_name(raml_resource) try: return setup_data_model(config, raml_resource, model_name) except ValueError as ex: raise ValueError('{}: {}'.format(model_name, str(ex)))
Generates model name and runs `setup_data_model` to get or generate actual model class. :param raml_resource: Instance of ramlfications.raml.ResourceNode.
entailment
def setup_model_event_subscribers(config, model_cls, schema): """ Set up model event subscribers. :param config: Pyramid Configurator instance. :param model_cls: Model class for which handlers should be connected. :param schema: Dict of model JSON schema. """ events_map = get_events_map() model_events = schema.get('_event_handlers', {}) event_kwargs = {'model': model_cls} for event_tag, subscribers in model_events.items(): type_, action = event_tag.split('_') event_objects = events_map[type_][action] if not isinstance(event_objects, list): event_objects = [event_objects] for sub_name in subscribers: sub_func = resolve_to_callable(sub_name) config.subscribe_to_events( sub_func, event_objects, **event_kwargs)
Set up model event subscribers. :param config: Pyramid Configurator instance. :param model_cls: Model class for which handlers should be connected. :param schema: Dict of model JSON schema.
entailment
def setup_fields_processors(config, model_cls, schema): """ Set up model fields' processors. :param config: Pyramid Configurator instance. :param model_cls: Model class for field of which processors should be set up. :param schema: Dict of model JSON schema. """ properties = schema.get('properties', {}) for field_name, props in properties.items(): if not props: continue processors = props.get('_processors') backref_processors = props.get('_backref_processors') if processors: processors = [resolve_to_callable(val) for val in processors] setup_kwargs = {'model': model_cls, 'field': field_name} config.add_field_processors(processors, **setup_kwargs) if backref_processors: db_settings = props.get('_db_settings', {}) is_relationship = db_settings.get('type') == 'relationship' document = db_settings.get('document') backref_name = db_settings.get('backref_name') if not (is_relationship and document and backref_name): continue backref_processors = [ resolve_to_callable(val) for val in backref_processors] setup_kwargs = { 'model': engine.get_document_cls(document), 'field': backref_name } config.add_field_processors( backref_processors, **setup_kwargs)
Set up model fields' processors. :param config: Pyramid Configurator instance. :param model_cls: Model class for field of which processors should be set up. :param schema: Dict of model JSON schema.
entailment
def _setup_ticket_policy(config, params): """ Setup Pyramid AuthTktAuthenticationPolicy. Notes: * Initial `secret` params value is considered to be a name of config param that represents a cookie name. * `auth_model.get_groups_by_userid` is used as a `callback`. * Also connects basic routes to perform authentication actions. :param config: Pyramid Configurator instance. :param params: Nefertari dictset which contains security scheme `settings`. """ from nefertari.authentication.views import ( TicketAuthRegisterView, TicketAuthLoginView, TicketAuthLogoutView) log.info('Configuring Pyramid Ticket Authn policy') if 'secret' not in params: raise ValueError( 'Missing required security scheme settings: secret') params['secret'] = config.registry.settings[params['secret']] auth_model = config.registry.auth_model params['callback'] = auth_model.get_groups_by_userid config.add_request_method( auth_model.get_authuser_by_userid, 'user', reify=True) policy = AuthTktAuthenticationPolicy(**params) RegisterViewBase = TicketAuthRegisterView if config.registry.database_acls: class RegisterViewBase(ACLAssignRegisterMixin, TicketAuthRegisterView): pass class RamsesTicketAuthRegisterView(RegisterViewBase): Model = config.registry.auth_model class RamsesTicketAuthLoginView(TicketAuthLoginView): Model = config.registry.auth_model class RamsesTicketAuthLogoutView(TicketAuthLogoutView): Model = config.registry.auth_model common_kw = { 'prefix': 'auth', 'factory': 'nefertari.acl.AuthenticationACL', } root = config.get_root_resource() root.add('register', view=RamsesTicketAuthRegisterView, **common_kw) root.add('login', view=RamsesTicketAuthLoginView, **common_kw) root.add('logout', view=RamsesTicketAuthLogoutView, **common_kw) return policy
Setup Pyramid AuthTktAuthenticationPolicy. Notes: * Initial `secret` params value is considered to be a name of config param that represents a cookie name. * `auth_model.get_groups_by_userid` is used as a `callback`. * Also connects basic routes to perform authentication actions. :param config: Pyramid Configurator instance. :param params: Nefertari dictset which contains security scheme `settings`.
entailment
def _setup_apikey_policy(config, params): """ Setup `nefertari.ApiKeyAuthenticationPolicy`. Notes: * User may provide model name in :params['user_model']: do define the name of the user model. * `auth_model.get_groups_by_token` is used to perform username and token check * `auth_model.get_token_credentials` is used to get username and token from userid * Also connects basic routes to perform authentication actions. Arguments: :config: Pyramid Configurator instance. :params: Nefertari dictset which contains security scheme `settings`. """ from nefertari.authentication.views import ( TokenAuthRegisterView, TokenAuthClaimView, TokenAuthResetView) log.info('Configuring ApiKey Authn policy') auth_model = config.registry.auth_model params['check'] = auth_model.get_groups_by_token params['credentials_callback'] = auth_model.get_token_credentials params['user_model'] = auth_model config.add_request_method( auth_model.get_authuser_by_name, 'user', reify=True) policy = ApiKeyAuthenticationPolicy(**params) RegisterViewBase = TokenAuthRegisterView if config.registry.database_acls: class RegisterViewBase(ACLAssignRegisterMixin, TokenAuthRegisterView): pass class RamsesTokenAuthRegisterView(RegisterViewBase): Model = auth_model class RamsesTokenAuthClaimView(TokenAuthClaimView): Model = auth_model class RamsesTokenAuthResetView(TokenAuthResetView): Model = auth_model common_kw = { 'prefix': 'auth', 'factory': 'nefertari.acl.AuthenticationACL', } root = config.get_root_resource() root.add('register', view=RamsesTokenAuthRegisterView, **common_kw) root.add('token', view=RamsesTokenAuthClaimView, **common_kw) root.add('reset_token', view=RamsesTokenAuthResetView, **common_kw) return policy
Setup `nefertari.ApiKeyAuthenticationPolicy`. Notes: * User may provide model name in :params['user_model']: do define the name of the user model. * `auth_model.get_groups_by_token` is used to perform username and token check * `auth_model.get_token_credentials` is used to get username and token from userid * Also connects basic routes to perform authentication actions. Arguments: :config: Pyramid Configurator instance. :params: Nefertari dictset which contains security scheme `settings`.
entailment
def setup_auth_policies(config, raml_root): """ Setup authentication, authorization policies. Performs basic validation to check all the required values are present and performs authentication, authorization policies generation using generator functions from `AUTHENTICATION_POLICIES`. :param config: Pyramid Configurator instance. :param raml_root: Instance of ramlfications.raml.RootNode. """ log.info('Configuring auth policies') secured_by_all = raml_root.secured_by or [] secured_by = [item for item in secured_by_all if item] if not secured_by: log.info('API is not secured. `secured_by` attribute ' 'value missing.') return secured_by = secured_by[0] schemes = {scheme.name: scheme for scheme in raml_root.security_schemes} if secured_by not in schemes: raise ValueError( 'Undefined security scheme used in `secured_by`: {}'.format( secured_by)) scheme = schemes[secured_by] if scheme.type not in AUTHENTICATION_POLICIES: raise ValueError('Unsupported security scheme type: {}'.format( scheme.type)) # Setup Authentication policy policy_generator = AUTHENTICATION_POLICIES[scheme.type] params = dictset(scheme.settings or {}) authn_policy = policy_generator(config, params) config.set_authentication_policy(authn_policy) # Setup Authorization policy authz_policy = ACLAuthorizationPolicy() config.set_authorization_policy(authz_policy)
Setup authentication, authorization policies. Performs basic validation to check all the required values are present and performs authentication, authorization policies generation using generator functions from `AUTHENTICATION_POLICIES`. :param config: Pyramid Configurator instance. :param raml_root: Instance of ramlfications.raml.RootNode.
entailment
def get_authuser_model(): """ Define and return AuthUser model using nefertari base classes """ from nefertari.authentication.models import AuthUserMixin from nefertari import engine class AuthUser(AuthUserMixin, engine.BaseDocument): __tablename__ = 'ramses_authuser' return AuthUser
Define and return AuthUser model using nefertari base classes
entailment
def validate_permissions(perms): """ Validate :perms: contains valid permissions. :param perms: List of permission names or ALL_PERMISSIONS. """ if not isinstance(perms, (list, tuple)): perms = [perms] valid_perms = set(PERMISSIONS.values()) if ALL_PERMISSIONS in perms: return perms if set(perms) - valid_perms: raise ValueError( 'Invalid ACL permission names. Valid permissions ' 'are: {}'.format(', '.join(valid_perms))) return perms
Validate :perms: contains valid permissions. :param perms: List of permission names or ALL_PERMISSIONS.
entailment
def parse_permissions(perms): """ Parse permissions ("perms") which are either exact permission names or the keyword 'all'. :param perms: List or comma-separated string of nefertari permission names, or 'all' """ if isinstance(perms, six.string_types): perms = perms.split(',') perms = [perm.strip().lower() for perm in perms] if 'all' in perms: return ALL_PERMISSIONS return validate_permissions(perms)
Parse permissions ("perms") which are either exact permission names or the keyword 'all'. :param perms: List or comma-separated string of nefertari permission names, or 'all'
entailment
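For example, assuming 'view' and 'create' are registered nefertari permission names::

    parse_permissions('view, create')   # ['view', 'create']
    parse_permissions(['view'])         # ['view']
    parse_permissions('all')            # ALL_PERMISSIONS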
def parse_acl(acl_string): """ Parse raw string :acl_string: of RAML-defined ACLs. If :acl_string: is blank or None, all permissions are given. Values of ACL action and principal are parsed using `actions` and `special_principals` maps and are looked up after `strip()` and `lower()`. ACEs in :acl_string: may be separated by newlines or semicolons. Action, principal and permission lists must be separated by spaces. Permissions must be comma-separated. E.g. 'allow everyone view,create,update' and 'deny authenticated delete' :param acl_string: Raw RAML string containing defined ACEs. """ if not acl_string: return [ALLOW_ALL] aces_list = acl_string.replace('\n', ';').split(';') aces_list = [ace.strip().split(' ', 2) for ace in aces_list if ace] aces_list = [(a, b, c.split(',')) for a, b, c in aces_list] result_acl = [] for action_str, princ_str, perms in aces_list: # Process action action_str = action_str.strip().lower() action = actions.get(action_str) if action is None: raise ValueError( 'Unknown ACL action: {}. Valid actions: {}'.format( action_str, list(actions.keys()))) # Process principal princ_str = princ_str.strip().lower() if princ_str in special_principals: principal = special_principals[princ_str] elif is_callable_tag(princ_str): principal = resolve_to_callable(princ_str) else: principal = princ_str # Process permissions permissions = parse_permissions(perms) result_acl.append((action, principal, permissions)) return result_acl
Parse raw string :acl_string: of RAML-defined ACLs. If :acl_string: is blank or None, all permissions are given. Values of ACL action and principal are parsed using `actions` and `special_principals` maps and are looked up after `strip()` and `lower()`. ACEs in :acl_string: may be separated by newlines or semicolons. Action, principal and permission lists must be separated by spaces. Permissions must be comma-separated. E.g. 'allow everyone view,create,update' and 'deny authenticated delete' :param acl_string: Raw RAML string containing defined ACEs.
entailment
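A hedged sketch of the result, assuming the `actions` and `special_principals` maps resolve to Pyramid's Allow/Deny and Everyone/Authenticated constants::

    parse_acl('allow everyone view,create\ndeny authenticated delete')
    # [(Allow, Everyone, ['view', 'create']),
    #  (Deny, Authenticated, ['delete'])]

    parse_acl('')   # [ALLOW_ALL]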
def generate_acl(config, model_cls, raml_resource, es_based=True): """ Generate an ACL. Generated ACL class has a `item_model` attribute set to :model_cls:. ACLs used for collection and item access control are generated from a first security scheme with type `x-ACL`. If :raml_resource: has no x-ACL security schemes defined then ALLOW_ALL ACL is used. If the `collection` or `item` settings are empty, then ALLOW_ALL ACL is used. :param model_cls: Generated model class :param raml_resource: Instance of ramlfications.raml.ResourceNode for which ACL is being generated :param es_based: Boolean inidicating whether ACL should query ES or not when getting an object """ schemes = raml_resource.security_schemes or [] schemes = [sch for sch in schemes if sch.type == 'x-ACL'] if not schemes: collection_acl = item_acl = [] log.debug('No ACL scheme applied. Using ACL: {}'.format(item_acl)) else: sec_scheme = schemes[0] log.debug('{} ACL scheme applied'.format(sec_scheme.name)) settings = sec_scheme.settings or {} collection_acl = parse_acl(acl_string=settings.get('collection')) item_acl = parse_acl(acl_string=settings.get('item')) class GeneratedACLBase(object): item_model = model_cls def __init__(self, request, es_based=es_based): super(GeneratedACLBase, self).__init__(request=request) self.es_based = es_based self._collection_acl = collection_acl self._item_acl = item_acl bases = [GeneratedACLBase] if config.registry.database_acls: from nefertari_guards.acl import DatabaseACLMixin as GuardsMixin bases += [DatabaseACLMixin, GuardsMixin] bases.append(BaseACL) return type('GeneratedACL', tuple(bases), {})
Generate an ACL. Generated ACL class has a `item_model` attribute set to :model_cls:. ACLs used for collection and item access control are generated from a first security scheme with type `x-ACL`. If :raml_resource: has no x-ACL security schemes defined then ALLOW_ALL ACL is used. If the `collection` or `item` settings are empty, then ALLOW_ALL ACL is used. :param model_cls: Generated model class :param raml_resource: Instance of ramlfications.raml.ResourceNode for which ACL is being generated :param es_based: Boolean inidicating whether ACL should query ES or not when getting an object
entailment
def _apply_callables(self, acl, obj=None): """ Iterate over ACEs from :acl: and apply callable principals if any. Principals are passed 3 arguments on call: :ace: Single ACE object that looks like (action, callable, permission or [permission]) :request: Current request object :obj: Object instance to be accessed via the ACL Principals must return a single ACE or a list of ACEs. :param acl: Sequence of valid Pyramid ACEs which will be processed :param obj: Object to be accessed via the ACL """ new_acl = [] for i, ace in enumerate(acl): principal = ace[1] if six.callable(principal): ace = principal(ace=ace, request=self.request, obj=obj) if not ace: continue if not isinstance(ace[0], (list, tuple)): ace = [ace] ace = [(a, b, validate_permissions(c)) for a, b, c in ace] else: ace = [ace] new_acl += ace return tuple(new_acl)
Iterate over ACEs from :acl: and apply callable principals if any. Principals are passed 3 arguments on call: :ace: Single ACE object that looks like (action, callable, permission or [permission]) :request: Current request object :obj: Object instance to be accessed via the ACL Principals must return a single ACE or a list of ACEs. :param acl: Sequence of valid Pyramid ACEs which will be processed :param obj: Object to be accessed via the ACL
entailment
def item_acl(self, item): """ Objectify ACL if ES is used or call item.get_acl() if db is used. """ if self.es_based: from nefertari_guards.elasticsearch import get_es_item_acl return get_es_item_acl(item) return super(DatabaseACLMixin, self).item_acl(item)
Objectify ACL if ES is used or call item.get_acl() if db is used.
entailment
def getitem_es(self, key): """ Override to support ACL filtering. To do so: passes `self.request` to `get_item` and uses `ACLFilterES`. """ from nefertari_guards.elasticsearch import ACLFilterES es = ACLFilterES(self.item_model.__name__) params = { 'id': key, 'request': self.request, } obj = es.get_item(**params) obj.__acl__ = self.item_acl(obj) obj.__parent__ = self obj.__name__ = key return obj
Override to support ACL filtering. To do so: passes `self.request` to `get_item` and uses `ACLFilterES`.
entailment
def convert_schema(raml_schema, mime_type): """ Restructure `raml_schema` to a dictionary that has 'properties' as well as other schema keys/values. The resulting dictionary looks like this:: { "properties": { "field1": { "required": boolean, "type": ..., ...more field options }, ...more properties }, "public_fields": [...], "auth_fields": [...], ...more schema options } :param raml_schema: RAML request body schema. :param mime_type: ContentType of the schema as a string from RAML file. Only JSON is currently supported. """ if mime_type == ContentTypes.JSON: if not isinstance(raml_schema, dict): raise TypeError( 'Schema is not a valid JSON. Please check your ' 'schema syntax.\n{}...'.format(str(raml_schema)[:60])) return raml_schema if mime_type == ContentTypes.TEXT_XML: # Process XML schema pass
Restructure `raml_schema` to a dictionary that has 'properties' as well as other schema keys/values. The resulting dictionary looks like this:: { "properties": { "field1": { "required": boolean, "type": ..., ...more field options }, ...more properties }, "public_fields": [...], "auth_fields": [...], ...more schema options } :param raml_schema: RAML request body schema. :param mime_type: ContentType of the schema as a string from RAML file. Only JSON is currently supported.
entailment
def generate_model_name(raml_resource): """ Generate model name. :param raml_resource: Instance of ramlfications.raml.ResourceNode. """ resource_uri = get_resource_uri(raml_resource).strip('/') resource_uri = re.sub('\W', ' ', resource_uri) model_name = inflection.titleize(resource_uri) return inflection.singularize(model_name).replace(' ', '')
Generate model name. :param raml_resource: Instance of ramlfications.raml.ResourceNode.
entailment
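The pipeline it applies, shown directly with the inflection helpers for two hypothetical resource paths::

    import inflection

    # resource mounted at '/users'
    inflection.singularize(inflection.titleize('users')).replace(' ', '')          # 'User'
    # resource mounted at '/user_profiles'
    inflection.singularize(inflection.titleize('user_profiles')).replace(' ', '')  # 'UserProfile'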
def dynamic_part_name(raml_resource, route_name, pk_field): """ Generate a dynamic part for a resource :raml_resource:. A dynamic part is generated using 2 parts: :route_name: of the resource and the dynamic part of first dynamic child resources. If :raml_resource: has no dynamic child resources, 'id' is used as the 2nd part. E.g. if your dynamic part on route 'stories' is named 'superId' then dynamic part will be 'stories_superId'. :param raml_resource: Instance of ramlfications.raml.ResourceNode for which dynamic part name is being generated. :param route_name: Cleaned name of :raml_resource: :param pk_field: Model Primary Key field name. """ subresources = get_resource_children(raml_resource) dynamic_uris = [res.path for res in subresources if is_dynamic_uri(res.path)] if dynamic_uris: dynamic_part = extract_dynamic_part(dynamic_uris[0]) else: dynamic_part = pk_field return '_'.join([route_name, dynamic_part])
Generate a dynamic part for a resource :raml_resource:. A dynamic part is generated using 2 parts: :route_name: of the resource and the dynamic part of first dynamic child resources. If :raml_resource: has no dynamic child resources, 'id' is used as the 2nd part. E.g. if your dynamic part on route 'stories' is named 'superId' then dynamic part will be 'stories_superId'. :param raml_resource: Instance of ramlfications.raml.ResourceNode for which dynamic part name is being generated. :param route_name: Cleaned name of :raml_resource: :param pk_field: Model Primary Key field name.
entailment
def extract_dynamic_part(uri): """ Extract dynamic url part from :uri: string. :param uri: URI string that may contain dynamic part. """ for part in uri.split('/'): part = part.strip() if part.startswith('{') and part.endswith('}'): return clean_dynamic_uri(part)
Extract dynamic url part from :uri: string. :param uri: URI string that may contain dynamic part.
entailment
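For example, assuming clean_dynamic_uri simply strips the surrounding braces::

    extract_dynamic_part('/stories/{superId}')   # 'superId'
    extract_dynamic_part('/stories')             # None -- no dynamic part found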
def resource_view_attrs(raml_resource, singular=False): """ Generate view method names needed for `raml_resource` view. Collects HTTP method names from resource siblings and dynamic children if exist. Collected methods are then translated to `nefertari.view.BaseView` method names, each of which is used to process a particular HTTP method request. Maps of {HTTP_method: view_method} `collection_methods` and `item_methods` are used to convert collection and item methods respectively. :param raml_resource: Instance of ramlfications.raml.ResourceNode :param singular: Boolean indicating if resource is singular or not """ from .views import collection_methods, item_methods # Singular resource doesn't have collection methods though # it looks like a collection if singular: collection_methods = item_methods siblings = get_resource_siblings(raml_resource) http_methods = [sibl.method.lower() for sibl in siblings] attrs = [collection_methods.get(method) for method in http_methods] # Check if resource has dynamic child resource like collection/{id} # If dynamic child resource exists, add its siblings' methods to attrs, # as both resources are handled by a single view children = get_resource_children(raml_resource) http_submethods = [child.method.lower() for child in children if is_dynamic_uri(child.path)] attrs += [item_methods.get(method) for method in http_submethods] return set(filter(bool, attrs))
Generate view method names needed for `raml_resource` view. Collects HTTP method names from resource siblings and dynamic children if exist. Collected methods are then translated to `nefertari.view.BaseView` method names, each of which is used to process a particular HTTP method request. Maps of {HTTP_method: view_method} `collection_methods` and `item_methods` are used to convert collection and item methods respectively. :param raml_resource: Instance of ramlfications.raml.ResourceNode :param singular: Boolean indicating if resource is singular or not
entailment
def resource_schema(raml_resource): """ Get schema properties of RAML resource :raml_resource:. Must be called with RAML resource that defines body schema. First body that defines schema is used. Schema is converted on return using 'convert_schema'. :param raml_resource: Instance of ramlfications.raml.ResourceNode of POST method. """ # NOTE: Must be called with resource that defines body schema log.info('Searching for model schema') if not raml_resource.body: raise ValueError('RAML resource has no body to setup database ' 'schema from') for body in raml_resource.body: if body.schema: return convert_schema(body.schema, body.mime_type) log.debug('No model schema found.')
Get schema properties of RAML resource :raml_resource:. Must be called with RAML resource that defines body schema. First body that defines schema is used. Schema is converted on return using 'convert_schema'. :param raml_resource: Instance of ramlfications.raml.ResourceNode of POST method.
entailment
def get_static_parent(raml_resource, method=None): """ Get static parent resource of :raml_resource: with HTTP method :method:. :param raml_resource:Instance of ramlfications.raml.ResourceNode. :param method: HTTP method name which matching static resource must have. """ parent = raml_resource.parent while is_dynamic_resource(parent): parent = parent.parent if parent is None: return parent match_method = method is not None if match_method: if parent.method.upper() == method.upper(): return parent else: return parent for res in parent.root.resources: if res.path == parent.path: if res.method.upper() == method.upper(): return res
Get static parent resource of :raml_resource: with HTTP method :method:. :param raml_resource:Instance of ramlfications.raml.ResourceNode. :param method: HTTP method name which matching static resource must have.
entailment
def attr_subresource(raml_resource, route_name): """ Determine if :raml_resource: is an attribute subresource. :param raml_resource: Instance of ramlfications.raml.ResourceNode. :param route_name: Name of the :raml_resource:. """ static_parent = get_static_parent(raml_resource, method='POST') if static_parent is None: return False schema = resource_schema(static_parent) or {} properties = schema.get('properties', {}) if route_name in properties: db_settings = properties[route_name].get('_db_settings', {}) return db_settings.get('type') in ('dict', 'list') return False
Determine if :raml_resource: is an attribute subresource. :param raml_resource: Instance of ramlfications.raml.ResourceNode. :param route_name: Name of the :raml_resource:.
entailment
def singular_subresource(raml_resource, route_name): """ Determine if :raml_resource: is a singular subresource. :param raml_resource: Instance of ramlfications.raml.ResourceNode. :param route_name: Name of the :raml_resource:. """ static_parent = get_static_parent(raml_resource, method='POST') if static_parent is None: return False schema = resource_schema(static_parent) or {} properties = schema.get('properties', {}) if route_name not in properties: return False db_settings = properties[route_name].get('_db_settings', {}) is_obj = db_settings.get('type') == 'relationship' single_obj = not db_settings.get('uselist', True) return is_obj and single_obj
Determine if :raml_resource: is a singular subresource. :param raml_resource: Instance of ramlfications.raml.ResourceNode. :param route_name: Name of the :raml_resource:.
entailment
def is_callable_tag(tag): """ Determine whether :tag: is a valid callable string tag. String is assumed to be valid callable if it starts with '{{' and ends with '}}'. :param tag: String name of tag. """ return (isinstance(tag, six.string_types) and tag.strip().startswith('{{') and tag.strip().endswith('}}'))
Determine whether :tag: is a valid callable string tag. String is assumed to be valid callable if it starts with '{{' and ends with '}}'. :param tag: String name of tag.
entailment
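A minimal standalone version of the same check, assuming Python 3 and dropping the six dependency:

def is_callable_tag(tag):
    # A tag is "callable" when it is a string wrapped in double curly brackets.
    return (isinstance(tag, str)
            and tag.strip().startswith('{{')
            and tag.strip().endswith('}}'))

assert is_callable_tag('{{my_callable}}')
assert is_callable_tag('  {{ users.lower_name }}  ')
assert not is_callable_tag('my_callable')
assert not is_callable_tag(42)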
def resolve_to_callable(callable_name): """ Resolve string :callable_name: to a callable. :param callable_name: String representing callable name as registered in ramses registry or dotted import path of callable. Can be wrapped in double curly brackets, e.g. '{{my_callable}}'. """ from . import registry clean_callable_name = callable_name.replace( '{{', '').replace('}}', '').strip() try: return registry.get(clean_callable_name) except KeyError: try: from zope.dottedname.resolve import resolve return resolve(clean_callable_name) except ImportError: raise ImportError( 'Failed to load callable `{}`'.format(clean_callable_name))
Resolve string :callable_name: to a callable. :param callable_name: String representing callable name as registered in ramses registry or dotted import path of callable. Can be wrapped in double curly brackets, e.g. '{{my_callable}}'.
entailment
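A simplified sketch of the same idea using only importlib, i.e. without the ramses registry or zope.dottedname; the example resolves a standard-library callable:

from importlib import import_module

def resolve_dotted(callable_name):
    # Strip the '{{ }}' wrapper, then import the module and fetch the attribute.
    clean = callable_name.replace('{{', '').replace('}}', '').strip()
    module_path, _, attr = clean.rpartition('.')
    try:
        return getattr(import_module(module_path), attr)
    except (ImportError, AttributeError, ValueError):
        raise ImportError('Failed to load callable `{}`'.format(clean))

json_dumps = resolve_dotted('{{json.dumps}}')
print(json_dumps({'ok': True}))  # {"ok": true}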
def get_resource_siblings(raml_resource): """ Get siblings of :raml_resource:. :param raml_resource: Instance of ramlfications.raml.ResourceNode. """ path = raml_resource.path return [res for res in raml_resource.root.resources if res.path == path]
Get siblings of :raml_resource:. :param raml_resource: Instance of ramlfications.raml.ResourceNode.
entailment
def get_resource_children(raml_resource): """ Get children of :raml_resource:. :param raml_resource: Instance of ramlfications.raml.ResourceNode. """ path = raml_resource.path return [res for res in raml_resource.root.resources if res.parent and res.parent.path == path]
Get children of :raml_resource:. :param raml_resource: Instance of ramlfications.raml.ResourceNode.
entailment
def get_events_map(): """ Prepare map of event subscribers. * Extends copies of BEFORE_EVENTS and AFTER_EVENTS maps with 'set' action. * Returns map of {before/after: {action: event class(es)}} """ from nefertari import events set_keys = ('create', 'update', 'replace', 'update_many', 'register') before_events = events.BEFORE_EVENTS.copy() before_events['set'] = [before_events[key] for key in set_keys] after_events = events.AFTER_EVENTS.copy() after_events['set'] = [after_events[key] for key in set_keys] return { 'before': before_events, 'after': after_events, }
Prepare map of event subscribers. * Extends copies of BEFORE_EVENTS and AFTER_EVENTS maps with 'set' action. * Returns map of {before/after: {action: event class(es)}}
entailment
def patch_view_model(view_cls, model_cls): """ Patches view_cls.Model with model_cls. :param view_cls: View class "Model" param of which should be patched :param model_cls: Model class which should be used to patch view_cls.Model """ original_model = view_cls.Model view_cls.Model = model_cls try: yield finally: view_cls.Model = original_model
Patches view_cls.Model with model_cls. :param view_cls: View class "Model" param of which should be patched :param model_cls: Model class which should be used to patch view_cls.Model
entailment
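The function above yields, so it is presumably wrapped with contextlib.contextmanager elsewhere (the decorator is not visible here). A self-contained sketch with a stand-in class shows the intended patch-and-restore behaviour:

from contextlib import contextmanager

class StoryView:          # stand-in for a generated view class
    Model = 'Story'

@contextmanager
def patch_view_model(view_cls, model_cls):
    original_model = view_cls.Model
    view_cls.Model = model_cls
    try:
        yield
    finally:
        view_cls.Model = original_model

with patch_view_model(StoryView, 'Profile'):
    print(StoryView.Model)   # Profile -- patched inside the block
print(StoryView.Model)       # Story   -- restored afterwards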
def get_route_name(resource_uri): """ Get route name from RAML resource URI. :param resource_uri: String representing RAML resource URI. :returns string: String with route name, which is :resource_uri: stripped of non-word characters. """ resource_uri = resource_uri.strip('/') resource_uri = re.sub(r'\W', '', resource_uri) return resource_uri
Get route name from RAML resource URI. :param resource_uri: String representing RAML resource URI. :returns string: String with route name, which is :resource_uri: stripped of non-word characters.
entailment
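The helper is easy to exercise on its own; a standalone copy (with a raw regex string) behaves as follows:

import re

def get_route_name(resource_uri):
    resource_uri = resource_uri.strip('/')
    return re.sub(r'\W', '', resource_uri)

print(get_route_name('/stories'))         # stories
print(get_route_name('/user-profiles/'))  # userprofiles
print(get_route_name('/{id}'))            # id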
def generate_resource(config, raml_resource, parent_resource): """ Perform complete one resource configuration process This function generates: ACL, view, route, resource, database model for a given `raml_resource`. New nefertari resource is attached to `parent_resource` class which is an instance of `nefertari.resource.Resource`. Things to consider: * Top-level resources must be collection names. * No resources are explicitly created for dynamic (ending with '}') RAML resources as they are implicitly processed by parent collection resources. * Resource nesting must look like collection/id/collection/id/... * Only part of resource path after last '/' is taken into account, thus each level of resource nesting should add one more path element. E.g. /stories -> /stories/{id} and not /stories -> /stories/mystories/{id}. Latter route will be generated at /stories/{id}. :param raml_resource: Instance of ramlfications.raml.ResourceNode. :param parent_resource: Parent nefertari resource object. """ from .models import get_existing_model # Don't generate resources for dynamic routes as they are already # generated by their parent resource_uri = get_resource_uri(raml_resource) if is_dynamic_uri(resource_uri): if parent_resource.is_root: raise Exception("Top-level resources can't be dynamic and must " "represent collections instead") return route_name = get_route_name(resource_uri) log.info('Configuring resource: `{}`. Parent: `{}`'.format( route_name, parent_resource.uid or 'root')) # Get DB model. If this is an attribute or singular resource, # we don't need to get model is_singular = singular_subresource(raml_resource, route_name) is_attr_res = attr_subresource(raml_resource, route_name) if not parent_resource.is_root and (is_attr_res or is_singular): model_cls = parent_resource.view.Model else: model_name = generate_model_name(raml_resource) model_cls = get_existing_model(model_name) resource_kwargs = {} # Generate ACL log.info('Generating ACL for `{}`'.format(route_name)) resource_kwargs['factory'] = generate_acl( config, model_cls=model_cls, raml_resource=raml_resource) # Generate dynamic part name if not is_singular: resource_kwargs['id_name'] = dynamic_part_name( raml_resource=raml_resource, route_name=route_name, pk_field=model_cls.pk_field()) # Generate REST view log.info('Generating view for `{}`'.format(route_name)) view_attrs = resource_view_attrs(raml_resource, is_singular) resource_kwargs['view'] = generate_rest_view( config, model_cls=model_cls, attrs=view_attrs, attr_view=is_attr_res, singular=is_singular, ) # In case of singular resource, model still needs to be generated, # but we store it on a different view attribute if is_singular: model_name = generate_model_name(raml_resource) view_cls = resource_kwargs['view'] view_cls._parent_model = view_cls.Model view_cls.Model = get_existing_model(model_name) # Create new nefertari resource log.info('Creating new resource for `{}`'.format(route_name)) clean_uri = resource_uri.strip('/') resource_args = (singularize(clean_uri),) if not is_singular: resource_args += (clean_uri,) return parent_resource.add(*resource_args, **resource_kwargs)
Perform complete one resource configuration process This function generates: ACL, view, route, resource, database model for a given `raml_resource`. New nefertari resource is attached to `parent_resource` class which is an instance of `nefertari.resource.Resource`. Things to consider: * Top-level resources must be collection names. * No resources are explicitly created for dynamic (ending with '}') RAML resources as they are implicitly processed by parent collection resources. * Resource nesting must look like collection/id/collection/id/... * Only part of resource path after last '/' is taken into account, thus each level of resource nesting should add one more path element. E.g. /stories -> /stories/{id} and not /stories -> /stories/mystories/{id}. Latter route will be generated at /stories/{id}. :param raml_resource: Instance of ramlfications.raml.ResourceNode. :param parent_resource: Parent nefertari resource object.
entailment
def generate_server(raml_root, config): """ Handle server generation process. :param raml_root: Instance of ramlfications.raml.RootNode. :param config: Pyramid Configurator instance. """ log.info('Server generation started') if not raml_root.resources: return root_resource = config.get_root_resource() generated_resources = {} for raml_resource in raml_root.resources: if raml_resource.path in generated_resources: continue # Get Nefertari parent resource parent_resource = _get_nefertari_parent_resource( raml_resource, generated_resources, root_resource) # Get generated resource and store it new_resource = generate_resource( config, raml_resource, parent_resource) if new_resource is not None: generated_resources[raml_resource.path] = new_resource
Handle server generation process. :param raml_root: Instance of ramlfications.raml.RootNode. :param config: Pyramid Configurator instance.
entailment
def generate_models(config, raml_resources): """ Generate model for each resource in :raml_resources: The DB model name is generated using singular titled version of current resource's url. E.g. for resource under url '/stories', model with name 'Story' will be generated. :param config: Pyramid Configurator instance. :param raml_resources: List of ramlfications.raml.ResourceNode. """ from .models import handle_model_generation if not raml_resources: return for raml_resource in raml_resources: # No need to generate models for dynamic resource if is_dynamic_uri(raml_resource.path): continue # Since POST resource must define schema use only POST # resources to generate models if raml_resource.method.upper() != 'POST': continue # Generate DB model # If this is an attribute resource we don't need to generate model resource_uri = get_resource_uri(raml_resource) route_name = get_route_name(resource_uri) if not attr_subresource(raml_resource, route_name): log.info('Configuring model for route `{}`'.format(route_name)) model_cls, is_auth_model = handle_model_generation( config, raml_resource) if is_auth_model: config.registry.auth_model = model_cls
Generate model for each resource in :raml_resources: The DB model name is generated using singular titled version of current resource's url. E.g. for resource under url '/stories', model with name 'Story' will be generated. :param config: Pyramid Configurator instance. :param raml_resources: List of ramlfications.raml.ResourceNode.
entailment
def generate_rest_view(config, model_cls, attrs=None, es_based=True, attr_view=False, singular=False): """ Generate REST view for a model class. :param model_cls: Generated DB model class. :param attrs: List of strings that represent names of view methods the newly generated view should support. Unsupported methods are replaced with a property that raises AttributeError to produce a MethodNotAllowed error. :param es_based: Boolean indicating if generated view should read from elasticsearch. If True - collection reads are performed from elasticsearch. Database is used for reads otherwise. Defaults to True. :param attr_view: Boolean indicating if ItemAttributeView should be used as a base class for generated view. :param singular: Boolean indicating if ItemSingularView should be used as a base class for generated view. """ valid_attrs = (list(collection_methods.values()) + list(item_methods.values())) missing_attrs = set(valid_attrs) - set(attrs) if singular: bases = [ItemSingularView] elif attr_view: bases = [ItemAttributeView] elif es_based: bases = [ESCollectionView] else: bases = [CollectionView] if config.registry.database_acls: from nefertari_guards.view import ACLFilterViewMixin bases = [SetObjectACLMixin] + bases + [ACLFilterViewMixin] bases.append(NefertariBaseView) RESTView = type('RESTView', tuple(bases), {'Model': model_cls}) def _attr_error(*args, **kwargs): raise AttributeError for attr in missing_attrs: setattr(RESTView, attr, property(_attr_error)) return RESTView
Generate REST view for a model class. :param model_cls: Generated DB model class. :param attrs: List of strings that represent names of view methods the newly generated view should support. Unsupported methods are replaced with a property that raises AttributeError to produce a MethodNotAllowed error. :param es_based: Boolean indicating if generated view should read from elasticsearch. If True - collection reads are performed from elasticsearch. Database is used for reads otherwise. Defaults to True. :param attr_view: Boolean indicating if ItemAttributeView should be used as a base class for generated view. :param singular: Boolean indicating if ItemSingularView should be used as a base class for generated view.
entailment
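The interesting part of the view generation is that the class is assembled at runtime with type() and that unsupported methods are masked by a property that raises AttributeError. A toy, self-contained illustration of that pattern (class names here are placeholders, not nefertari classes):

class BaseView:
    def index(self):
        return 'index called'
    def create(self):
        return 'create called'

# Build the view class dynamically, as generate_rest_view does with type().
RESTView = type('RESTView', (BaseView,), {'Model': dict})

def _attr_error(*args, **kwargs):
    raise AttributeError

# Pretend the RAML definition only allows 'index'; mask 'create'.
for name in ('create',):
    setattr(RESTView, name, property(_attr_error))

view = RESTView()
print(view.index())             # index called
print(hasattr(view, 'create'))  # False -- accessing it raises AttributeError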
def set_object_acl(self, obj): """ Set object ACL on creation if not already present. """ if not obj._acl: from nefertari_guards import engine as guards_engine acl = self._factory(self.request).generate_item_acl(obj) obj._acl = guards_engine.ACLField.stringify_acl(acl)
Set object ACL on creation if not already present.
entailment
def resolve_kw(self, kwargs): """ Resolve :kwargs: like `story_id: 1` to the form of `id: 1`. """ resolved = {} for key, value in kwargs.items(): split = key.split('_', 1) if len(split) > 1: key = split[1] resolved[key] = value return resolved
Resolve :kwargs: like `story_id: 1` to the form of `id: 1`.
entailment
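A standalone copy of the helper (without self) shows the prefix stripping:

def resolve_kw(kwargs):
    resolved = {}
    for key, value in kwargs.items():
        split = key.split('_', 1)
        if len(split) > 1:
            key = split[1]
        resolved[key] = value
    return resolved

print(resolve_kw({'story_id': 1}))             # {'id': 1}
print(resolve_kw({'user_username': 'alice'}))  # {'username': 'alice'}
print(resolve_kw({'id': 7}))                   # {'id': 7} -- nothing to strip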
def _location(self, obj): """ Get location of the `obj` Arguments: :obj: self.Model instance. """ field_name = self.clean_id_name return self.request.route_url( self._resource.uid, **{self._resource.id_name: getattr(obj, field_name)})
Get location of the `obj` Arguments: :obj: self.Model instance.
entailment
def _parent_queryset(self): """ Get queryset of parent view. Generated queryset is used to run queries in the current level view. """ parent = self._resource.parent if hasattr(parent, 'view'): req = self.request.blank(self.request.path) req.registry = self.request.registry req.matchdict = { parent.id_name: self.request.matchdict.get(parent.id_name)} parent_view = parent.view(parent.view._factory, req) obj = parent_view.get_item(**req.matchdict) if isinstance(self, ItemSubresourceBaseView): return prop = self._resource.collection_name return getattr(obj, prop, None)
Get queryset of parent view. Generated queryset is used to run queries in the current level view.
entailment
def get_collection(self, **kwargs): """ Get objects collection taking into account generated queryset of parent view. This method allows working with nested resources properly. Thus a queryset returned by this method will be a subset of its parent view's queryset, thus filtering out objects that don't belong to the parent object. """ self._query_params.update(kwargs) objects = self._parent_queryset() if objects is not None: return self.Model.filter_objects( objects, **self._query_params) return self.Model.get_collection(**self._query_params)
Get objects collection taking into account generated queryset of parent view. This method allows working with nested resources properly. Thus a queryset returned by this method will be a subset of its parent view's queryset, thus filtering out objects that don't belong to the parent object.
entailment
def get_item(self, **kwargs): """ Get collection item taking into account generated queryset of parent view. This method allows working with nested resources properly. Thus an item returned by this method will belong to its parent view's queryset, thus filtering out objects that don't belong to the parent object. Returns an object from the applicable ACL. If ACL wasn't applied, it is applied explicitly. """ if six.callable(self.context): self.reload_context(es_based=False, **kwargs) objects = self._parent_queryset() if objects is not None and self.context not in objects: raise JHTTPNotFound('{}({}) not found'.format( self.Model.__name__, self._get_context_key(**kwargs))) return self.context
Get collection item taking into account generated queryset of parent view. This method allows working with nested resources properly. Thus an item returned by this method will belong to its parent view's queryset, thus filtering out objects that don't belong to the parent object. Returns an object from the applicable ACL. If ACL wasn't applied, it is applied explicitly.
entailment
def reload_context(self, es_based, **kwargs): """ Reload `self.context` object into a DB or ES object. A reload is performed by getting the object ID from :kwargs: and then getting a context key item from the new instance of `self._factory` which is an ACL class used by the current view. Arguments: :es_based: Boolean. Whether to init ACL as es-based or not. This affects the backend which will be queried - either DB or ES :kwargs: Kwargs that contain value for current resource 'id_name' key """ from .acl import BaseACL key = self._get_context_key(**kwargs) kwargs = {'request': self.request} if issubclass(self._factory, BaseACL): kwargs['es_based'] = es_based acl = self._factory(**kwargs) if acl.item_model is None: acl.item_model = self.Model self.context = acl[key]
Reload `self.context` object into a DB or ES object. A reload is performed by getting the object ID from :kwargs: and then getting a context key item from the new instance of `self._factory` which is an ACL class used by the current view. Arguments: :es_based: Boolean. Whether to init ACL as es-based or not. This affects the backend which will be queried - either DB or ES :kwargs: Kwargs that contain value for current resource 'id_name' key
entailment
def _parent_queryset_es(self): """ Get queryset (list of object IDs) of parent view. The generated queryset is used to run queries in the current level's view. """ parent = self._resource.parent if hasattr(parent, 'view'): req = self.request.blank(self.request.path) req.registry = self.request.registry req.matchdict = { parent.id_name: self.request.matchdict.get(parent.id_name)} parent_view = parent.view(parent.view._factory, req) obj = parent_view.get_item_es(**req.matchdict) prop = self._resource.collection_name objects_ids = getattr(obj, prop, None) return objects_ids
Get queryset (list of object IDs) of parent view. The generated queryset is used to run queries in the current level's view.
entailment
def get_es_object_ids(self, objects): """ Return IDs of :objects: if they are not IDs already. """ id_field = self.clean_id_name ids = [getattr(obj, id_field, obj) for obj in objects] return list(set(str(id_) for id_ in ids))
Return IDs of :objects: if they are not IDs already.
entailment
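A self-contained sketch of the same normalisation, with the id field name passed as a parameter instead of read from self.clean_id_name:

from collections import namedtuple

Story = namedtuple('Story', ['id'])   # toy stand-in for a model instance

def get_es_object_ids(objects, id_field='id'):
    # Objects expose the id attribute; bare ids pass through unchanged.
    ids = [getattr(obj, id_field, obj) for obj in objects]
    return list(set(str(id_) for id_ in ids))

mixed = [Story(id=1), Story(id=2), '3', 2]
print(sorted(get_es_object_ids(mixed)))   # ['1', '2', '3']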
def get_collection_es(self): """ Get ES objects collection taking into account the generated queryset of parent view. This method allows working with nested resources properly. Thus a queryset returned by this method will be a subset of its parent view's queryset, thus filtering out objects that don't belong to the parent object. """ objects_ids = self._parent_queryset_es() if objects_ids is not None: objects_ids = self.get_es_object_ids(objects_ids) if not objects_ids: return [] self._query_params['id'] = objects_ids return super(ESBaseView, self).get_collection_es()
Get ES objects collection taking into account the generated queryset of parent view. This method allows working with nested resources properly. Thus a queryset returned by this method will be a subset of its parent view's queryset, thus filtering out objects that don't belong to the parent object.
entailment
def get_item_es(self, **kwargs): """ Get ES collection item taking into account generated queryset of parent view. This method allows working with nested resources properly. Thus an item returned by this method will belong to its parent view's queryset, thus filtering out objects that don't belong to the parent object. Returns an object retrieved from the applicable ACL. If an ACL wasn't applied, it is applied explicitly. """ item_id = self._get_context_key(**kwargs) objects_ids = self._parent_queryset_es() if objects_ids is not None: objects_ids = self.get_es_object_ids(objects_ids) if six.callable(self.context): self.reload_context(es_based=True, **kwargs) if (objects_ids is not None) and (item_id not in objects_ids): raise JHTTPNotFound('{}(id={}) resource not found'.format( self.Model.__name__, item_id)) return self.context
Get ES collection item taking into account generated queryset of parent view. This method allows working with nested resources properly. Thus an item returned by this method will belong to its parent view's queryset, thus filtering out objects that don't belong to the parent object. Returns an object retrieved from the applicable ACL. If an ACL wasn't applied, it is applied explicitly.
entailment
def update(self, **kwargs): """ Explicitly reload context with DB usage to get access to complete DB object. """ self.reload_context(es_based=False, **kwargs) return super(ESCollectionView, self).update(**kwargs)
Explicitly reload context with DB usage to get access to complete DB object.
entailment
def delete(self, **kwargs): """ Explicitly reload context with DB usage to get access to complete DB object. """ self.reload_context(es_based=False, **kwargs) return super(ESCollectionView, self).delete(**kwargs)
Explicitly reload context with DB usage to get access to complete DB object.
entailment
def get_dbcollection_with_es(self, **kwargs): """ Get DB objects collection by first querying ES. """ es_objects = self.get_collection_es() db_objects = self.Model.filter_objects(es_objects) return db_objects
Get DB objects collection by first querying ES.
entailment
def delete_many(self, **kwargs): """ Delete multiple objects from collection. First ES is queried, then the results are used to query the DB. This is done to make sure deleted objects are those filtered by ES in the 'index' method (so user deletes what he saw). """ db_objects = self.get_dbcollection_with_es(**kwargs) return self.Model._delete_many(db_objects, self.request)
Delete multiple objects from collection. First ES is queried, then the results are used to query the DB. This is done to make sure deleted objects are those filtered by ES in the 'index' method (so user deletes what he saw).
entailment
def update_many(self, **kwargs): """ Update multiple objects from collection. First ES is queried, then the results are used to query DB. This is done to make sure updated objects are those filtered by ES in the 'index' method (so user updates what he saw). """ db_objects = self.get_dbcollection_with_es(**kwargs) return self.Model._update_many( db_objects, self._json_params, self.request)
Update multiple objects from collection. First ES is queried, then the results are used to query DB. This is done to make sure updated objects are those filtered by ES in the 'index' method (so user updates what he saw).
entailment
def _get_context_key(self, **kwargs): """ Get value of `self._resource.parent.id_name` from :kwargs: """ return str(kwargs.get(self._resource.parent.id_name))
Get value of `self._resource.parent.id_name` from :kwargs:
entailment
def get_item(self, **kwargs): """ Reload context on each access. """ self.reload_context(es_based=False, **kwargs) return super(ItemSubresourceBaseView, self).get_item(**kwargs)
Reload context on each access.
entailment
def setup(app): """Allow this module to be used as a Sphinx extension. This attaches the Sphinx hooks. :type app: sphinx.application.Sphinx """ import sphinxcontrib_django.docstrings import sphinxcontrib_django.roles # Setup both modules at once. They can also be separately imported to # use only fragments of this package. sphinxcontrib_django.docstrings.setup(app) sphinxcontrib_django.roles.setup(app)
Allow this module to be used as a Sphinx extension. This attaches the Sphinx hooks. :type app: sphinx.application.Sphinx
entailment
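To use the hook above, the extension is listed in the Sphinx conf.py; the snippet below is a typical setup, with the project-specific settings module being a hypothetical name:

# conf.py
import os
import sys
import django

extensions = [
    'sphinx.ext.autodoc',
    'sphinxcontrib_django',   # registers the setup(app) hook shown above
]

sys.path.insert(0, os.path.abspath('..'))
os.environ['DJANGO_SETTINGS_MODULE'] = 'myproject.settings'  # hypothetical module
django.setup()   # autodoc needs a configured Django before importing models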
def patch_django_for_autodoc(): """Fix the appearance of some classes in autodoc. This avoids query evaluation. """ # Fix Django's manager appearance ManagerDescriptor.__get__ = lambda self, *args, **kwargs: self.manager # Stop Django from executing DB queries models.QuerySet.__repr__ = lambda self: self.__class__.__name__
Fix the appearance of some classes in autodoc. This avoids query evaluation.
entailment
def setup(app): """Allow this package to be used as a Sphinx extension. This is also called from the top-level ``__init__.py``. :type app: sphinx.application.Sphinx """ from .patches import patch_django_for_autodoc # When running, make sure Django doesn't execute querysets patch_django_for_autodoc() # Generate docstrings for Django model fields # Register the docstring processor with sphinx app.connect('autodoc-process-docstring', improve_model_docstring) # influence skip rules app.connect("autodoc-skip-member", autodoc_skip)
Allow this package to be used as a Sphinx extension. This is also called from the top-level ``__init__.py``. :type app: sphinx.application.Sphinx
entailment
def autodoc_skip(app, what, name, obj, skip, options): """Hook that tells autodoc to include or exclude certain fields. Sadly, it doesn't give a reference to the parent object, so only the ``name`` can be used for referencing. :type app: sphinx.application.Sphinx :param what: The parent type, ``class`` or ``module`` :type what: str :param name: The name of the child method/attribute. :type name: str :param obj: The child value (e.g. a method, dict, or module reference) :param options: The current autodoc settings. :type options: dict .. seealso:: http://www.sphinx-doc.org/en/stable/ext/autodoc.html#event-autodoc-skip-member """ if name in config.EXCLUDE_MEMBERS: return True if name in config.INCLUDE_MEMBERS: return False return skip
Hook that tells autodoc to include or exclude certain fields. Sadly, it doesn't give a reference to the parent object, so only the ``name`` can be used for referencing. :type app: sphinx.application.Sphinx :param what: The parent type, ``class`` or ``module`` :type what: str :param name: The name of the child method/attribute. :type name: str :param obj: The child value (e.g. a method, dict, or module reference) :param options: The current autodoc settings. :type options: dict .. seealso:: http://www.sphinx-doc.org/en/stable/ext/autodoc.html#event-autodoc-skip-member
entailment
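A standalone sketch of the include/exclude rule; the contents of EXCLUDE_MEMBERS and INCLUDE_MEMBERS below are invented examples, the real values live in the package's config module:

EXCLUDE_MEMBERS = {'DoesNotExist', 'MultipleObjectsReturned'}   # assumed
INCLUDE_MEMBERS = {'__init__'}                                  # assumed

def autodoc_skip(name, default_skip):
    if name in EXCLUDE_MEMBERS:
        return True    # never document these
    if name in INCLUDE_MEMBERS:
        return False   # always document these
    return default_skip

print(autodoc_skip('DoesNotExist', False))  # True
print(autodoc_skip('__init__', True))       # False
print(autodoc_skip('save', False))          # False -- falls back to the autodoc default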
def improve_model_docstring(app, what, name, obj, options, lines): """ Hook that improves the autodoc docstrings for Django models. :type app: sphinx.application.Sphinx :param what: The parent type, ``class`` or ``module`` :type what: str :param name: The dotted path to the child method/attribute. :type name: str :param obj: The Python object that is being documented. :param options: The current autodoc settings. :type options: dict :param lines: The current documentation lines :type lines: list """ if what == 'class': _improve_class_docs(app, obj, lines) elif what == 'attribute': _improve_attribute_docs(obj, name, lines) elif what == 'method': _improve_method_docs(obj, name, lines) # Return the extended docstring return lines
Hook that improves the autodoc docstrings for Django models. :type app: sphinx.application.Sphinx :param what: The parent type, ``class`` or ``module`` :type what: str :param name: The dotted path to the child method/attribute. :type name: str :param obj: The Python object that is being documented. :param options: The current autodoc settings. :type options: dict :param lines: The current documentation lines :type lines: list
entailment
def _improve_class_docs(app, cls, lines): """Improve the documentation of a class.""" if issubclass(cls, models.Model): _add_model_fields_as_params(app, cls, lines) elif issubclass(cls, forms.Form): _add_form_fields(cls, lines)
Improve the documentation of a class.
entailment
def _add_model_fields_as_params(app, obj, lines): """Improve the documentation of a Django model subclass. This adds all model fields as parameters to the ``__init__()`` method. :type app: sphinx.application.Sphinx :type lines: list """ for field in obj._meta.get_fields(): try: help_text = strip_tags(force_text(field.help_text)) verbose_name = force_text(field.verbose_name).capitalize() except AttributeError: # e.g. ManyToOneRel continue # Add parameter if help_text: lines.append(u':param %s: %s' % (field.name, help_text)) else: lines.append(u':param %s: %s' % (field.name, verbose_name)) # Add type lines.append(_get_field_type(field)) if 'sphinx.ext.inheritance_diagram' in app.extensions and \ 'sphinx.ext.graphviz' in app.extensions and \ not any('inheritance-diagram::' in line for line in lines): lines.append('.. inheritance-diagram::')
Improve the documentation of a Django model subclass. This adds all model fields as parameters to the ``__init__()`` method. :type app: sphinx.application.Sphinx :type lines: list
entailment
def _add_form_fields(obj, lines): """Improve the documentation of a Django Form class. This highlights the available fields in the form. """ lines.append("**Form fields:**") lines.append("") for name, field in obj.base_fields.items(): field_type = "{}.{}".format(field.__class__.__module__, field.__class__.__name__) tpl = "* ``{name}``: {label} (:class:`~{field_type}`)" lines.append(tpl.format( name=name, field=field, label=field.label or name.replace('_', ' ').title(), field_type=field_type ))
Improve the documentation of a Django Form class. This highlights the available fields in the form.
entailment
def _improve_attribute_docs(obj, name, lines): """Improve the documentation of various attributes. This improves the navigation between related objects. :param obj: the instance of the object to document. :param name: full dotted path to the object. :param lines: expected documentation lines. """ if obj is None: # Happens with form attributes. return if isinstance(obj, DeferredAttribute): # This only points to a field name, not a field. # Get the field by importing the name. cls_path, field_name = name.rsplit('.', 1) model = import_string(cls_path) field = model._meta.get_field(obj.field_name) del lines[:] # lines.clear() is Python 3 only lines.append("**Model field:** {label}".format( label=field.verbose_name )) elif isinstance(obj, _FIELD_DESCRIPTORS): # These del lines[:] lines.append("**Model field:** {label}".format( label=obj.field.verbose_name )) if isinstance(obj, FileDescriptor): lines.append("**Return type:** :class:`~django.db.models.fields.files.FieldFile`") elif PhoneNumberDescriptor is not None and isinstance(obj, PhoneNumberDescriptor): lines.append("**Return type:** :class:`~phonenumber_field.phonenumber.PhoneNumber`") elif isinstance(obj, related_descriptors.ForwardManyToOneDescriptor): # Display a reasonable output for forward descriptors. related_model = obj.field.remote_field.model if isinstance(related_model, str): cls_path = related_model else: cls_path = "{}.{}".format(related_model.__module__, related_model.__name__) del lines[:] lines.append("**Model field:** {label}, " "accesses the :class:`~{cls_path}` model.".format( label=obj.field.verbose_name, cls_path=cls_path )) elif isinstance(obj, related_descriptors.ReverseOneToOneDescriptor): related_model = obj.related.related_model if isinstance(related_model, str): cls_path = related_model else: cls_path = "{}.{}".format(related_model.__module__, related_model.__name__) del lines[:] lines.append("**Model field:** {label}, " "accesses the :class:`~{cls_path}` model.".format( label=obj.related.field.verbose_name, cls_path=cls_path )) elif isinstance(obj, related_descriptors.ReverseManyToOneDescriptor): related_model = obj.rel.related_model if isinstance(related_model, str): cls_path = related_model else: cls_path = "{}.{}".format(related_model.__module__, related_model.__name__) del lines[:] lines.append("**Model field:** {label}, " "accesses the M2M :class:`~{cls_path}` model.".format( label=obj.field.verbose_name, cls_path=cls_path )) elif isinstance(obj, (models.Manager, ManagerDescriptor)): # Somehow the 'objects' manager doesn't pass through the docstrings. module, cls_name, field_name = name.rsplit('.', 2) lines.append("Django manager to access the ORM") tpl = "Use ``{cls_name}.objects.all()`` to fetch all objects." lines.append(tpl.format(cls_name=cls_name))
Improve the documentation of various attributes. This improves the navigation between related objects. :param obj: the instance of the object to document. :param name: full dotted path to the object. :param lines: expected documentation lines.
entailment