code | docstring | _id
---|---|---|
class StorageByKeyName(Storage): <NEW_LINE> <INDENT> @util.positional(4) <NEW_LINE> def __init__(self, model, key_name, property_name, cache=None, user=None): <NEW_LINE> <INDENT> if key_name is None: <NEW_LINE> <INDENT> if user is None: <NEW_LINE> <INDENT> raise ValueError('StorageByKeyName called with no key name or user.') <NEW_LINE> <DEDENT> key_name = user.user_id() <NEW_LINE> <DEDENT> self._model = model <NEW_LINE> self._key_name = key_name <NEW_LINE> self._property_name = property_name <NEW_LINE> self._cache = cache <NEW_LINE> <DEDENT> def _is_ndb(self): <NEW_LINE> <INDENT> if isinstance(self._model, type): <NEW_LINE> <INDENT> if ndb is not None and issubclass(self._model, ndb.Model): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> elif issubclass(self._model, db.Model): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> raise TypeError('Model class not an NDB or DB model: %s.' % (self._model,)) <NEW_LINE> <DEDENT> def _get_entity(self): <NEW_LINE> <INDENT> if self._is_ndb(): <NEW_LINE> <INDENT> return self._model.get_by_id(self._key_name) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self._model.get_by_key_name(self._key_name) <NEW_LINE> <DEDENT> <DEDENT> def _delete_entity(self): <NEW_LINE> <INDENT> if self._is_ndb(): <NEW_LINE> <INDENT> ndb.Key(self._model, self._key_name).delete() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> entity_key = db.Key.from_path(self._model.kind(), self._key_name) <NEW_LINE> db.delete(entity_key) <NEW_LINE> <DEDENT> <DEDENT> @db.non_transactional(allow_existing=True) <NEW_LINE> def locked_get(self): <NEW_LINE> <INDENT> credentials = None <NEW_LINE> if self._cache: <NEW_LINE> <INDENT> json = self._cache.get(self._key_name) <NEW_LINE> if json: <NEW_LINE> <INDENT> credentials = Credentials.new_from_json(json) <NEW_LINE> <DEDENT> <DEDENT> if credentials is None: <NEW_LINE> <INDENT> entity = self._get_entity() <NEW_LINE> if entity is not None: <NEW_LINE> <INDENT> credentials = getattr(entity, self._property_name) <NEW_LINE> if self._cache: <NEW_LINE> <INDENT> self._cache.set(self._key_name, credentials.to_json()) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> if credentials and hasattr(credentials, 'set_store'): <NEW_LINE> <INDENT> credentials.set_store(self) <NEW_LINE> <DEDENT> return credentials <NEW_LINE> <DEDENT> @db.non_transactional(allow_existing=True) <NEW_LINE> def locked_put(self, credentials): <NEW_LINE> <INDENT> entity = self._model.get_or_insert(self._key_name) <NEW_LINE> setattr(entity, self._property_name, credentials) <NEW_LINE> entity.put() <NEW_LINE> if self._cache: <NEW_LINE> <INDENT> self._cache.set(self._key_name, credentials.to_json()) <NEW_LINE> <DEDENT> <DEDENT> @db.non_transactional(allow_existing=True) <NEW_LINE> def locked_delete(self): <NEW_LINE> <INDENT> if self._cache: <NEW_LINE> <INDENT> self._cache.delete(self._key_name) <NEW_LINE> <DEDENT> self._delete_entity() | Store and retrieve a credential to and from the App Engine datastore.
This Storage helper presumes the Credentials have been stored as a
CredentialsProperty or CredentialsNDBProperty on a datastore model class, and
that entities are stored by key_name. | 625990b2627d3e7fe0e08f85 |
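A minimal usage sketch for the `StorageByKeyName` row above, assuming the oauth2client App Engine helpers named in the docstring; the model, key name, and property name are illustrative, and `get()`/`put()` are the base `Storage` wrappers around `locked_get()`/`locked_put()`:

```python
from google.appengine.ext import ndb
from oauth2client.contrib.appengine import CredentialsNDBProperty, StorageByKeyName

class CredentialsModel(ndb.Model):
    credentials = CredentialsNDBProperty()  # hypothetical model for this sketch

storage = StorageByKeyName(CredentialsModel, 'user-123', 'credentials')
creds = storage.get()       # runs locked_get() under the Storage lock
if creds is not None:
    storage.put(creds)      # persists and refreshes the cache entry, if any
```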
class Task(Task): <NEW_LINE> <INDENT> option_list = [ make_option('--max-complexity', dest='max_complexity', default='-1', help='McCabe complexity threshold'), make_option("--pep8-exclude", dest="pep8-exclude", default=pep8.DEFAULT_EXCLUDE + ",migrations", help="exclude files or directories which match these " "comma separated patterns (default: %s)" % pep8.DEFAULT_EXCLUDE), make_option("--pep8-select", dest="pep8-select", help="select errors and warnings (e.g. E,W6)"), make_option("--pep8-ignore", dest="pep8-ignore", help="skip errors and warnings (e.g. E4,W)"), make_option("--pep8-max-line-length", dest="pep8-max-line-length", type='int', help="set maximum allowed line length (default: %d)" % pep8.MAX_LINE_LENGTH), make_option("--pep8-rcfile", dest="pep8-rcfile", help="PEP8 configuration file"), ] <NEW_LINE> def __init__(self, test_labels, options): <NEW_LINE> <INDENT> super(Task, self).__init__(test_labels, options) <NEW_LINE> self.test_all = options['test_all'] <NEW_LINE> self.max_complexity = int(options['max_complexity']) <NEW_LINE> if options.get('flake8_file_output', True): <NEW_LINE> <INDENT> output_dir = options['output_dir'] <NEW_LINE> if not os.path.exists(output_dir): <NEW_LINE> <INDENT> os.makedirs(output_dir) <NEW_LINE> <DEDENT> self.output = open(os.path.join(output_dir, 'flake8.report'), 'w') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.output = sys.stdout <NEW_LINE> <DEDENT> <DEDENT> def teardown_test_environment(self, **kwargs): <NEW_LINE> <INDENT> locations = get_apps_locations(self.test_labels, self.test_all) <NEW_LINE> class JenkinsReport(pep8.BaseReport): <NEW_LINE> <INDENT> def error(instance, line_number, offset, text, check): <NEW_LINE> <INDENT> code = super(JenkinsReport, instance).error( line_number, offset, text, check) <NEW_LINE> if not code: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> sourceline = instance.line_offset + line_number <NEW_LINE> self.output.write('%s:%s:%s: %s\n' % (instance.filename, sourceline, offset + 1, text)) <NEW_LINE> <DEDENT> <DEDENT> pep8style = get_style_guide(parse_argv=False, config_file=self.pep8_rcfile, reporter=JenkinsReport, max_complexity=self.max_complexity, **self.pep8_options) <NEW_LINE> for location in locations: <NEW_LINE> <INDENT> pep8style.input_dir(relpath(location)) <NEW_LINE> <DEDENT> self.output.close() | Runs flake8 on Python files. | 625990b2091ae35668706d48 |
class Keyword(Base): <NEW_LINE> <INDENT> __tablename__ = 'keywords' <NEW_LINE> id = Column(Integer, primary_key=True) <NEW_LINE> keyword = Column(Unicode) <NEW_LINE> keyword_weight = Column(Unicode) | Keyword Model. Populated by harvester.
Keywords from the harvested page and their
number of occurrences as keyword weight. | 625990b2187af65679d2ac72 |
class MimetypePluginManager(RegistrablePluginManager): <NEW_LINE> <INDENT> _default_mimetype_functions = ( mimetype.by_python, mimetype.by_file, mimetype.by_default, ) <NEW_LINE> def clear(self): <NEW_LINE> <INDENT> self._mimetype_functions = list(self._default_mimetype_functions) <NEW_LINE> super(MimetypePluginManager, self).clear() <NEW_LINE> <DEDENT> def get_mimetype(self, path): <NEW_LINE> <INDENT> for fnc in self._mimetype_functions: <NEW_LINE> <INDENT> mime = fnc(path) <NEW_LINE> if mime: <NEW_LINE> <INDENT> return mime <NEW_LINE> <DEDENT> <DEDENT> return mimetype.by_default(path) <NEW_LINE> <DEDENT> def register_mimetype_function(self, fnc): <NEW_LINE> <INDENT> self._mimetype_functions.insert(0, fnc) | Plugin manager for mimetype-function registration. | 625990b2c4546d3d9def832d |
class MapSave(Document): <NEW_LINE> <INDENT> name = StringProperty( verbose_name="Name of the map save", default=None, required=True, ) <NEW_LINE> desc = StringProperty( verbose_name="Description of the map save", default='', required=False, ) <NEW_LINE> version = StringProperty( verbose_name="User defined map version identifier for this save", default='', required=False, ) <NEW_LINE> owners = StringProperty( verbose_name="Map Owners", default=None, required=True, ) <NEW_LINE> created = DateTimeProperty( verbose_name="Date Created", required=True, auto_now_add=True, ) <NEW_LINE> modified = DateTimeProperty( verbose_name="Date Modified", required=False, default=None, auto_now=True, ) | A Minecraft server config file
| 625990b2c4546d3d9def8330 |
class ThreadLocal(local): <NEW_LINE> <INDENT> _init_done = False <NEW_LINE> def __init__(self, **default_factories): <NEW_LINE> <INDENT> if self._init_done: <NEW_LINE> <INDENT> raise SystemError('__init__ called too many times') <NEW_LINE> <DEDENT> self._init_done = True <NEW_LINE> self._default_factories = default_factories <NEW_LINE> <DEDENT> def __getattr__(self, name): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> factory = self._default_factories[name] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> raise AttributeError(name) <NEW_LINE> <DEDENT> obj = factory() <NEW_LINE> setattr(self, name, obj) <NEW_LINE> return obj <NEW_LINE> <DEDENT> def get(self, name, factory, *factory_args, **factory_kwargs): <NEW_LINE> <INDENT> if not hasattr(self, name): <NEW_LINE> <INDENT> obj = factory(*factory_args, **factory_kwargs) <NEW_LINE> setattr(self, name, obj) <NEW_LINE> return obj <NEW_LINE> <DEDENT> return getattr(self, name) | Thread local storage for greenlet state. | 625990b2091ae35668706d55 |
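The default-factory behavior of the `ThreadLocal` row above can be shown directly, since the record is self-contained:

```python
tls = ThreadLocal(counter=int, items=list)   # factories for missing attributes
assert tls.counter == 0            # created lazily by the int() factory
tls.items.append('x')              # list() ran once; the same list is reused
cache = tls.get('cache', dict)     # create-or-return with an explicit factory
assert tls.get('cache', dict) is cache
```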
class InternalTimerService(TimerService): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._current_watermark = None <NEW_LINE> <DEDENT> def current_processing_time(self) -> int: <NEW_LINE> <INDENT> return int(time.time() * 1000) <NEW_LINE> <DEDENT> def current_watermark(self): <NEW_LINE> <INDENT> return self._current_watermark <NEW_LINE> <DEDENT> def set_current_watermark(self, wm): <NEW_LINE> <INDENT> self._current_watermark = wm <NEW_LINE> <DEDENT> def register_processing_time_timer(self, t: int): <NEW_LINE> <INDENT> raise Exception("Register timers is only supported on a keyed stream.") <NEW_LINE> <DEDENT> def register_event_time_timer(self, t: int): <NEW_LINE> <INDENT> raise Exception("Register timers is only supported on a keyed stream.") | Internal implementation of TimerService. | 625990b2187af65679d2ac79 |
class SchLib_V6(object): <NEW_LINE> <INDENT> def __init__(self, filename): <NEW_LINE> <INDENT> with open(filename) as fp: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> self.sexpdata = sexpdata.loads("\n".join(fp.readlines())) <NEW_LINE> if self.sexpdata[0].value() != "kicad_symbol_lib": <NEW_LINE> <INDENT> raise AssertionError <NEW_LINE> <DEDENT> <DEDENT> except AssertionError: <NEW_LINE> <INDENT> sys.stderr.write("The file is not a KiCad V6 Schematic Library File\n") <NEW_LINE> return <NEW_LINE> <DEDENT> <DEDENT> self.filename = filename <NEW_LINE> self.components = [ Component_V6(comp) for comp in find_by_key("symbol", self.sexpdata) ] <NEW_LINE> <DEDENT> def get_field_names(self): <NEW_LINE> <INDENT> field_names = set() <NEW_LINE> for component in self.components: <NEW_LINE> <INDENT> field_names.update(component.get_field_names()) <NEW_LINE> <DEDENT> return list(field_names) <NEW_LINE> <DEDENT> def save(self, backup=True, filename=None): <NEW_LINE> <INDENT> if not filename: <NEW_LINE> <INDENT> filename = self.filename <NEW_LINE> <DEDENT> if backup: <NEW_LINE> <INDENT> create_backup(filename) <NEW_LINE> <DEDENT> with open(filename, "w") as fp: <NEW_LINE> <INDENT> fp.write(sexp_indent(sexpdata.dumps(self.sexpdata))) | A class to parse KiCad V6 schematic symbol libraries. | 625990b2091ae35668706d57 |
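Typical use of the `SchLib_V6` parser above, with a hypothetical library filename:

```python
lib = SchLib_V6('symbols.kicad_sym')   # filename is illustrative
print(lib.get_field_names())           # union of field names across all symbols
lib.save()                             # writes back, creating a backup first
```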
class TransformerEncoderLayer(nn.Module): <NEW_LINE> <INDENT> __constants__ = ['batch_first'] <NEW_LINE> def __init__(self, d_model, nhead, dim_feedforward=2048, dropout=0.1, activation="relu", layer_norm_eps=1e-5, batch_first=False, normalize_before=True, device=None, dtype=None) -> None: <NEW_LINE> <INDENT> factory_kwargs = {'device': device, 'dtype': dtype} <NEW_LINE> super(TransformerEncoderLayer, self).__init__() <NEW_LINE> self.self_attn = MultiheadAttention(d_model, nhead, dropout=dropout) <NEW_LINE> self.linear1 = nn.Linear(d_model, dim_feedforward, **factory_kwargs) <NEW_LINE> self.dropout = nn.Dropout(dropout) <NEW_LINE> self.linear2 = nn.Linear(dim_feedforward, d_model, **factory_kwargs) <NEW_LINE> self.norm1 = nn.LayerNorm(d_model, eps=layer_norm_eps, **factory_kwargs) <NEW_LINE> self.norm2 = nn.LayerNorm(d_model, eps=layer_norm_eps, **factory_kwargs) <NEW_LINE> self.dropout1 = nn.Dropout(dropout) <NEW_LINE> self.dropout2 = nn.Dropout(dropout) <NEW_LINE> self.activation = _get_activation_fn(activation) <NEW_LINE> self.normalize_before = normalize_before <NEW_LINE> <DEDENT> def __setstate__(self, state): <NEW_LINE> <INDENT> if 'activation' not in state: <NEW_LINE> <INDENT> state['activation'] = F.relu <NEW_LINE> <DEDENT> super(TransformerEncoderLayer, self).__setstate__(state) <NEW_LINE> <DEDENT> def forward_post(self, src, src_mask = None): <NEW_LINE> <INDENT> src2 = self.self_attn(src, src, src, attn_mask=src_mask)[0] <NEW_LINE> src = src + self.dropout1(src2) <NEW_LINE> src = self.norm1(src) <NEW_LINE> src2 = self.linear2(self.dropout(self.activation(self.linear1(src)))) <NEW_LINE> src = src + self.dropout2(src2) <NEW_LINE> src = self.norm2(src) <NEW_LINE> return src <NEW_LINE> <DEDENT> def forward_pre(self, src, src_mask = None): <NEW_LINE> <INDENT> src2 = self.norm1(src) <NEW_LINE> src2 = self.self_attn(src2, src2, src2, attn_mask=src_mask)[0] <NEW_LINE> src = src + self.dropout1(src2) <NEW_LINE> src2 = self.norm2(src) <NEW_LINE> src2 = self.linear2(self.dropout(self.activation(self.linear1(src2)))) <NEW_LINE> src = src + self.dropout2(src2) <NEW_LINE> return src <NEW_LINE> <DEDENT> def forward(self, src, src_mask = None, src_key_padding_mask = None): <NEW_LINE> <INDENT> if self.normalize_before: <NEW_LINE> <INDENT> return self.forward_pre(src, src_mask) <NEW_LINE> <DEDENT> return self.forward_post(src, src_mask) | TransformerEncoderLayer copy-paste from torch.nn.TransformerEncoderLayer
with modifications:
* layer norm applied before adding the residual | 625990b2627d3e7fe0e08fa4 |
class DownloadReportResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.DailyReportUrl = None <NEW_LINE> self.ResultReportUrl = None <NEW_LINE> self.DetailReportUrl = None <NEW_LINE> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.DailyReportUrl = params.get("DailyReportUrl") <NEW_LINE> self.ResultReportUrl = params.get("ResultReportUrl") <NEW_LINE> self.DetailReportUrl = params.get("DetailReportUrl") <NEW_LINE> self.RequestId = params.get("RequestId") | DownloadReport response structure. | 625990b2187af65679d2ac7f |
class MoverseConArribaAbajo(Habilidad): <NEW_LINE> <INDENT> def __init__(self, receptor): <NEW_LINE> <INDENT> Habilidad.__init__(self, receptor) <NEW_LINE> pilas.eventos.actualizar.conectar(self.pulsa_tecla) <NEW_LINE> <DEDENT> def pulsa_tecla(self, evento): <NEW_LINE> <INDENT> velocidad = 5 <NEW_LINE> c = pilas.escena_actual().control <NEW_LINE> if c.arriba: <NEW_LINE> <INDENT> self.receptor.y += velocidad <NEW_LINE> <DEDENT> elif c.abajo: <NEW_LINE> <INDENT> self.receptor.y -= velocidad | Makes an actor able to move with the <UP> and <DOWN> directional keys. | 625990b3187af65679d2ac82 |
class TypoScriptCssDataLexer(RegexLexer): <NEW_LINE> <INDENT> name = 'TypoScriptCssData' <NEW_LINE> aliases = ['typoscriptcssdata'] <NEW_LINE> tokens = { 'root': [ (r'(.*)(###\w+###)(.*)', bygroups(String, Name.Constant, String)), (r'(\{)(\$)((?:[\w\-_]+\.)*)([\w\-_]+)(\})', bygroups(String.Symbol, Operator, Name.Constant, Name.Constant, String.Symbol)), (r'(.*)(\{)([\w\-_]+)(\s*:\s*)([\w\-_]+)(\})(.*)', bygroups(String, String.Symbol, Name.Constant, Operator, Name.Constant, String.Symbol, String)), (r'\s+', Text), (r'/\*(?:(?!\*/).)*\*/', Comment), (r'(?<!(#|\'|"))(?:#(?!(?:[a-fA-F0-9]{6}|[a-fA-F0-9]{3}))[^\n#]+|//[^\n]*)', Comment), (r'[<>,:=\.\*%+\|]', String), (r'[\w"_\-!\/&;\(\)\{\}]+', String), ] } | Lexer that highlights markers, constants and registers within css blocks. | 625990b3c4546d3d9def833c |
class Sport(models.Model): <NEW_LINE> <INDENT> ext_id = models.UUIDField(primary_key=True, default=uuid.uuid4, editable=False) <NEW_LINE> name = models.CharField(max_length=50, unique=True) <NEW_LINE> created = models.DateTimeField(auto_now_add=True) <NEW_LINE> updated = models.DateTimeField(auto_now=True) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.name | Model storing a sport. | 625990b3091ae35668706d6b |
class XmlEntry: <NEW_LINE> <INDENT> def __init__(self, title, ns, id, text, username, ipedit, timestamp, editRestriction, moveRestriction, revisionid, comment, redirect): <NEW_LINE> <INDENT> self.title = title <NEW_LINE> self.ns = ns <NEW_LINE> self.id = id <NEW_LINE> self.text = text <NEW_LINE> self.username = username.strip() <NEW_LINE> self.ipedit = ipedit <NEW_LINE> self.timestamp = timestamp <NEW_LINE> self.editRestriction = editRestriction <NEW_LINE> self.moveRestriction = moveRestriction <NEW_LINE> self.revisionid = revisionid <NEW_LINE> self.comment = comment <NEW_LINE> self.isredirect = redirect | Represent a page. | 625990b3187af65679d2ac84 |
class Level: <NEW_LINE> <INDENT> def __init__(self, level): <NEW_LINE> <INDENT> self.Level = level <NEW_LINE> self.Nodes = [] <NEW_LINE> self.Bundles = [] <NEW_LINE> self.IndexLimitNode = None <NEW_LINE> <DEDENT> def AddEntity(self, entity, store): <NEW_LINE> <INDENT> self.Nodes.append(store.GetNode(entity)) <NEW_LINE> <DEDENT> def MakeBundles(self, store): <NEW_LINE> <INDENT> bundles = set() <NEW_LINE> for node in self.Nodes: <NEW_LINE> <INDENT> bundles.update(node.MakeBundles(store)) <NEW_LINE> <DEDENT> self.Bundles = sorted([bundle for bundle in bundles]) <NEW_LINE> <DEDENT> def OrderNodes(self, store): <NEW_LINE> <INDENT> my_level = self.Level <NEW_LINE> remainder = set(self.Nodes) <NEW_LINE> assigned_nodes = set() <NEW_LINE> ordered_nodes = [] <NEW_LINE> def AddNode(node): <NEW_LINE> <INDENT> remainder.discard(node) <NEW_LINE> if node not in assigned_nodes: <NEW_LINE> <INDENT> assigned_nodes.add(node) <NEW_LINE> ordered_nodes.append(node) <NEW_LINE> <DEDENT> <DEDENT> non_local_nodes = dict() <NEW_LINE> for node in self.Nodes: <NEW_LINE> <INDENT> child_bundles = node.Data.ChildBundles <NEW_LINE> if len(child_bundles) != 0: <NEW_LINE> <INDENT> max_child_level = max([bundle.Level for bundle in child_bundles]) <NEW_LINE> if max_child_level != my_level: <NEW_LINE> <INDENT> non_local_nodes.setdefault(max_child_level, []).append(node) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for key in sorted(non_local_nodes.keys(), reverse = True): <NEW_LINE> <INDENT> for node in sorted(non_local_nodes[key]): <NEW_LINE> <INDENT> AddNode(node) <NEW_LINE> <DEDENT> <DEDENT> num_nonlocal_nodes = len(ordered_nodes) <NEW_LINE> for bundle in self.Bundles: <NEW_LINE> <INDENT> parents = [store.GetNode(parent) for parent in bundle.Data.ParentEntities if parent.Level == my_level] <NEW_LINE> for parent in parents: <NEW_LINE> <INDENT> AddNode(parent) <NEW_LINE> <DEDENT> <DEDENT> self.Nodes = ordered_nodes <NEW_LINE> self.Nodes.extend(sorted([node for node in remainder])) <NEW_LINE> if num_nonlocal_nodes != 0: <NEW_LINE> <INDENT> self.IndexLimitNode = min(num_nonlocal_nodes, len(self.Nodes) - 1) <NEW_LINE> <DEDENT> <DEDENT> def Layout(self, pos, config): <NEW_LINE> <INDENT> for node in self.Nodes: <NEW_LINE> <INDENT> pos = node.Layout(pos, config) <NEW_LINE> <DEDENT> x, y = pos <NEW_LINE> x += config.NodeWidth + config.BundleWidth <NEW_LINE> for bundle in reversed(self.Bundles): <NEW_LINE> <INDENT> x = bundle.Layout(x, config) <NEW_LINE> <DEDENT> return (x + config.BundleWidth, y) <NEW_LINE> <DEDENT> def Adjust(self, limit, delta, config): <NEW_LINE> <INDENT> if limit is not None: <NEW_LINE> <INDENT> clip = self.Nodes[0].Y + delta - limit <NEW_LINE> if clip < 0: <NEW_LINE> <INDENT> delta -= clip <NEW_LINE> <DEDENT> <DEDENT> for node in self.Nodes: <NEW_LINE> <INDENT> node.Adjust(delta) <NEW_LINE> <DEDENT> delta = -1000000 <NEW_LINE> for bundle in self.Bundles: <NEW_LINE> <INDENT> delta = bundle.Adjust(delta, config) <NEW_LINE> <DEDENT> limit = None <NEW_LINE> if self.IndexLimitNode is not None: <NEW_LINE> <INDENT> limit = self.Nodes[self.IndexLimitNode].Y + config.NodeSpacing <NEW_LINE> <DEDENT> return limit, delta <NEW_LINE> <DEDENT> def ExtractNodes(self): <NEW_LINE> <INDENT> return [node.Extract() for node in self.Nodes] <NEW_LINE> <DEDENT> def ExtractBundles(self): <NEW_LINE> <INDENT> return [bundle.Extract() for bundle in self.Bundles] | Graphical layout for a vertical group of entities | 625990b3c4546d3d9def833f |
class RandomSampling(Learner): <NEW_LINE> <INDENT> def __init__(self, model): <NEW_LINE> <INDENT> super(RandomSampling, self).__init__(model) | Random sampling learner. | 625990b3187af65679d2ac86 |
class GlossProcessor(BlockProcessor): <NEW_LINE> <INDENT> def test(self, parent, block): <NEW_LINE> <INDENT> return block.split('\n')[0] == GLOSS_START <NEW_LINE> <DEDENT> def run(self, parent, blocks): <NEW_LINE> <INDENT> lines_raw = blocks.pop(0).split('\n')[1:] <NEW_LINE> lines = [] <NEW_LINE> pre = None <NEW_LINE> post = None <NEW_LINE> i = 0 <NEW_LINE> for line in lines_raw: <NEW_LINE> <INDENT> if lines and line.startswith(' ' * INDENT_LENGTH): <NEW_LINE> <INDENT> lines[-1] += line <NEW_LINE> <DEDENT> elif line.strip() == "::": <NEW_LINE> <INDENT> if pre == None: <NEW_LINE> <INDENT> pre = i <NEW_LINE> <DEDENT> elif post == None: <NEW_LINE> <INDENT> post = i <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise SyntaxError("Too many `::` in `gloss` block.") <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> lines.append(line) <NEW_LINE> i += 1 <NEW_LINE> <DEDENT> <DEDENT> if pre == None or post == None: <NEW_LINE> <INDENT> raise SyntaxError("Not enough `::` in `gloss` block.") <NEW_LINE> <DEDENT> div = etree.SubElement(parent, "div") <NEW_LINE> div.set("class", "gloss") <NEW_LINE> for line in lines[:pre]: <NEW_LINE> <INDENT> par = etree.SubElement(div, "p") <NEW_LINE> m = RE_CLASS.match(line) <NEW_LINE> if m is not None: <NEW_LINE> <INDENT> par.set("class", m.group(1)) <NEW_LINE> line = line[m.end():] <NEW_LINE> <DEDENT> par.text = line <NEW_LINE> <DEDENT> columns = [] <NEW_LINE> line = lines[pre] <NEW_LINE> m = RE_CLASS.match(line) <NEW_LINE> cl = None <NEW_LINE> if m is not None: <NEW_LINE> <INDENT> cl = m.group(1) <NEW_LINE> line = line[m.end():] <NEW_LINE> <DEDENT> for word in _parse_gloss_line(line): <NEW_LINE> <INDENT> dl = etree.SubElement(div, "dl") <NEW_LINE> dt = etree.SubElement(dl, "dt") <NEW_LINE> dt.text = word <NEW_LINE> if cl is not None: <NEW_LINE> <INDENT> dt.set("class", cl) <NEW_LINE> <DEDENT> columns.append(dl) <NEW_LINE> <DEDENT> for line in lines[pre+1:post]: <NEW_LINE> <INDENT> m = RE_CLASS.match(line) <NEW_LINE> cl = None <NEW_LINE> if m is not None: <NEW_LINE> <INDENT> cl = m.group(1) <NEW_LINE> line = line[m.end():] <NEW_LINE> <DEDENT> words = _parse_gloss_line(line) <NEW_LINE> for i, dl in enumerate(columns): <NEW_LINE> <INDENT> if i < len(words): <NEW_LINE> <INDENT> dd = etree.SubElement(dl, "dd") <NEW_LINE> dd.text = words[i] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> etree.SubElement(dl, "dd") <NEW_LINE> <DEDENT> if cl is not None: <NEW_LINE> <INDENT> dd.set("class", cl) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> for line in lines[post:]: <NEW_LINE> <INDENT> par = etree.SubElement(div, "p") <NEW_LINE> m = RE_CLASS.match(line) <NEW_LINE> if m is not None: <NEW_LINE> <INDENT> par.set("class", m.group(1)) <NEW_LINE> line = line[m.end():] <NEW_LINE> <DEDENT> par.text = line | Processes interlinear glosses. | 625990b3091ae35668706d77 |
class TestTeamSPRatingSpecialTeams(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def tearDown(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def testTeamSPRatingSpecialTeams(self): <NEW_LINE> <INDENT> pass | TeamSPRatingSpecialTeams unit test stubs | 625990b3627d3e7fe0e08fbb |
class CanaryCamera(Camera): <NEW_LINE> <INDENT> def __init__(self, hass, data, location, device, timeout, ffmpeg_args): <NEW_LINE> <INDENT> super().__init__() <NEW_LINE> self._ffmpeg = hass.data[DATA_FFMPEG] <NEW_LINE> self._ffmpeg_arguments = ffmpeg_args <NEW_LINE> self._data = data <NEW_LINE> self._location = location <NEW_LINE> self._device = device <NEW_LINE> self._timeout = timeout <NEW_LINE> self._live_stream_session = None <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._device.name <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_recording(self): <NEW_LINE> <INDENT> return self._location.is_recording <NEW_LINE> <DEDENT> @property <NEW_LINE> def motion_detection_enabled(self): <NEW_LINE> <INDENT> return not self._location.is_recording <NEW_LINE> <DEDENT> async def async_camera_image(self): <NEW_LINE> <INDENT> self.renew_live_stream_session() <NEW_LINE> ffmpeg = ImageFrame(self._ffmpeg.binary, loop=self.hass.loop) <NEW_LINE> image = await asyncio.shield( ffmpeg.get_image( self._live_stream_session.live_stream_url, output_format=IMAGE_JPEG, extra_cmd=self._ffmpeg_arguments, ) ) <NEW_LINE> return image <NEW_LINE> <DEDENT> async def handle_async_mjpeg_stream(self, request): <NEW_LINE> <INDENT> if self._live_stream_session is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> stream = CameraMjpeg(self._ffmpeg.binary, loop=self.hass.loop) <NEW_LINE> await stream.open_camera( self._live_stream_session.live_stream_url, extra_cmd=self._ffmpeg_arguments ) <NEW_LINE> try: <NEW_LINE> <INDENT> stream_reader = await stream.get_reader() <NEW_LINE> return await async_aiohttp_proxy_stream( self.hass, request, stream_reader, self._ffmpeg.ffmpeg_stream_content_type, ) <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> await stream.close() <NEW_LINE> <DEDENT> <DEDENT> @Throttle(MIN_TIME_BETWEEN_SESSION_RENEW) <NEW_LINE> def renew_live_stream_session(self): <NEW_LINE> <INDENT> self._live_stream_session = self._data.get_live_stream_session(self._device) | An implementation of a Canary security camera. | 625990b3091ae35668706d7b |
class StudentDiscussionDetail(generics.RetrieveAPIView): <NEW_LINE> <INDENT> serializer_class = StudentDiscussionSerializer <NEW_LINE> permission_classes = (IsAdminUser, IsStudent, ) <NEW_LINE> authentication_classes = (SessionAuthentication, BasicAuthentication, OAuth2Authentication) <NEW_LINE> def get_object(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> student_id = self.kwargs['student_id'] <NEW_LINE> list = get_count_student(student_id) <NEW_LINE> list['user_id'] <NEW_LINE> return list <NEW_LINE> <DEDENT> except: <NEW_LINE> <INDENT> raise Http404 | **Use Case**
Get count of discussions and questions for a specific student.
**Example Requests**
GET /api/courses/v2/discussions/students/{student_id}
**Response Values**
On success with Response Code <200>
* user_id: The unique identifier for the student.
* count:
* discussion: Count of discussions by the student
* question: Count of questions asked by the student
**Error Responses**
* Response Code <404> STUDENT NOT FOUND
* Response Code <403> FORBIDDEN | 625990b3627d3e7fe0e08fbd |
class TestCityDocs(unittest.TestCase): <NEW_LINE> <INDENT> def test_class_doc(self): <NEW_LINE> <INDENT> self.assertTrue(len(City.__doc__) > 0) | Check that the City class is documented. | 625990b3187af65679d2ac8f |
class AwsDedicatedHost(resource.BaseResource): <NEW_LINE> <INDENT> def __init__(self, machine_type, zone): <NEW_LINE> <INDENT> super(AwsDedicatedHost, self).__init__() <NEW_LINE> self.machine_type = machine_type <NEW_LINE> self.zone = zone <NEW_LINE> self.region = util.GetRegionFromZone(self.zone) <NEW_LINE> self.client_token = str(uuid.uuid4()) <NEW_LINE> self.id = None <NEW_LINE> <DEDENT> def _Create(self): <NEW_LINE> <INDENT> create_cmd = util.AWS_PREFIX + [ 'ec2', 'allocate-hosts', '--region=%s' % self.region, '--client-token=%s' % self.client_token, '--instance-type=%s' % self.machine_type, '--availability-zone=%s' % self.zone, '--auto-placement=off', '--quantity=1'] <NEW_LINE> vm_util.IssueCommand(create_cmd) <NEW_LINE> <DEDENT> def _Delete(self): <NEW_LINE> <INDENT> if self.id: <NEW_LINE> <INDENT> delete_cmd = util.AWS_PREFIX + [ 'ec2', 'release-hosts', '--region=%s' % self.region, '--host-ids=%s' % self.id] <NEW_LINE> vm_util.IssueCommand(delete_cmd) <NEW_LINE> <DEDENT> <DEDENT> @vm_util.Retry() <NEW_LINE> def _Exists(self): <NEW_LINE> <INDENT> describe_cmd = util.AWS_PREFIX + [ 'ec2', 'describe-hosts', '--region=%s' % self.region, '--filter=Name=client-token,Values=%s' % self.client_token] <NEW_LINE> stdout, _, _ = vm_util.IssueCommand(describe_cmd) <NEW_LINE> response = json.loads(stdout) <NEW_LINE> hosts = response['Hosts'] <NEW_LINE> assert len(hosts) < 2, 'Too many hosts.' <NEW_LINE> if not hosts: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> host = hosts[0] <NEW_LINE> self.id = host['HostId'] <NEW_LINE> state = host['State'] <NEW_LINE> assert state in KNOWN_HOST_STATES, state <NEW_LINE> return state in HOST_EXISTS_STATES | Object representing an AWS host.
Attributes:
region: The AWS region of the host.
zone: The AWS availability zone of the host.
machine_type: The machine type of VMs that may be created on the host.
client_token: A uuid that makes the creation request idempotent.
id: The host_id of the host. | 625990b4187af65679d2ac93 |
class AccountInvoiceRefund(models.TransientModel): <NEW_LINE> <INDENT> _inherit = "account.invoice.refund" <NEW_LINE> @api.multi <NEW_LINE> def compute_refund(self, mode='refund'): <NEW_LINE> <INDENT> return super(AccountInvoiceRefund, self.with_context(of_mode=mode)).compute_refund(mode) | Refunds invoice | 625990b4187af65679d2ac99 |
class PyCFormView(object): <NEW_LINE> <INDENT> def __new__(cls): <NEW_LINE> <INDENT> raise Exception('This class is just for typing and cannot be instantiated!') <NEW_LINE> <DEDENT> def OnCommand(self,wparam:'Any',lparam:'Any') -> 'None': <NEW_LINE> <INDENT> pass | A class which implements a CFormView (i.e., a view based on a dialog resource). | 625990b4187af65679d2ac9b |
class MainWindow(Gtk.ApplicationWindow): <NEW_LINE> <INDENT> def __init__(self,app): <NEW_LINE> <INDENT> Gtk.Window.__init__(self, title="google2ubuntu-manager",application=app) <NEW_LINE> self.set_default_size(660, 400) <NEW_LINE> self.set_resizable(True) <NEW_LINE> self.set_border_width(0) <NEW_LINE> self.get_focus() <NEW_LINE> self.set_position(Gtk.WindowPosition.CENTER) <NEW_LINE> path = os.path.dirname(os.path.abspath(__file__)).strip('librairy') <NEW_LINE> self.set_default_icon_from_file(path+'/resources/icons.png') <NEW_LINE> button_config = Gtk.ToolButton.new_from_stock(Gtk.STOCK_PREFERENCES) <NEW_LINE> button_config.set_label(_("Setup")) <NEW_LINE> button_config.set_is_important(True) <NEW_LINE> button_config.set_tooltip_text(_('Open setup window')) <NEW_LINE> button_config.show() <NEW_LINE> button_config.connect("clicked",self.change_page,1) <NEW_LINE> button_back = Gtk.Button.new_from_stock(Gtk.STOCK_OK) <NEW_LINE> button_back.connect("clicked",self.change_page,0) <NEW_LINE> content = add_window(button_config) <NEW_LINE> label_main = Gtk.Label("main") <NEW_LINE> config = SetupWindow(button_back) <NEW_LINE> label_config = Gtk.Label("config") <NEW_LINE> self.notebook = Gtk.Notebook.new() <NEW_LINE> self.notebook.set_show_tabs(False) <NEW_LINE> self.notebook.append_page(content.get_grid(),label_main) <NEW_LINE> self.notebook.append_page(config.getGrid(),label_config) <NEW_LINE> self.add(self.notebook) <NEW_LINE> self.show_all() <NEW_LINE> <DEDENT> def change_page(self,button,page): <NEW_LINE> <INDENT> self.notebook.set_current_page(page) | @description: This class displays the main window that the user will see when managing their commands | 625990b4c4546d3d9def8355 |
class FormlessBrowsableAPIRenderer(renderers.BrowsableAPIRenderer): <NEW_LINE> <INDENT> def show_form_for_method(self, *args, **kwargs): <NEW_LINE> <INDENT> return False | An instance of the browseable API with forms suppressed. Useful for POST endpoints that don't create objects. | 625990b4187af65679d2ac9c |
class StructureFormatError(Exception): <NEW_LINE> <INDENT> def __init__(self, msg): <NEW_LINE> <INDENT> self.msg = msg <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return repr(self.msg) | Exception raised for structure format errors.
Parameters
----------
msg : str
Error message. | 625990b4627d3e7fe0e08fe3 |
class TweetsFetcher(): <NEW_LINE> <INDENT> def __init__(self, tweetsCsvFile, outputDir, tweetsPerXml): <NEW_LINE> <INDENT> self.__process = None <NEW_LINE> self.tweetsCsvFile = tweetsCsvFile <NEW_LINE> self.outputDir = outputDir <NEW_LINE> self.__cacheDir = settings.XML_CACHE_DIR <NEW_LINE> self.__canceled = False <NEW_LINE> self.__tweetsPerXml = tweetsPerXml <NEW_LINE> self.__updateListeners = [] <NEW_LINE> <DEDENT> def addListener(self, listener): <NEW_LINE> <INDENT> self.__updateListeners.append(listener) <NEW_LINE> <DEDENT> def fetch(self): <NEW_LINE> <INDENT> thread = threading.Thread(target=self.__startJar) <NEW_LINE> thread.start() <NEW_LINE> <DEDENT> def __startJar(self): <NEW_LINE> <INDENT> argsStr = "java -jar " + settings.TWORPUS_FETCHAR_JAR + " -input-file " + self.tweetsCsvFile + " -xml-cache-folder " + self.__cacheDir + " -xml-output-folder " + self.outputDir + " -split-after " + str(self.__tweetsPerXml) <NEW_LINE> argsStr = argsStr.replace("\\", "/") <NEW_LINE> args = shlex.split(argsStr) <NEW_LINE> self.__process = subprocess.Popen(args, shell=False, stdout=subprocess.PIPE) <NEW_LINE> while True: <NEW_LINE> <INDENT> line = self.__process.stdout.readline() <NEW_LINE> if not line: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> values = self.parseDownloadProgressFromLine(line) <NEW_LINE> if values is not None: <NEW_LINE> <INDENT> if values["result"] == "success": <NEW_LINE> <INDENT> for listener in self.__updateListeners: <NEW_LINE> <INDENT> listener.onSuccess(values) <NEW_LINE> <DEDENT> <DEDENT> elif values["result"] == "error": <NEW_LINE> <INDENT> for listener in self.__updateListeners: <NEW_LINE> <INDENT> listener.onError(values) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> sys.stdout.flush() <NEW_LINE> <DEDENT> self.__process.communicate() <NEW_LINE> self.__onFinish() if self.__canceled is not True else self.__onCancel() <NEW_LINE> <DEDENT> def parseDownloadProgressFromLine(self, line): <NEW_LINE> <INDENT> line = str(line) <NEW_LINE> if not line.startswith("Fetch:"): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> line = line.strip("Fetch:").strip("\n") <NEW_LINE> values = line.split(",") <NEW_LINE> result = dict() <NEW_LINE> for val in values: <NEW_LINE> <INDENT> tupel = val.split("=") <NEW_LINE> result[str(tupel[0])] = tupel[1] <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def cancel(self): <NEW_LINE> <INDENT> self.__canceled = True <NEW_LINE> if self.__process is not None: <NEW_LINE> <INDENT> os.kill(self.__process.pid, signal.SIGTERM) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__onCancel() <NEW_LINE> <DEDENT> <DEDENT> def __onFinish(self): <NEW_LINE> <INDENT> self.__process = None <NEW_LINE> for listener in self.__updateListeners: <NEW_LINE> <INDENT> listener.onFinish() <NEW_LINE> <DEDENT> <DEDENT> def __onCancel(self): <NEW_LINE> <INDENT> for listener in self.__updateListeners: <NEW_LINE> <INDENT> listener.onCancel() | Fetches and merges tweets as XML file(s).
The processing is done by a jar file launched via subprocess. | 625990b4187af65679d2ac9f |
class ScoreBoard(object): <NEW_LINE> <INDENT> def __init__(self, capacity=10): <NEW_LINE> <INDENT> self._board = [None]*capacity <NEW_LINE> self._len = 0 <NEW_LINE> <DEDENT> @property <NEW_LINE> def len(self): <NEW_LINE> <INDENT> return self._len <NEW_LINE> <DEDENT> def __getitem__(self, k): <NEW_LINE> <INDENT> return self._board[k] <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '\n'.join(str(self._board[j]) for j in range(self._len)) <NEW_LINE> <DEDENT> def add(self, entry): <NEW_LINE> <INDENT> score = entry.score <NEW_LINE> good = self._len < len(self._board) or score > self._board[-1].score <NEW_LINE> if good: <NEW_LINE> <INDENT> if self._len < len(self._board): <NEW_LINE> <INDENT> self._len += 1 <NEW_LINE> <DEDENT> j = self._len - 1 <NEW_LINE> while j > 0 and self._board[j-1].score < score: <NEW_LINE> <INDENT> self._board[j] = self._board[j-1] <NEW_LINE> j -= 1 <NEW_LINE> <DEDENT> self._board[j] = entry | High score board. | 625990b4091ae35668706da5 |
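`ScoreBoard.add()` above performs one insertion-sort step: an entry is accepted when the board has room or its score beats the current minimum, then lower scores shift down. A sketch, assuming any entry object that exposes a `score` attribute:

```python
from collections import namedtuple

Entry = namedtuple('Entry', ['name', 'score'])   # assumed entry shape

board = ScoreBoard(capacity=3)
for name, score in [('Ann', 120), ('Bob', 95), ('Cid', 150), ('Dee', 100)]:
    board.add(Entry(name, score))
print(board)   # Cid (150), Ann (120), Dee (100); Bob's 95 was pushed off
```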
class FeatureSerializer(object): <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def deserialize(path): <NEW_LINE> <INDENT> serializedFeatures = {} <NEW_LINE> try: <NEW_LINE> <INDENT> with open(path) as file: <NEW_LINE> <INDENT> serializedFeatures = decode(file.read()) <NEW_LINE> <DEDENT> <DEDENT> except Exception: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return serializedFeatures <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def serialize(path, features): <NEW_LINE> <INDENT> with open(path, 'w') as featuresFile: <NEW_LINE> <INDENT> featuresFile.write(json.dumps(features, cls=MyJSONEncoder)) | Serialize and deserialize features from a path | 625990b5627d3e7fe0e08fe7 |
class RetrieveAgentAppsByTagId(RetrieveAppsByTagId): <NEW_LINE> <INDENT> def __init__(self, username, customer_name, tag_id, uri=None, method=None, count=30, offset=0, sort='asc', sort_key=AgentAppsKey.Name, show_hidden=CommonKeys.NO): <NEW_LINE> <INDENT> self.count = count <NEW_LINE> self.offset = offset <NEW_LINE> self.customer_name = customer_name <NEW_LINE> self.username = username <NEW_LINE> self.uri = uri <NEW_LINE> self.method = method <NEW_LINE> self.tag_id = tag_id <NEW_LINE> self.CurrentAppsCollection = AppCollections.vFenseApps <NEW_LINE> self.CurrentAppsIndexes = AgentAppsIndexes <NEW_LINE> self.CurrentAppsPerAgentCollection = AppCollections.vFenseAppsPerAgent <NEW_LINE> self.CurrentAppsKey = AgentAppsKey <NEW_LINE> self.CurrentAppsPerAgentKey = AgentAppsPerAgentKey <NEW_LINE> self.CurrentAppsPerAgentIndexes = AgentAppsPerAgentIndexes <NEW_LINE> self.pluck_list = ( [ self.CurrentAppsKey.AppId, self.CurrentAppsKey.Version, self.CurrentAppsKey.Name, self.CurrentAppsKey.Hidden, self.CurrentAppsPerAgentKey.Update, self.CurrentAppsPerAgentKey.Dependencies, self.CurrentAppsKey.ReleaseDate, self.CurrentAppsKey.RebootRequired, self.CurrentAppsPerAgentKey.InstallDate, self.CurrentAppsPerAgentKey.Status, self.CurrentAppsKey.RvSeverity, self.CurrentAppsKey.FilesDownloadStatus, ] ) <NEW_LINE> self.map_hash = ( { self.CurrentAppsKey.AppId: r.row['right'][self.CurrentAppsKey.AppId], self.CurrentAppsKey.Version: r.row['right'][self.CurrentAppsKey.Version], self.CurrentAppsKey.Name: r.row['right'][self.CurrentAppsKey.Name], self.CurrentAppsKey.Hidden: r.row['right'][self.CurrentAppsKey.Hidden], self.CurrentAppsPerAgentKey.Update: r.row['left']['right'][self.CurrentAppsPerAgentKey.Update], self.CurrentAppsPerAgentKey.Dependencies: r.row['left']['right'][self.CurrentAppsPerAgentKey.Dependencies], self.CurrentAppsKey.ReleaseDate: r.row['right'][self.CurrentAppsKey.ReleaseDate].to_epoch_time(), self.CurrentAppsPerAgentKey.InstallDate: r.row['left']['right'][self.CurrentAppsPerAgentKey.InstallDate].to_epoch_time(), self.CurrentAppsPerAgentKey.Status: r.row['left']['right'][self.CurrentAppsPerAgentKey.Status], self.CurrentAppsKey.RvSeverity: r.row['right'][self.CurrentAppsKey.RvSeverity], self.CurrentAppsKey.RebootRequired: r.row['right'][self.CurrentAppsKey.RebootRequired], self.CurrentAppsKey.FilesDownloadStatus: r.row['right'][self.CurrentAppsKey.FilesDownloadStatus], } ) <NEW_LINE> if show_hidden in CommonAppKeys.ValidHiddenVals: <NEW_LINE> <INDENT> self.show_hidden = show_hidden <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.show_hidden = CommonKeys.NO <NEW_LINE> <DEDENT> if sort_key in self.pluck_list: <NEW_LINE> <INDENT> self.sort_key = sort_key <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.sort_key = self.CurrentAppsKey.Name <NEW_LINE> <DEDENT> if sort == 'asc': <NEW_LINE> <INDENT> self.sort = r.asc <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.sort = r.desc | This class is used to get tag data from within the Packages Page | 625990b5187af65679d2aca1 |
class VmvlancreationmodeEnum(Enum): <NEW_LINE> <INDENT> automatic = 1 <NEW_LINE> manual = 2 <NEW_LINE> @staticmethod <NEW_LINE> def _meta_info(): <NEW_LINE> <INDENT> from ydk.models.cisco_ios_xe._meta import _CISCO_VLAN_MEMBERSHIP_MIB as meta <NEW_LINE> return meta._meta_table['CiscoVlanMembershipMib.Vmmembership.VmvlancreationmodeEnum'] | VmvlancreationmodeEnum
This object is used to determine whether or not
a non-existing VLAN will be created automatically
by the system after being assigned to a port.
automatic(1): a non-existing VLAN will be created
automatically by the system after being
assigned to a port.
manual(2): a non-existing VLAN will not be created
automatically by the system and needs to be
manually created by the users after being assigned
to a port.
.. data:: automatic = 1
.. data:: manual = 2 | 625990b5187af65679d2aca3 |
class Example(QtGui.QWidget): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(QtGui.QWidget, self).__init__() <NEW_LINE> self.file = "" <NEW_LINE> self.save_file = "" <NEW_LINE> self.lbl = None <NEW_LINE> self.lbl_result = None <NEW_LINE> self.pbar = None <NEW_LINE> self.btn = None <NEW_LINE> self.step = 0 <NEW_LINE> self.data_send = 0 <NEW_LINE> self.total_data = 0 <NEW_LINE> self.init_ui() <NEW_LINE> self.to_set = 0 <NEW_LINE> <DEDENT> def tempo(self): <NEW_LINE> <INDENT> step = int(self.data_send/self.total_data) <NEW_LINE> if step >= 100: <NEW_LINE> <INDENT> self.timer.stop() <NEW_LINE> return <NEW_LINE> <DEDENT> self.pbar.setValue(step) <NEW_LINE> <DEDENT> def init_ui(self): <NEW_LINE> <INDENT> self.lbl = QtGui.QLabel('No file selected', self) <NEW_LINE> self.lbl.setGeometry(0, 0, 300, 20) <NEW_LINE> self.lbl_result = QtGui.QLabel("0/0", self) <NEW_LINE> self.btn = QtGui.QPushButton("Choose file", self) <NEW_LINE> self.btn.clicked.connect(self.show_dialog) <NEW_LINE> self.btn.setGeometry(120, 100, 100, 50) <NEW_LINE> self.pbar = QtGui.QProgressBar(self) <NEW_LINE> self.timer = QtCore.QBasicTimer() <NEW_LINE> self.btn.clicked.connect(self.tempo) <NEW_LINE> self.pbar.setGeometry(10, 400, 480, 50) <NEW_LINE> self.pbar.setValue(0) <NEW_LINE> self.lbl_result.setGeometry(0, 450, 500, 50) <NEW_LINE> self.lbl_result.setAlignment(QtCore.Qt.AlignCenter) <NEW_LINE> self.setGeometry(300, 300, 500, 500) <NEW_LINE> <DEDENT> def timerEvent(self, e): <NEW_LINE> <INDENT> if self.to_set >= 100: <NEW_LINE> <INDENT> self.timer.stop() <NEW_LINE> self.lbl_result.setText("Transfer complete") <NEW_LINE> return <NEW_LINE> <DEDENT> self.to_set = int((self.data_send/self.total_data)*100) <NEW_LINE> self.pbar.setValue(self.to_set) <NEW_LINE> self.lbl_result.setText(str(self.data_send) + "/" + str(self.total_data)) <NEW_LINE> <DEDENT> def tempo(self): <NEW_LINE> <INDENT> if self.timer.isActive(): <NEW_LINE> <INDENT> self.timer.stop() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.timer.start(100, self) <NEW_LINE> <DEDENT> <DEDENT> def show_dialog(self): <NEW_LINE> <INDENT> file = QtGui.QFileDialog.getOpenFileName(QtGui.QFileDialog(), 'Open file', '/home') <NEW_LINE> save_file = QtGui.QFileDialog.getExistingDirectory(QtGui.QFileDialog(), 'Save file directory', '/home', QtGui.QFileDialog.ShowDirsOnly) <NEW_LINE> self.file = file <NEW_LINE> self.save_file = save_file <NEW_LINE> if self.file: <NEW_LINE> <INDENT> self.lbl.setText(self.file) <NEW_LINE> self.total_data = os.stat(self.file).st_size | Create a window with a label, a progress
bar and a button that opens a file dialog window | 625990b5627d3e7fe0e08fed |
class Fields: <NEW_LINE> <INDENT> bottom: int <NEW_LINE> top: int <NEW_LINE> def __init__(self, txt: Text): <NEW_LINE> <INDENT> if '-' in txt: <NEW_LINE> <INDENT> self.bottom, self.top = map(int, txt.split('-')) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.bottom = self.top = int(txt) <NEW_LINE> <DEDENT> self.top += 1 <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return (n for n in range(self.bottom, self.top)) | A range of fields; '2' is the single field 2, and '3-5' is fields 3 through 5 (inclusive). | 625990b5091ae35668706daf |
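Because the `Fields` row above is self-contained, its parsing rules can be demonstrated directly:

```python
assert list(Fields('3-5')) == [3, 4, 5]   # inclusive range
assert list(Fields('2')) == [2]           # single field
```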
class SchemaElement: <NEW_LINE> <INDENT> def __init__(self, el): <NEW_LINE> <INDENT> assert el is not None <NEW_LINE> self.el = el <NEW_LINE> self.uri = None <NEW_LINE> self.name = None <NEW_LINE> self.id = None <NEW_LINE> self.type = None <NEW_LINE> self.label = None <NEW_LINE> self.verbose_label = None <NEW_LINE> self.calcTo = [] <NEW_LINE> self.sorted = False <NEW_LINE> self.parents = [] <NEW_LINE> self.child_elements = [] <NEW_LINE> <DEDENT> def setLabel(self, role, text): <NEW_LINE> <INDENT> if role == label_role: <NEW_LINE> <INDENT> self.label = text <NEW_LINE> <DEDENT> elif role == verboseLabel_role: <NEW_LINE> <INDENT> self.verbose_label = text <NEW_LINE> <DEDENT> <DEDENT> def getLabel(self): <NEW_LINE> <INDENT> if self.verbose_label is None and self.label is None: <NEW_LINE> <INDENT> assert self.uri in ['http://www.xbrl.org/2003/instance', 'http://www.w3.org/2001/XMLSchema'] <NEW_LINE> <DEDENT> return self.name, self.label, self.verbose_label | An item in the schema file (the vocabulary/taxonomy schema). | 625990b5627d3e7fe0e08ff1 |
class ProjectsService(base_api.BaseApiService): <NEW_LINE> <INDENT> _NAME = u'projects' <NEW_LINE> def __init__(self, client): <NEW_LINE> <INDENT> super(ApheleiaV1beta1.ProjectsService, self).__init__(client) <NEW_LINE> self._method_configs = { } <NEW_LINE> self._upload_configs = { } | Service class for the projects resource. | 625990b5091ae35668706db1 |
class Unserializer(PdbHandler): <NEW_LINE> <INDENT> def __init__(self, include_pats=None, exclude_pats=None, custom_steps=None): <NEW_LINE> <INDENT> PdbHandler.__init__(self) <NEW_LINE> self.projects = {} <NEW_LINE> self.first_project = None <NEW_LINE> if include_pats: <NEW_LINE> <INDENT> self.include_pats = set(include_pats) <NEW_LINE> <DEDENT> if exclude_pats: <NEW_LINE> <INDENT> self.exclude_pats = set(exclude_pats) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.exclude_pats = set([]) <NEW_LINE> <DEDENT> if custom_steps: <NEW_LINE> <INDENT> self.custom_steps = dict(custom_steps) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.custom_steps = {} <NEW_LINE> <DEDENT> <DEDENT> def as_project(self, name): <NEW_LINE> <INDENT> if not name in self.projects: <NEW_LINE> <INDENT> raise Error("unable to find " + name + " in the index file.", project_name=name) <NEW_LINE> <DEDENT> return self.projects[name] <NEW_LINE> <DEDENT> def filters(self, project_name): <NEW_LINE> <INDENT> for inc in self.include_pats: <NEW_LINE> <INDENT> inc = inc.replace('+', '\\+') <NEW_LINE> if re.match(inc, project_name): <NEW_LINE> <INDENT> for exc in self.exclude_pats: <NEW_LINE> <INDENT> if re.match(exc.replace('+', '\\+'), project_name): <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> def project(self, proj_obj): <NEW_LINE> <INDENT> if (not proj_obj.name in self.projects) and self.filters(proj_obj.name): <NEW_LINE> <INDENT> if not self.first_project: <NEW_LINE> <INDENT> self.first_project = proj_obj <NEW_LINE> <DEDENT> self.projects[proj_obj.name] = proj_obj | Builds *Project* instances for every project that matches *include_pats*
and not *exclude_pats*. See *filters*() for implementation. | 625990b5c4546d3d9def8360 |
class BoundaryBase(object): <NEW_LINE> <INDENT> def __init__(self, left, right, bottom, top): <NEW_LINE> <INDENT> self.dim = None <NEW_LINE> self.boundaries = None <NEW_LINE> <DEDENT> def update_boundaries(self, particles, particles_index, neighbor_graph): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def find_boundary_particles(self, neighbor_graph, neighbors_graph_size, ghost_indices, total_ghost_indices): <NEW_LINE> <INDENT> cumsum_neighbors = neighbors_graph_size.cumsum() <NEW_LINE> border = set() <NEW_LINE> for i in ghost_indices: <NEW_LINE> <INDENT> start = cumsum_neighbors[i] - neighbors_graph_size[i] <NEW_LINE> end = cumsum_neighbors[i] <NEW_LINE> border.update(neighbor_graph[start:end]) <NEW_LINE> <DEDENT> border_tmp = set(border) <NEW_LINE> for i in border_tmp: <NEW_LINE> <INDENT> start = cumsum_neighbors[i] - neighbors_graph_size[i] <NEW_LINE> end = cumsum_neighbors[i] <NEW_LINE> border.update(neighbor_graph[start:end]) <NEW_LINE> <DEDENT> border = border.difference(total_ghost_indices) <NEW_LINE> return np.array(list(border)) <NEW_LINE> <DEDENT> def primitive_to_ghost(self, particles, primitive, particles_index): <NEW_LINE> <INDENT> ghost_map = particles_index["ghost_map"] <NEW_LINE> return np.hstack((primitive, primitive[:, np.asarray([ghost_map[i] for i in particles_index["ghost"]])])) <NEW_LINE> <DEDENT> def gradient_to_ghost(self, particles, grad, particles_index): <NEW_LINE> <INDENT> ghost_map = particles_index["ghost_map"] <NEW_LINE> new_grad = {} <NEW_LINE> for key in grad.keys(): <NEW_LINE> <INDENT> new_grad[key] = np.hstack((grad[key], grad[key][:, np.asarray([ghost_map[i] for i in particles_index["ghost"]])])) <NEW_LINE> <DEDENT> return new_grad | boundary condition base class, every boundary class must inherit
this class | 625990b5187af65679d2aca8 |
class DummyParent(UniqueRepresentation, Parent): <NEW_LINE> <INDENT> def __init__(self, name): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> <DEDENT> def _repr_(self): <NEW_LINE> <INDENT> return self.name | A class for creating dummy parents for testing ElementWrapper | 625990b5627d3e7fe0e08ff7 |
class AttentionWithContext(Layer): <NEW_LINE> <INDENT> def __init__(self, W_regularizer=None, u_regularizer=None, b_regularizer=None, W_constraint=None, u_constraint=None, b_constraint=None, bias=True, **kwargs): <NEW_LINE> <INDENT> self.supports_masking = True <NEW_LINE> self.init = glorot_uniform() <NEW_LINE> self.W_regularizer = regularizers.get(W_regularizer) <NEW_LINE> self.u_regularizer = regularizers.get(u_regularizer) <NEW_LINE> self.b_regularizer = regularizers.get(b_regularizer) <NEW_LINE> self.W_constraint = constraints.get(W_constraint) <NEW_LINE> self.u_constraint = constraints.get(u_constraint) <NEW_LINE> self.b_constraint = constraints.get(b_constraint) <NEW_LINE> self.bias = bias <NEW_LINE> super(AttentionWithContext, self).__init__(**kwargs) <NEW_LINE> <DEDENT> def build(self, input_shape): <NEW_LINE> <INDENT> assert len(input_shape) == 3 <NEW_LINE> self.W = self.add_weight((input_shape[-1], input_shape[-1],), initializer=self.init, name='{}_W'.format(self.name), regularizer=self.W_regularizer, constraint=self.W_constraint) <NEW_LINE> if self.bias: <NEW_LINE> <INDENT> self.b = self.add_weight((input_shape[-1],), initializer='zero', name='{}_b'.format(self.name), regularizer=self.b_regularizer, constraint=self.b_constraint) <NEW_LINE> <DEDENT> self.u = self.add_weight((input_shape[-1],), initializer=self.init, name='{}_u'.format(self.name), regularizer=self.u_regularizer, constraint=self.u_constraint) <NEW_LINE> super(AttentionWithContext, self).build(input_shape) <NEW_LINE> <DEDENT> def compute_mask(self, input, input_mask=None): <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> def call(self, x, mask=None): <NEW_LINE> <INDENT> uit = K.dot(x, self.W) <NEW_LINE> if self.bias: <NEW_LINE> <INDENT> uit += self.b <NEW_LINE> <DEDENT> uit = K.tanh(uit) <NEW_LINE> ait = K.dot(uit, self.u) <NEW_LINE> a = K.exp(ait) <NEW_LINE> if mask is not None: <NEW_LINE> <INDENT> a *= K.cast(mask, K.floatx()) <NEW_LINE> <DEDENT> a /= K.cast(K.sum(a, axis=1, keepdims=True) + K.epsilon(), K.floatx()) <NEW_LINE> a = K.expand_dims(a) <NEW_LINE> weighted_input = x * a <NEW_LINE> return K.sum(weighted_input, axis=1) <NEW_LINE> <DEDENT> def get_output_shape_for(self, input_shape): <NEW_LINE> <INDENT> return input_shape[0], input_shape[-1] <NEW_LINE> <DEDENT> def compute_output_shape(self, input_shape): <NEW_LINE> <INDENT> return (input_shape[0], input_shape[-1]) | Example:
model.add(LSTM(64, return_sequences=True))
model.add(AttentionWithContext()) | 625990b5627d3e7fe0e08ff9 |
class PackageHashError(spack.error.SpackError): <NEW_LINE> <INDENT> pass | Raised for all errors encountered during package hashing. | 625990b5187af65679d2acab |
class SxS(_JSONEntry): <NEW_LINE> <INDENT> PREFIX = 'sxs' <NEW_LINE> JSON_ENCODER = AmendmentEncoder | Processes Section-by-Section analyses, keyed by sxs | 625990b5c4546d3d9def8367 |
class SpinChain: <NEW_LINE> <INDENT> def __init__( self, total_devices: int, spi_select: Optional[Tuple[int, int]] = None, spi_transfer: Optional[ Callable[[List[int]], List[int]] ] = None, ) -> None: <NEW_LINE> <INDENT> assert total_devices > 0 <NEW_LINE> assert (spi_select is None) != (spi_transfer is None), 'Either supply a SPI transfer function or use spidev\'s' <NEW_LINE> self._total_devices: Final = total_devices <NEW_LINE> if spi_transfer is not None: <NEW_LINE> <INDENT> self._spi_transfer = spi_transfer <NEW_LINE> <DEDENT> elif spi_select is not None: <NEW_LINE> <INDENT> import spidev <NEW_LINE> self._spi: Final = spidev.SpiDev() <NEW_LINE> bus, device = spi_select <NEW_LINE> self._spi.open(bus, device) <NEW_LINE> self._spi.mode = 3 <NEW_LINE> self._spi.lsbfirst = False <NEW_LINE> self._spi.max_speed_hz = 1000000 <NEW_LINE> self._spi.cshigh = False <NEW_LINE> self._spi_transfer = self._spi.xfer2 <NEW_LINE> <DEDENT> <DEDENT> def create(self, position: int) -> SpinDevice: <NEW_LINE> <INDENT> assert position >= 0 <NEW_LINE> assert position < self._total_devices <NEW_LINE> return SpinDevice( position, self._total_devices, self._spi_transfer, ) | Class for constructing a chain of SPIN devices | 625990b5187af65679d2acae |
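A construction sketch for `SpinChain` above: exactly one of `spi_select` and `spi_transfer` may be supplied. The transfer function below is a do-nothing stand-in, not a real SPI bus:

```python
def fake_transfer(data):
    # A real transfer would clock `data` out and return the bytes read back.
    return [0] * len(data)

chain = SpinChain(total_devices=3, spi_transfer=fake_transfer)
dev = chain.create(position=1)   # SpinDevice at position 1 of 3
```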
class KW_onehot0(KeywordToken): <NEW_LINE> <INDENT> ASTOP = 'onehot0' | Expression 'OneHot0' keyword | 625990b5627d3e7fe0e09005 |
class IsOwnerOrReadOnly(permissions.BasePermission): <NEW_LINE> <INDENT> def has_object_permission(self, request, view, obj): <NEW_LINE> <INDENT> if request.method in permissions.SAFE_METHODS: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return obj.owner == request.user | Grant read/write permission to the snippet's owner and read-only access to everyone else. | 625990b6187af65679d2acb1 |
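A typical attachment point for the permission above, sketched in Django REST Framework style (the view, model, and serializer names are illustrative):

```python
from rest_framework import generics, permissions

class SnippetDetail(generics.RetrieveUpdateDestroyAPIView):
    queryset = Snippet.objects.all()        # hypothetical model
    serializer_class = SnippetSerializer    # hypothetical serializer
    permission_classes = [permissions.IsAuthenticatedOrReadOnly,
                          IsOwnerOrReadOnly]
```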
class RegistrationView(MethodView): <NEW_LINE> <INDENT> def post(self): <NEW_LINE> <INDENT> user = User.query.filter_by(email=request.data['email']).first() <NEW_LINE> if not user: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> post_data = request.data <NEW_LINE> email = post_data['email'] <NEW_LINE> password = post_data['password'] <NEW_LINE> user = User(email=email, password=password) <NEW_LINE> user.save() <NEW_LINE> response = { 'message': 'you registered successfully. please log in' } <NEW_LINE> return make_response(jsonify(response)), 201 <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> response = { 'message': str(e) } <NEW_LINE> return make_response(jsonify(response)), 401 <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> response = { 'message': 'User already exists. Please login' } <NEW_LINE> return make_response(jsonify(response)), 202 | This class registers a new user. | 625990b6c4546d3d9def836b |
class IncapSession(requests.Session): <NEW_LINE> <INDENT> def get(self, url, **kwargs): <NEW_LINE> <INDENT> kwargs.setdefault('allow_redirects', True) <NEW_LINE> r = self.request('GET', url, **kwargs) <NEW_LINE> return crack(self, r) | requests.Session subclass to wrap all get requests with incapsula.crack. | 625990b6187af65679d2acb2 |
class ExcelOption(object): <NEW_LINE> <INDENT> def __init__(self,filename,sheetname=None): <NEW_LINE> <INDENT> self.filename = filename <NEW_LINE> self.sheetname = sheetname <NEW_LINE> self.wb = load_workbook(self.filename) <NEW_LINE> self.ws = self.wb.active if self.sheetname is None else self.wb[self.sheetname] <NEW_LINE> self.case_list = [] <NEW_LINE> self.Case = namedtuple( "Case" , tuple( self.ws.iter_rows( max_row = 1 , values_only = True ) )[0] ) <NEW_LINE> <DEDENT> def get_cases(self): <NEW_LINE> <INDENT> Case = tuple( self.ws.iter_rows( min_row = 2 , values_only = True ) ) <NEW_LINE> for data in Case: <NEW_LINE> <INDENT> self.case_list.append( self.Case( *data ) ) <NEW_LINE> <DEDENT> return self.case_list <NEW_LINE> <DEDENT> def get_case(self,row): <NEW_LINE> <INDENT> if isinstance(row,int) and (2 <= row <= self.ws.max_row): <NEW_LINE> <INDENT> case = tuple( self.ws.iter_rows( min_row = row , max_row = row , values_only = True ) )[0] <NEW_LINE> return self.Case( *case ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print("Can only be a positive integer") <NEW_LINE> <DEDENT> <DEDENT> def excel_write(self,row,real_result,result): <NEW_LINE> <INDENT> self.ws.cell(row = row,column = int(GetConfig().get_config("column","act_column")),value = real_result) <NEW_LINE> self.ws.cell(row = row,column = int(GetConfig().get_config("column","res_column")),value = result) <NEW_LINE> self.wb.save(self.filename) | Excel operation class. | 625990b6187af65679d2acb3 |
class Actor(nn.Module): <NEW_LINE> <INDENT> def __init__(self, state_size, hidden_size, action_size, seed, dropout): <NEW_LINE> <INDENT> super(Actor, self).__init__() <NEW_LINE> self.seed = torch.manual_seed(seed) <NEW_LINE> self.input_layer = nn.Linear(state_size, hidden_size) <NEW_LINE> self.batchnorm_layer = nn.BatchNorm1d(hidden_size) <NEW_LINE> self.fc1 = nn.Linear(hidden_size, int(hidden_size/2)) <NEW_LINE> self.dropout_layer = nn.Dropout(p=dropout) <NEW_LINE> self.output_layer = nn.Linear(int(hidden_size/2), action_size) <NEW_LINE> self.reset_parameters() <NEW_LINE> <DEDENT> def reset_parameters(self): <NEW_LINE> <INDENT> self.input_layer.weight.data.uniform_(*hidden_init(self.input_layer)) <NEW_LINE> self.fc1.weight.data.uniform_(*hidden_init(self.fc1)) <NEW_LINE> self.output_layer.weight.data.uniform_(-3e-3, 3e-3) <NEW_LINE> <DEDENT> def forward(self, state): <NEW_LINE> <INDENT> x = F.relu(self.input_layer(state)) <NEW_LINE> x = F.relu(self.fc1(x)) <NEW_LINE> x = self.dropout_layer(x) <NEW_LINE> return torch.tanh(self.output_layer(x)) | Actor (Policy) Model. | 625990b6091ae35668706dcd |
class PropertiesTestCaseWithoutProperties(TestCase): <NEW_LINE> <INDENT> fixtures = ['lfs_shop.xml', "lfs_user.xml"] <NEW_LINE> def setUp(self): <NEW_LINE> <INDENT> self.p1 = Product.objects.create(name="Product 1", slug="product-1", price=5) <NEW_LINE> self.p2 = Product.objects.create(name="Product 2", slug="product-2", price=3) <NEW_LINE> self.p3 = Product.objects.create(name="Product 3", slug="product-3", price=1) <NEW_LINE> self.c1 = Category.objects.create(name="Category 1", slug="category-1") <NEW_LINE> self.c1.products.set([self.p1, self.p2, self.p3]) <NEW_LINE> self.c1.save() <NEW_LINE> <DEDENT> def test_get_product_filters(self): <NEW_LINE> <INDENT> f = lfs.catalog.utils.get_product_filters(self.c1, {}, None, None, None) <NEW_LINE> self.assertEqual(f, {"select_fields": [], "number_fields": []}) | Test the filter methods without added properties. | 625990b6627d3e7fe0e0900f
class MqlTooComplex(InvalidMqlException): <NEW_LINE> <INDENT> pass | Exception class for errors caused by overly complex queries. | 625990b6627d3e7fe0e09013 |
class CSCMatrix(COOMatrix): <NEW_LINE> <INDENT> def _build(self, num_rows, num_cols): <NEW_LINE> <INDENT> data, rows, cols = self._build_sparse(num_rows, num_cols) <NEW_LINE> srtidxs = np.lexsort((rows, cols)) <NEW_LINE> data = data[srtidxs] <NEW_LINE> rows = rows[srtidxs] <NEW_LINE> cols = cols[srtidxs] <NEW_LINE> revidxs = np.argsort(srtidxs) <NEW_LINE> metadata = self._metadata <NEW_LINE> for key, (ind1, ind2, idxs, jac_type, factor) in iteritems(metadata): <NEW_LINE> <INDENT> if idxs is None: <NEW_LINE> <INDENT> metadata[key] = (revidxs[ind1:ind2], jac_type, factor) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> metadata[key] = (revidxs[ind1:ind2][np.argsort(idxs)], jac_type, factor) <NEW_LINE> <DEDENT> <DEDENT> coo = coo_matrix((data, (rows, cols)), shape=(num_rows, num_cols)) <NEW_LINE> self._matrix = coo.tocsc() <NEW_LINE> if coo.data.size != self._matrix.data.size: <NEW_LINE> <INDENT> raise ValueError("CSC matrix data contains duplicate row/col entries. " "This would break internal indexing.") | Sparse matrix in Compressed Col Storage format. | 625990b6187af65679d2acb7 |
class Printer(SciPyPrinter): <NEW_LINE> <INDENT> import_aliases = { 'numpy': '_np', 'scipy': '_scipy', 'scipy.special': '_scipy_special', 'scipy.constants': '_scipy_constants', 'scipy.sparse': '_scipy_sparse' } <NEW_LINE> @property <NEW_LINE> def numpy_alias(self): <NEW_LINE> <INDENT> return self.import_aliases.get('numpy', 'numpy') <NEW_LINE> <DEDENT> def _module_format(self, fqn, register=True): <NEW_LINE> <INDENT> super()._module_format(fqn, register) <NEW_LINE> parts = fqn.split('.') <NEW_LINE> module = '.'.join(parts[:-1]) <NEW_LINE> try: <NEW_LINE> <INDENT> alias = self.import_aliases[module] <NEW_LINE> return str.join('.', (alias, parts[-1])) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return fqn <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def direct_imports(self): <NEW_LINE> <INDENT> return (m for m in self.module_imports if m not in self.import_aliases) <NEW_LINE> <DEDENT> @property <NEW_LINE> def aliased_imports(self): <NEW_LINE> <INDENT> for module, alias in self.import_aliases.items(): <NEW_LINE> <INDENT> if module in self.module_imports: <NEW_LINE> <INDENT> yield module, alias <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def print_ndarray(self, arr, assign_to=None): <NEW_LINE> <INDENT> arr = np.asarray(arr) <NEW_LINE> subs = dict( np=self.numpy_alias, dtype=arr.dtype, list=arr.tolist(), shape=arr.shape ) <NEW_LINE> if arr.size: <NEW_LINE> <INDENT> arr_str = "{np}.array({list}, dtype={np}.{dtype})".format(**subs) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> arr_str = "{np}.zeros({shape}, dtype={np}.{dtype})".format(**subs) <NEW_LINE> <DEDENT> if assign_to and utils.isidentifier(assign_to): <NEW_LINE> <INDENT> return '{} = {}'.format(assign_to, arr_str) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return arr_str <NEW_LINE> <DEDENT> <DEDENT> def _print(self, e): <NEW_LINE> <INDENT> if isinstance(e, var.CallableBase): <NEW_LINE> <INDENT> return self._print_CallableBase(e) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return super()._print(e) <NEW_LINE> <DEDENT> <DEDENT> def _print_CallableBase(self, e): <NEW_LINE> <INDENT> args = ', '.join(self._print(arg) for arg in e.args) <NEW_LINE> name = getattr(e, 'name', None) or e.__class__.__name__ <NEW_LINE> return f'{name}({args})' <NEW_LINE> <DEDENT> def _print_invert(self, e): <NEW_LINE> <INDENT> arg = self._print(e.args[0]) <NEW_LINE> return f'~{arg}' <NEW_LINE> <DEDENT> def _print_getmaskarray(self, e): <NEW_LINE> <INDENT> arg = self._print(e.args[0]) <NEW_LINE> np = self.numpy_alias <NEW_LINE> return f'{np}.ma.getmaskarray({arg})' | sym2num sympy code printer. | 625990b6091ae35668706dd7 |
class Accoutrement(Equipment): <NEW_LINE> <INDENT> IS_EQUIPMENT = False <NEW_LINE> IS_ACCOUTREMENT = True <NEW_LINE> BUY_PRICE = 0 <NEW_LINE> SELL_PRICE = 0 <NEW_LINE> def place(self, animal): <NEW_LINE> <INDENT> for eq in animal.accoutrements: <NEW_LINE> <INDENT> if eq.NAME == self.NAME: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> return True | Things which are not equipment, but are displayed in the same way | 625990b6627d3e7fe0e09019 |
class BlackListToken(db.Model): <NEW_LINE> <INDENT> id = db.Column(db.Integer, primary_key=True, autoincrement=True) <NEW_LINE> token = db.Column(db.String(500), unique=True, nullable=False) <NEW_LINE> blacklisted_on = db.Column(db.DateTime, nullable=False) <NEW_LINE> def __init__(self, token): <NEW_LINE> <INDENT> self.token = token <NEW_LINE> self.blacklisted_on = datetime.datetime.now() <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def check_validity(auth_token): <NEW_LINE> <INDENT> is_listed = BlackListToken.query.filter_by(token=auth_token).first() <NEW_LINE> if is_listed: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False | Database model to store invalid (blacklisted) tokens | 625990b6627d3e7fe0e0901b
@attr(shard=10) <NEW_LINE> class NewProgramHelpTest(ProgramsConfigMixin, AcceptanceTest): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(NewProgramHelpTest, self).setUp() <NEW_LINE> self.auth_page = AutoAuthPage(self.browser, staff=True) <NEW_LINE> self.program_page = DashboardPageWithPrograms(self.browser) <NEW_LINE> self.auth_page.visit() <NEW_LINE> self.set_programs_api_configuration(True) <NEW_LINE> self.program_page.visit() <NEW_LINE> <DEDENT> def test_program_create_nav_help(self): <NEW_LINE> <INDENT> self.program_page.click_new_program_button() <NEW_LINE> href = 'http://edx.readthedocs.io/projects/open-edx-building-and-running-a-course' '/en/latest/index.html' <NEW_LINE> assert_nav_help_link( test=self, page=self.program_page, href=href, ) | Test help links on a 'New Program' page | 625990b6c4546d3d9def8375 |
class EndiciaConfiguration(ModelSingleton, ModelSQL, ModelView): <NEW_LINE> <INDENT> __name__ = 'endicia.configuration' <NEW_LINE> account_id = fields.Char('Account Id') <NEW_LINE> requester_id = fields.Char('Requester Id') <NEW_LINE> passphrase = fields.Char('Passphrase') <NEW_LINE> is_test = fields.Boolean('Is Test') <NEW_LINE> @classmethod <NEW_LINE> def __setup__(cls): <NEW_LINE> <INDENT> super(EndiciaConfiguration, cls).__setup__() <NEW_LINE> cls._error_messages.update({ 'endicia_credentials_required': 'Endicia settings on endicia configuration are incomplete.', }) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def __register__(cls, module_name): <NEW_LINE> <INDENT> TableHandler = backend.get('TableHandler') <NEW_LINE> cursor = Transaction().cursor <NEW_LINE> super(EndiciaConfiguration, cls).__register__(module_name) <NEW_LINE> if backend.name() == 'postgresql': <NEW_LINE> <INDENT> cursor.execute( 'SELECT pg_typeof("account_id") ' 'FROM endicia_configuration ' 'LIMIT 1', ) <NEW_LINE> records = cursor.fetchone() <NEW_LINE> is_integer = records and records[0] == 'integer' or None <NEW_LINE> if is_integer: <NEW_LINE> <INDENT> table = TableHandler(cursor, cls, module_name) <NEW_LINE> table.alter_type('account_id', 'varchar') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def get_endicia_credentials(self): <NEW_LINE> <INDENT> if not all([ self.account_id, self.requester_id, self.passphrase ]): <NEW_LINE> <INDENT> self.raise_user_error('endicia_credentials_required') <NEW_LINE> <DEDENT> return self | Configuration settings for Endicia. | 625990b6627d3e7fe0e0901d |
class PublicManager(BasePage): <NEW_LINE> <INDENT> homepage = 'xpath=>//*[@id="navUl"]/li[1]/a' <NEW_LINE> username = 'xpath=>//*[@id="userName"]' <NEW_LINE> password = 'xpath=>//*[@id="password"]' <NEW_LINE> button = 'xpath=>//*[@id="btnSubmit"]' <NEW_LINE> pre_screening = 'xpath=>//*[@id="navUl"]/li[3]/span' <NEW_LINE> fund_manager = 'xpath=>//*[@id="navUl"]/li[3]/ul/li[3]/a' <NEW_LINE> public_manager = 'xpath=>//*[@id="addNewprcdiv"]/div[1]/ul/li[2]' <NEW_LINE> key_word = 'xpath=>//*[@id="keywordSearch"]' <NEW_LINE> public_sure = 'xpath=>//*[@id="maindetermineBtn"]' <NEW_LINE> public_details = 'xpath=>//*[@id="main-grid2"]/tbody/tr/td[2]/a' <NEW_LINE> public_y1 = 'xpath=>//*[@id="lastYear"]' <NEW_LINE> rank_y1 = 'xpath=>//*[@id="indexanalysis"]/button[3]' <NEW_LINE> risk_y1 = 'xpath=>//*[@id="rankingDate2"]/button[2]' <NEW_LINE> excess_profit = 'xpath=>//*[@id="profit"]/option[2]' <NEW_LINE> bate_coefficient = 'xpath=>//*[@id="change"]/option[4]' <NEW_LINE> history_fund = 'xpath=>//*[@id="incomeUl"]/li[2]' <NEW_LINE> fund_link = 'xpath=>//*[@id="rateindicatorsCharts"]/tbody/tr[1]/td[2]/span' <NEW_LINE> def public_manager_login(self): <NEW_LINE> <INDENT> self.click(self.homepage) <NEW_LINE> time.sleep(2) <NEW_LINE> self.type(self.username, '15107045860') <NEW_LINE> time.sleep(2) <NEW_LINE> self.type(self.password, '045860') <NEW_LINE> time.sleep(2) <NEW_LINE> self.click(self.button) <NEW_LINE> time.sleep(2) <NEW_LINE> self.click(self.pre_screening) <NEW_LINE> time.sleep(2) <NEW_LINE> self.click(self.fund_manager) <NEW_LINE> time.sleep(8) <NEW_LINE> <DEDENT> def public_manager_details(self): <NEW_LINE> <INDENT> self.click(self.public_manager) <NEW_LINE> time.sleep(2) <NEW_LINE> self.type(self.key_word, '陈凯杨') <NEW_LINE> time.sleep(2) <NEW_LINE> self.click(self.public_sure) <NEW_LINE> time.sleep(8) <NEW_LINE> self.click(self.public_details) <NEW_LINE> time.sleep(5) <NEW_LINE> <DEDENT> def public_manager_head(self): <NEW_LINE> <INDENT> self.click(self.public_y1) <NEW_LINE> time.sleep(5) <NEW_LINE> <DEDENT> def public_manager_ranking(self): <NEW_LINE> <INDENT> self.click(self.rank_y1) <NEW_LINE> time.sleep(5) <NEW_LINE> <DEDENT> def public_manager_risk(self): <NEW_LINE> <INDENT> self.click(self.risk_y1) <NEW_LINE> time.sleep(5) <NEW_LINE> self.click(self.excess_profit) <NEW_LINE> time.sleep(5) <NEW_LINE> self.click(self.bate_coefficient) <NEW_LINE> time.sleep(5) <NEW_LINE> <DEDENT> def public_manager_list(self): <NEW_LINE> <INDENT> self.click(self.history_fund) <NEW_LINE> time.sleep(5) <NEW_LINE> self.click(self.fund_link) <NEW_LINE> time.sleep(5) <NEW_LINE> <DEDENT> def public_manager_info(self): <NEW_LINE> <INDENT> pass | Public fund manager detail page | 625990b6091ae35668706ddd
class RandomRotate(object): <NEW_LINE> <INDENT> def __init__(self, max_deg): <NEW_LINE> <INDENT> self.max_deg = max_deg <NEW_LINE> <DEDENT> def __call__(self, sample): <NEW_LINE> <INDENT> angle = np.random.uniform(0, 1) * self.max_deg <NEW_LINE> sample = transform.rotate(sample, angle) <NEW_LINE> return sample | Rotate randomly the image in a sample. | 625990b6187af65679d2acbd |
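Applying the transform to a single image; `skimage.transform` is assumed imported as `transform` in the source module, matching the call inside `__call__`:

```python
import numpy as np
from skimage import data

rotate = RandomRotate(max_deg=30)
image = data.camera() / 255.0        # float image in [0, 1]
rotated = rotate(image)              # rotated by a random angle in [0, 30) degrees
assert rotated.shape == image.shape  # skimage.transform.rotate preserves shape
```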
class Stats(BaseObj): <NEW_LINE> <INDENT> enregistrer = True <NEW_LINE> def __init__(self, uptime): <NEW_LINE> <INDENT> BaseObj.__init__(self) <NEW_LINE> self.uptime = uptime <NEW_LINE> self.nb_commandes = 0 <NEW_LINE> self.tps_moy_commandes = None <NEW_LINE> self.max_commandes = DicMax(3) <NEW_LINE> self.moy_wd = 0 <NEW_LINE> self.nb_wd = 0 <NEW_LINE> self.dernier_wd = None <NEW_LINE> self.max_wd = 0 <NEW_LINE> self.nb_max_wd = 0 <NEW_LINE> <DEDENT> def __getnewargs__(self): <NEW_LINE> <INDENT> return (None, ) <NEW_LINE> <DEDENT> def surveiller_watch_dog(self, temps_actuel): <NEW_LINE> <INDENT> if self.dernier_wd: <NEW_LINE> <INDENT> diff = temps_actuel - self.dernier_wd <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.dernier_wd = temps_actuel <NEW_LINE> return <NEW_LINE> <DEDENT> self.dernier_wd = temps_actuel <NEW_LINE> if diff >= WD_TROP_GRAND: <NEW_LINE> <INDENT> self.nb_max_wd += 1 <NEW_LINE> self.max_wd = (self.max_wd * (self.nb_max_wd - 1) + diff) / self.nb_max_wd <NEW_LINE> return <NEW_LINE> <DEDENT> if self.moy_wd is not None: <NEW_LINE> <INDENT> self.moy_wd = (self.moy_wd * self.nb_wd + diff) / (self.nb_wd + 1) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.moy_wd = 0 <NEW_LINE> <DEDENT> self.nb_wd += 1 | Class containing the MUD's various statistics.
These stats are saved to a file so they can be reloaded if the
'statistique' module is restarted.
Before treating the recovered statistics as the 'current' statistics,
we make sure their uptime is indeed equal to that of the session.
Reminder: the uptime is kept in serveur.uptime. This attribute is set
when the server object is created and indicates how long the MUD has
been running. It is therefore a figure independent of any module
loading. | 625990b6c4546d3d9def8378
class User(db.Model): <NEW_LINE> <INDENT> __tablename__ = "users" <NEW_LINE> username = db.Column(db.String(20), primary_key=True) <NEW_LINE> password = db.Column(db.Text, nullable=False) <NEW_LINE> email = db.Column(db.String(50), nullable=False) <NEW_LINE> first_name = db.Column(db.String(30), nullable=False) <NEW_LINE> last_name = db.Column(db.String(30), nullable=False) <NEW_LINE> pets = db.relationship("Pet", cascade="all,delete") <NEW_LINE> def __repr__(self): <NEW_LINE> <INDENT> s = self <NEW_LINE> return f"<User {s.username} {s.password} {s.email} {s.first_name} {s.last_name}>" <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def register(cls, username, password): <NEW_LINE> <INDENT> hashed_password = bcrypt.generate_password_hash(password) <NEW_LINE> hashed_password = hashed_password.decode("utf8") <NEW_LINE> return cls(username=username, password=hashed_password) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def login(cls, username, password): <NEW_LINE> <INDENT> user = User.query.filter_by(username=username).first() <NEW_LINE> if user and bcrypt.check_password_hash(user.password, password): <NEW_LINE> <INDENT> return user <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False | User Model | 625990b6091ae35668706de7 |
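A sketch of the register/login round trip, assuming a configured Flask-SQLAlchemy `db` and Flask-Bcrypt `bcrypt` as in the model above (and an active app context):

```python
user = User.register('alice', 'secret')   # hashes the password with bcrypt
user.email = '[email protected]'
user.first_name = 'Alice'
user.last_name = 'Smith'
db.session.add(user)
db.session.commit()

assert User.login('alice', 'secret')           # returns the User instance
assert User.login('alice', 'wrong') is False   # bad password
assert User.login('bob', 'secret') is False    # unknown username
```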
class CodeCheckRecord(models.Model): <NEW_LINE> <INDENT> user = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name=u'打分者', related_name='dafen_user') <NEW_LINE> to_user = models.ForeignKey(settings.AUTH_USER_MODEL, verbose_name=u'被打分者', related_name='beidafen_user') <NEW_LINE> date = models.DateField(default=timezone.now, verbose_name=u'日期') <NEW_LINE> num = models.IntegerField(default=0, verbose_name=u'打分值') <NEW_LINE> class Meta(): <NEW_LINE> <INDENT> unique_together = (('user', 'to_user', 'date'),) | Code walkthrough (review) record
by: 王健 at: 2015-4-6 | 625990b7187af65679d2acc3
class RichTextValue(object): <NEW_LINE> <INDENT> implements(IRichTextValue) <NEW_LINE> def __init__(self, raw=None, mimeType=None, outputMimeType=None, encoding='utf-8', output=None): <NEW_LINE> <INDENT> self._raw_holder = RawValueHolder(raw) <NEW_LINE> self._mimeType = mimeType <NEW_LINE> self._outputMimeType = outputMimeType <NEW_LINE> self._encoding = encoding <NEW_LINE> <DEDENT> @property <NEW_LINE> def raw(self): <NEW_LINE> <INDENT> return self._raw_holder.value <NEW_LINE> <DEDENT> @property <NEW_LINE> def encoding(self): <NEW_LINE> <INDENT> return self._encoding <NEW_LINE> <DEDENT> @property <NEW_LINE> def raw_encoded(self): <NEW_LINE> <INDENT> if self._raw_holder.value is None: <NEW_LINE> <INDENT> return '' <NEW_LINE> <DEDENT> happy_value = safe_unicode(self._raw_holder.value, encoding=self.encoding) <NEW_LINE> return happy_value.encode(self.encoding, 'ignore') <NEW_LINE> <DEDENT> @property <NEW_LINE> def mimeType(self): <NEW_LINE> <INDENT> return self._mimeType <NEW_LINE> <DEDENT> @property <NEW_LINE> def outputMimeType(self): <NEW_LINE> <INDENT> return self._outputMimeType <NEW_LINE> <DEDENT> @property <NEW_LINE> def output(self): <NEW_LINE> <INDENT> site = getSite() <NEW_LINE> return self.output_relative_to(site) <NEW_LINE> <DEDENT> def output_relative_to(self, context): <NEW_LINE> <INDENT> if self.mimeType == self.outputMimeType: <NEW_LINE> <INDENT> return self.raw_encoded <NEW_LINE> <DEDENT> transformer = ITransformer(context, None) <NEW_LINE> if transformer is None: <NEW_LINE> <INDENT> site = getSite() <NEW_LINE> transformer = ITransformer(site, None) <NEW_LINE> if transformer is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> return transformer(self, self.outputMimeType) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return u"RichTextValue object. (Did you mean <attribute>.raw or " u"<attribute>.output?)" <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if not isinstance(other, RichTextValue): <NEW_LINE> <INDENT> return NotImplemented <NEW_LINE> <DEDENT> return vars(self) == vars(other) <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> equal = self.__eq__(other) <NEW_LINE> if equal is NotImplemented: <NEW_LINE> <INDENT> return NotImplemented <NEW_LINE> <DEDENT> return not equal | The actual value.
Note that this is not a persistent object, to avoid a separate ZODB object
being loaded. | 625990b7091ae35668706ded |
class IRCChannel(object): <NEW_LINE> <INDENT> def __init__(self, name, topic='Telegram channel'): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.topic_by = 'Telegram' <NEW_LINE> self.topic = topic <NEW_LINE> self.clients = set() | IRC Channel handler. | 625990b7627d3e7fe0e09035 |
class Development(Common): <NEW_LINE> <INDENT> DEBUG = True <NEW_LINE> INTERNAL_IPS = [ '127.0.0.1' ] <NEW_LINE> INSTALLED_APPS = Common.INSTALLED_APPS + [ 'debug_toolbar' ]; <NEW_LINE> MIDDLEWARE = Common.MIDDLEWARE + [ 'debug_toolbar.middleware.DebugToolbarMiddleware' ] <NEW_LINE> def show_toolbar(request): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> DEBUG_TOOLBAR_CONFIG = { "SHOW_TOOLBAR_CALLBACK" : show_toolbar, } | The in-development settings and the default configuration. | 625990b7c4546d3d9def8382 |
class DartApiClient(BaseApiClient): <NEW_LINE> <INDENT> def __init__(self, content_source): <NEW_LINE> <INDENT> BaseApiClient.__init__(self, content_source=content_source) <NEW_LINE> log.debug("Creating new Dart API client...") <NEW_LINE> slumber.API.__init__(self, self.url, auth=self.auth_request_decorator, append_slash=False) <NEW_LINE> <DEDENT> @property <NEW_LINE> def auth_request_decorator(self): <NEW_LINE> <INDENT> def result(request): <NEW_LINE> <INDENT> request.headers['Authorization'] = 'Bearer {}'.format(self.content_source.o_auth_client.client_secret) <NEW_LINE> return request <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> @property <NEW_LINE> def url(self): <NEW_LINE> <INDENT> return f'{self.content_source.host_url}/corpus/api/v1/' <NEW_LINE> <DEDENT> def get_provider_courses(self): <NEW_LINE> <INDENT> request = self.collections.get() <NEW_LINE> data = request['data'] <NEW_LINE> result = [ { 'course_id': collection['uid'], 'name': collection['title'], 'org': collection['content_creator'], } for collection in data ] <NEW_LINE> return result <NEW_LINE> <DEDENT> def get_course_blocks(self, course_id): <NEW_LINE> <INDENT> request = self.collections(course_id).get() <NEW_LINE> assets_list = request['asset_uids'] <NEW_LINE> result = [] <NEW_LINE> for asset in assets_list: <NEW_LINE> <INDENT> asset_request = self.assets(asset).get() <NEW_LINE> content_type = asset_request['asset']['content_type'] <NEW_LINE> if content_type == 'vertical': <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> lti_url = None <NEW_LINE> for data_source in asset_request['asset']['content_embed']: <NEW_LINE> <INDENT> if data_source['protocol'] == 'lti': <NEW_LINE> <INDENT> lti_url = data_source['data'] <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> if not lti_url: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> title = asset_request['asset']['title'] <NEW_LINE> result.append({ 'block_id': asset, 'display_name': title, 'lti_url': lti_url, 'type': content_type, 'visible_to_staff_only': False, }) <NEW_LINE> <DEDENT> return result | API client to interact with DART. | 625990b7187af65679d2acca |
class SBBTreeHeap( object ): <NEW_LINE> <INDENT> __slots__ = ( 'root' ) <NEW_LINE> def __init__( self ): <NEW_LINE> <INDENT> self.root = None <NEW_LINE> <DEDENT> def __str__( self ): <NEW_LINE> <INDENT> return str(self.root) | A size-balanced binary tree heap.
SLOTS:
root: NoneType|TreeNode | 625990b7c4546d3d9def8389 |
class TenCrop(object): <NEW_LINE> <INDENT> def __init__(self, size, vertical_flip=False): <NEW_LINE> <INDENT> self.size = size <NEW_LINE> if isinstance(size, numbers.Number): <NEW_LINE> <INDENT> self.size = (int(size), int(size)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> assert len(size) == 2, "Please provide only two dimensions (h, w) for size." <NEW_LINE> self.size = size <NEW_LINE> <DEDENT> self.vertical_flip = vertical_flip <NEW_LINE> <DEDENT> def __call__(self, img): <NEW_LINE> <INDENT> return F.ten_crop(img, self.size, self.vertical_flip) | Crop the given PIL Image into four corners and the central crop, plus the flipped versions of these (horizontal flipping is used by default).
.. Note::
This transform returns a tuple of images, and there may be a mismatch in the number of inputs and targets your Dataset returns.
See the example below for how to deal with this.
Args:
size (sequence or int): Desired output size of the crop. If size is an ``int`` instead of a sequence like (h, w), a square crop (size, size) is made.
vertical_flip (bool): Use vertical flipping instead of horizontal
Example:
>>> transform = Compose([
>>> TenCrop(size), # this is a list of PIL Images
>>> Lambda(lambda crops: torch.stack([ToTensor()(crop) for crop in crops])) # returns a 4D tensor
>>> ])
>>> #In your test loop you can do the following:
>>> input, target = batch # input is a 5d tensor, target is 2d
>>> bs, ncrops, c, h, w = input.size()
>>> result = model(input.view(-1, c, h, w)) # fuse batch size and ncrops
>>> result_avg = result.view(bs, ncrops, -1).mean(1) # avg over crops | 625990b8187af65679d2acd2 |
class NoFutureImportBear(LocalBear): <NEW_LINE> <INDENT> LANGUAGES = {'Python', 'Python 2', 'Python 3'} <NEW_LINE> AUTHORS = {'The coala developers'} <NEW_LINE> AUTHORS_EMAILS = {'[email protected]'} <NEW_LINE> LICENSE = 'AGPL-3.0' <NEW_LINE> BEAR_DEPS = {PyFlakesASTBear} <NEW_LINE> def remove_future_imports(self, file, lineno, corrected_lines): <NEW_LINE> <INDENT> def handle_backslash(line, lineno, diff, corrected_lines): <NEW_LINE> <INDENT> corrected_lines.add(lineno) <NEW_LINE> semicolon_index = line.find(';') <NEW_LINE> if semicolon_index == -1: <NEW_LINE> <INDENT> if line.rstrip()[-1] == '\\': <NEW_LINE> <INDENT> next_line = file[lineno] <NEW_LINE> diff, corrected_lines = handle_backslash( next_line, lineno+1, diff, corrected_lines) <NEW_LINE> <DEDENT> diff.delete_line(lineno) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> replacement = line[semicolon_index + 1:].lstrip() <NEW_LINE> diff, corrected_lines = handle_semicolon( replacement, lineno, diff, corrected_lines) <NEW_LINE> <DEDENT> return diff, corrected_lines <NEW_LINE> <DEDENT> def handle_semicolon(line, lineno, diff, corrected_lines): <NEW_LINE> <INDENT> corrected_lines.add(lineno) <NEW_LINE> if not line.lstrip().startswith('from __future__'): <NEW_LINE> <INDENT> return diff, corrected_lines <NEW_LINE> <DEDENT> semicolon_index = line.find(';') <NEW_LINE> if semicolon_index == -1: <NEW_LINE> <INDENT> diff, corrected_lines = handle_backslash( line, lineno, diff, corrected_lines) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> replacement = line[semicolon_index + 1:].lstrip() <NEW_LINE> diff.modify_line(lineno, replacement) <NEW_LINE> if len(replacement) != 0: <NEW_LINE> <INDENT> diff, corrected_lines = handle_semicolon( replacement, lineno, diff, corrected_lines) <NEW_LINE> <DEDENT> <DEDENT> return diff, corrected_lines <NEW_LINE> <DEDENT> diff = Diff(file) <NEW_LINE> line = file[lineno - 1] <NEW_LINE> diff, corrected_lines = handle_semicolon( line, lineno, diff, corrected_lines) <NEW_LINE> return diff, corrected_lines <NEW_LINE> <DEDENT> def run(self, filename, file, dependency_results=dict() ): <NEW_LINE> <INDENT> corrected_lines = set() <NEW_LINE> for result in dependency_results.get(PyFlakesASTBear.name, []): <NEW_LINE> <INDENT> for node in result.get_nodes(result.module_scope, FutureImportation, key=lambda x: x.source.lineno): <NEW_LINE> <INDENT> lineno = node.source.lineno <NEW_LINE> if lineno not in corrected_lines: <NEW_LINE> <INDENT> corrected, corrected_lines = self.remove_future_imports( file, lineno, corrected_lines ) <NEW_LINE> yield Result.from_values( origin=self, message='Future import(s) found', file=filename, diffs={filename: corrected}, line=lineno) | NoFutureImportBear implementation.
A local bear that uses pyflakes AST to detect
use of `__future__` import in python code. | 625990b8187af65679d2acd8 |
class Queue: <NEW_LINE> <INDENT> def __init__(self, capacity): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def is_empty(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def is_full(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def enqueue(self, item): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def dequeue(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def size(self): <NEW_LINE> <INDENT> pass | Implements a link-based, efficient first-in first-out Abstract Data Type | 625990b8627d3e7fe0e0905b
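The row above is all stubs; a minimal link-based implementation consistent with its docstring could look like this (the `_Node` helper is my own addition, not from the source):

```python
class _Node:
    """Singly linked node used internally by Queue."""
    __slots__ = ('value', 'next')

    def __init__(self, value):
        self.value = value
        self.next = None


class Queue:
    """Link-based, bounded first-in first-out queue."""

    def __init__(self, capacity):
        self._capacity = capacity
        self._front = None   # dequeue end
        self._rear = None    # enqueue end
        self._count = 0

    def is_empty(self):
        return self._count == 0

    def is_full(self):
        return self._count == self._capacity

    def enqueue(self, item):
        if self.is_full():
            raise OverflowError('queue is full')
        node = _Node(item)
        if self._rear is None:
            self._front = node
        else:
            self._rear.next = node
        self._rear = node
        self._count += 1

    def dequeue(self):
        if self.is_empty():
            raise IndexError('dequeue from empty queue')
        node = self._front
        self._front = node.next
        if self._front is None:
            self._rear = None
        self._count -= 1
        return node.value

    def size(self):
        return self._count
```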
class ConfigurableUnit(Unit, Configurable): <NEW_LINE> <INDENT> pass | A unit which can be directly configured. | 625990b8187af65679d2acdb |
class PositionBody: <NEW_LINE> <INDENT> def __init__( self, x_m, y_m, z_m): <NEW_LINE> <INDENT> self.x_m = x_m <NEW_LINE> self.y_m = y_m <NEW_LINE> self.z_m = z_m <NEW_LINE> <DEDENT> def __eq__(self, to_compare): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return (self.x_m == to_compare.x_m) and (self.y_m == to_compare.y_m) and (self.z_m == to_compare.z_m) <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> struct_repr = ", ".join([ "x_m: " + str(self.x_m), "y_m: " + str(self.y_m), "z_m: " + str(self.z_m) ]) <NEW_LINE> return f"PositionBody: [{struct_repr}]" <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def translate_from_rpc(rpcPositionBody): <NEW_LINE> <INDENT> return PositionBody( rpcPositionBody.x_m, rpcPositionBody.y_m, rpcPositionBody.z_m ) <NEW_LINE> <DEDENT> def translate_to_rpc(self, rpcPositionBody): <NEW_LINE> <INDENT> rpcPositionBody.x_m = self.x_m <NEW_LINE> rpcPositionBody.y_m = self.y_m <NEW_LINE> rpcPositionBody.z_m = self.z_m | Position type, represented in the Body (X Y Z) frame
Parameters
----------
x_m : float
X Position in metres.
y_m : float
Y Position in metres.
z_m : float
Z Position in metres. | 625990b8627d3e7fe0e09067 |
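Constructing and comparing two body-frame positions with the class above:

```python
p = PositionBody(1.0, -2.5, 0.0)
q = PositionBody(1.0, -2.5, 0.0)

assert p == q  # field-by-field comparison via __eq__
print(p)       # PositionBody: [x_m: 1.0, y_m: -2.5, z_m: 0.0]
```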
class Module(object): <NEW_LINE> <INDENT> def __init__(self, modname, filename): <NEW_LINE> <INDENT> self.modname = modname <NEW_LINE> self.label = modname <NEW_LINE> self.filename = filename <NEW_LINE> self.imports = set() <NEW_LINE> self.imported_names = () <NEW_LINE> self.unused_names = () <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return f"<{self.__class__.__name__}: {self.modname}>" | Node in a module dependency graph.
Packages may also be represented as Module objects.
``imports`` is a set of module names this module depends on.
``imported_names`` is a list of all names that were imported from other
modules (actually, ImportInfo objects).
``unused_names`` is a list of names that were imported, but are not used
(actually, ImportInfo objects). | 625990b8187af65679d2ace0 |
class Group(object): <NEW_LINE> <INDENT> def __init__(self, stones=None, color=None): <NEW_LINE> <INDENT> if stones is not None: <NEW_LINE> <INDENT> self.stones = set(stones) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.stones = set() <NEW_LINE> <DEDENT> self.border = set() <NEW_LINE> self.color = color <NEW_LINE> <DEDENT> def __add__(self, other): <NEW_LINE> <INDENT> if self.color != other.color: <NEW_LINE> <INDENT> raise ValueError('Only groups of same colour can be added!') <NEW_LINE> <DEDENT> grp = Group(stones=self.stones.union(other.stones)) <NEW_LINE> grp.color = self.color <NEW_LINE> grp.border = self.border.union(other.border).difference(grp.stones) <NEW_LINE> return grp <NEW_LINE> <DEDENT> @property <NEW_LINE> def size(self): <NEW_LINE> <INDENT> return len(self.stones) | Represents a group of connected stones on the board.
Attributes:
stones (set): list of all coordinates where the group has a stone
border (set): list of all fields that are adjacent to the group
For a new group empty fields must be added manually
since the group does not know about the field size
color (bool): color of the group
Property:
size (int): equal to len(self.stones), the number of stones in
the group. | 625990b8627d3e7fe0e09069 |
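Merging two adjacent same-coloured groups with `+`; the (row, col) tuples are purely illustrative, since the class does not prescribe a coordinate format:

```python
a = Group(stones={(0, 0)}, color=True)
a.border = {(0, 1), (1, 0)}

b = Group(stones={(0, 1)}, color=True)
b.border = {(0, 0), (0, 2), (1, 1)}

merged = a + b
assert merged.stones == {(0, 0), (0, 1)}
assert merged.size == 2
assert (0, 0) not in merged.border  # stones are removed from the merged border
```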
class CompoundStep(object): <NEW_LINE> <INDENT> def __init__(self, methods): <NEW_LINE> <INDENT> self.methods = list(methods) <NEW_LINE> self.generates_stats = any(method.generates_stats for method in self.methods) <NEW_LINE> self.stats_dtypes = [] <NEW_LINE> for method in self.methods: <NEW_LINE> <INDENT> if method.generates_stats: <NEW_LINE> <INDENT> self.stats_dtypes.extend(method.stats_dtypes) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def step(self, point): <NEW_LINE> <INDENT> if self.generates_stats: <NEW_LINE> <INDENT> states = [] <NEW_LINE> for method in self.methods: <NEW_LINE> <INDENT> if method.generates_stats: <NEW_LINE> <INDENT> point, state = method.step(point) <NEW_LINE> states.extend(state) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> point = method.step(point) <NEW_LINE> <DEDENT> <DEDENT> return point, states <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for method in self.methods: <NEW_LINE> <INDENT> point = method.step(point) <NEW_LINE> <DEDENT> return point <NEW_LINE> <DEDENT> <DEDENT> def warnings(self, strace): <NEW_LINE> <INDENT> warns = [] <NEW_LINE> for method in self.methods: <NEW_LINE> <INDENT> if hasattr(method, 'warnings'): <NEW_LINE> <INDENT> warns.extend(method.warnings(strace)) <NEW_LINE> <DEDENT> <DEDENT> return warns | Step method composed of a list of several other step methods applied in sequence. | 625990b9627d3e7fe0e0906d |
class VirtualNetworkProfile(Model): <NEW_LINE> <INDENT> _validation = { 'name': {'readonly': True}, 'type': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'subnet': {'key': 'subnet', 'type': 'str'}, } <NEW_LINE> def __init__(self, id=None, subnet=None): <NEW_LINE> <INDENT> super(VirtualNetworkProfile, self).__init__() <NEW_LINE> self.id = id <NEW_LINE> self.name = None <NEW_LINE> self.type = None <NEW_LINE> self.subnet = subnet | Specification for using a Virtual Network.
Variables are only populated by the server, and will be ignored when
sending a request.
:param id: Resource id of the Virtual Network.
:type id: str
:ivar name: Name of the Virtual Network (read-only).
:vartype name: str
:ivar type: Resource type of the Virtual Network (read-only).
:vartype type: str
:param subnet: Subnet within the Virtual Network.
:type subnet: str | 625990b9627d3e7fe0e09075 |
class TZDateTime(TypeDecorator): <NEW_LINE> <INDENT> impl = DateTime <NEW_LINE> def process_bind_param(self, value, dialect): <NEW_LINE> <INDENT> if value is not None: <NEW_LINE> <INDENT> if isinstance(value, basestring) or isinstance(value, str): <NEW_LINE> <INDENT> value = parser.parse(value) <NEW_LINE> <DEDENT> if isinstance(value, datetime): <NEW_LINE> <INDENT> if value.tzinfo is not None: <NEW_LINE> <INDENT> value = value.astimezone(utc) <NEW_LINE> value = value.replace(tzinfo=None) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return value <NEW_LINE> <DEDENT> def process_result_value(self, value, dialect): <NEW_LINE> <INDENT> if value is not None: <NEW_LINE> <INDENT> if isinstance(value, datetime): <NEW_LINE> <INDENT> if value.tzinfo is not None: <NEW_LINE> <INDENT> value = value.astimezone(utc) <NEW_LINE> value = value.replace(tzinfo=None) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return value | Coerces a tz-aware datetime object into a naive utc datetime object to be
stored in the database. If already naive, will keep it.
On return of the data will restore it as an aware object by assuming it
is UTC.
Use this instead of the standard :class:`sqlalchemy.types.DateTime`. | 625990b9c4546d3d9def83a4 |
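Declaring a column with the decorator; this sketch assumes the `utc` and `parser` imports used by the type above, plus standard SQLAlchemy declarative boilerplate:

```python
from sqlalchemy import Column, Integer
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()


class Event(Base):
    __tablename__ = 'events'
    id = Column(Integer, primary_key=True)
    # Aware datetimes are normalised to naive UTC before storage.
    starts_at = Column(TZDateTime())
```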
class PySerialTransport(RFXtrxTransport): <NEW_LINE> <INDENT> def __init__(self, port, debug=False): <NEW_LINE> <INDENT> self.serial = Serial(port, 38400, timeout=0.1) <NEW_LINE> self.debug = debug <NEW_LINE> <DEDENT> def receive_blocking(self): <NEW_LINE> <INDENT> while True: <NEW_LINE> <INDENT> data = self.serial.read() <NEW_LINE> if (len(data) > 0): <NEW_LINE> <INDENT> if data == '\x00': <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> pkt = bytearray(data) <NEW_LINE> data = self.serial.read(pkt[0]) <NEW_LINE> pkt.extend(bytearray(data)) <NEW_LINE> if self.debug: <NEW_LINE> <INDENT> print("Recv: " + " ".join("0x{0:02x}".format(x) for x in pkt)) <NEW_LINE> <DEDENT> return self.parse(pkt) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def send(self, data): <NEW_LINE> <INDENT> if isinstance(data, bytearray): <NEW_LINE> <INDENT> pkt = data <NEW_LINE> <DEDENT> elif isinstance(data, str) or isinstance(data, bytes): <NEW_LINE> <INDENT> pkt = bytearray(data) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Invalid type") <NEW_LINE> <DEDENT> if self.debug: <NEW_LINE> <INDENT> print ("Send: " + " ".join("0x{0:02x}".format(x) for x in pkt)) <NEW_LINE> <DEDENT> self.serial.write(pkt) <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.send('\x0D\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00') <NEW_LINE> sleep(0.3) <NEW_LINE> self.serial.flushInput() <NEW_LINE> self.send('\x0D\x00\x00\x01\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00') <NEW_LINE> return self.receive_blocking() | Implementation of a transport using PySerial | 625990b9187af65679d2ace9 |
class CommonDotGen(DotGenLabeller): <NEW_LINE> <INDENT> def visit_GeneratedPathRef(self, node): <NEW_LINE> <INDENT> return "target: %s" % node.target.get_filename() | Manages coversion of all common nodes to dot. | 625990b9c4546d3d9def83a6 |
class CacheKey(str): <NEW_LINE> <INDENT> def __init__(self, key): <NEW_LINE> <INDENT> self._key = key <NEW_LINE> <DEDENT> if sys.version_info[0] < 3: <NEW_LINE> <INDENT> def __str__(self): <NEW_LINE> <INDENT> return smart_bytes(self._key) <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return smart_text(self._key) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> def __str__(self): <NEW_LINE> <INDENT> return smart_text(self._key) <NEW_LINE> <DEDENT> <DEDENT> def original_key(self): <NEW_LINE> <INDENT> key = self._key.rsplit(":", 1)[1] <NEW_LINE> return key | A stub string class that we can use to check if a key was created already. | 625990b9c4546d3d9def83a7 |
class Collaboration: <NEW_LINE> <INDENT> def __init__(self, species_index, gen, individuals, fit=None): <NEW_LINE> <INDENT> self.species_index = species_index <NEW_LINE> self.gen = gen <NEW_LINE> self.individuals = individuals <NEW_LINE> self.fitness = FitnessMin() <NEW_LINE> if fit is not None: <NEW_LINE> <INDENT> self.fitness.values = fit <NEW_LINE> <DEDENT> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self.individuals) <NEW_LINE> <DEDENT> def __getitem__(self, item): <NEW_LINE> <INDENT> return self.individuals[item] | A cooperation of individuals to form a complete solution. | 625990b9187af65679d2aced |
class HomeView(TemplateView): <NEW_LINE> <INDENT> template_name = 'invest/home.html' | Query the miner.AggregateResults table to retrieve values for plotting in a bar chart | 625990b9187af65679d2acf0 |
class TestProcessFindJbossEAPRunJarVer(unittest.TestCase): <NEW_LINE> <INDENT> def test_success_case(self): <NEW_LINE> <INDENT> in_line = '1.3.6.Final-redhat-1**2018-01-18; ' '1.3.6.Final-redhat-1**2018-01-18; ' '1.3.6.Final-redhat-1**2018-01-18\n' <NEW_LINE> expected = {'version': '1.3.6.Final-redhat-1', 'date': '2018-01-18'} <NEW_LINE> self.assertEqual( eap.ProcessFindJbossEAPRunJarVer.process( ansible_result(in_line)), [expected, expected, expected]) | Test ProcessFindJbossEAPRunJarVer. | 625990bac4546d3d9def83af |
class OpenURLAction(BaseAction): <NEW_LINE> <INDENT> itype = 'is.workflow.actions.openurl' <NEW_LINE> keyword = 'open_url' | Open URL from previous action | 625990ba187af65679d2acf4 |
@parser(Specs.rhn_charsets) <NEW_LINE> class RHNCharSets(LegacyItemAccess, Parser): <NEW_LINE> <INDENT> def parse_content(self, content): <NEW_LINE> <INDENT> db_set = {} <NEW_LINE> db_backend = None <NEW_LINE> in_server = False <NEW_LINE> in_client = False <NEW_LINE> for line in content: <NEW_LINE> <INDENT> line = line.strip() <NEW_LINE> if not line or line.startswith(('----', '(', 'PARAMETER')): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> if '_encoding' in line: <NEW_LINE> <INDENT> db_backend = 'postgresql' <NEW_LINE> in_server = line.startswith('server_') <NEW_LINE> in_client = line.startswith('client_') <NEW_LINE> <DEDENT> elif db_backend == 'postgresql': <NEW_LINE> <INDENT> if in_server: <NEW_LINE> <INDENT> db_set['server_encoding'] = line <NEW_LINE> <DEDENT> elif in_client: <NEW_LINE> <INDENT> db_set['client_encoding'] = line <NEW_LINE> <DEDENT> <DEDENT> elif line.startswith('NLS_'): <NEW_LINE> <INDENT> line_splits = line.split() <NEW_LINE> if len(line_splits) == 2: <NEW_LINE> <INDENT> db_set[line_splits[0]] = line_splits[1] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.data = db_set | ==== Sample (1) embedded database ====
server_encoding
-----------------
UTF8
(1 row)
client_encoding
-----------------
UTF8
(1 row)
==== Sample (2) Oracle database ====
PARAMETER VALUE
---------------------------------
NLS_CHARACTERSET UTF8
NLS_NCHAR_CHARACTERSET UTF8
======================================
Returns a dict:
- {'server_encoding': 'UTF8','client_encoding': 'UTF8'}
- {'NLS_CHARACTERSET': 'UTF8','NLS_NCHAR_CHARACTERSET': 'UTF8'} | 625990ba627d3e7fe0e09091 |
class Corpus(object): <NEW_LINE> <INDENT> def __init__(self, dictionary=None): <NEW_LINE> <INDENT> self.dictionary = {} <NEW_LINE> self.dictionary_supplied = False <NEW_LINE> self.matrix = None <NEW_LINE> if dictionary is not None: <NEW_LINE> <INDENT> self._check_dict(dictionary) <NEW_LINE> self.dictionary = dictionary <NEW_LINE> self.dictionary_supplied = True <NEW_LINE> <DEDENT> <DEDENT> def _check_dict(self, dictionary): <NEW_LINE> <INDENT> if (np.max(list(dictionary.values())) != (len(dictionary) - 1)): <NEW_LINE> <INDENT> raise Exception('The largest id in the dictionary ' 'should be equal to its length minus one.') <NEW_LINE> <DEDENT> if np.min(list(dictionary.values())) != 0: <NEW_LINE> <INDENT> raise Exception('Dictionary ids should start at zero') <NEW_LINE> <DEDENT> <DEDENT> def fit(self, corpus, window=10, max_map_size=1000, ignore_missing=False): <NEW_LINE> <INDENT> self.matrix = construct_cooccurrence_matrix(corpus, self.dictionary, int(self.dictionary_supplied), int(window), int(ignore_missing), max_map_size) <NEW_LINE> <DEDENT> def save(self, filename): <NEW_LINE> <INDENT> with open(filename, 'wb') as savefile: <NEW_LINE> <INDENT> pickle.dump((self.dictionary, self.matrix), savefile, protocol=pickle.HIGHEST_PROTOCOL) <NEW_LINE> <DEDENT> <DEDENT> @classmethod <NEW_LINE> def load(cls, filename): <NEW_LINE> <INDENT> instance = cls() <NEW_LINE> with open(filename, 'rb') as savefile: <NEW_LINE> <INDENT> instance.dictionary, instance.matrix = pickle.load(savefile) <NEW_LINE> <DEDENT> return instance | Class for constructing a cooccurrence matrix
from a corpus.
A dictionary mapping words to ids can optionally
be supplied. If left None, it will be constructed
from the corpus. | 625990bac4546d3d9def83b2 |
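A typical fit-and-inspect flow over a tokenised corpus; `construct_cooccurrence_matrix` is assumed importable from the same package, and `matrix` is expected to be a scipy sparse matrix:

```python
texts = [['hello', 'world'], ['hello', 'there', 'world']]

corpus = Corpus()
corpus.fit(texts, window=10)

print(len(corpus.dictionary))  # number of distinct tokens (3 here)
print(corpus.matrix.shape)     # cooccurrence matrix, one row/col per token

corpus.save('corpus.pickle')
restored = Corpus.load('corpus.pickle')
```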
class NetStatsTest(unittest.TestCase): <NEW_LINE> <INDENT> def testInterfaceStatsGood(self): <NEW_LINE> <INDENT> dm.mrvl88601_netstats.PON_STATS_DIR = 'testdata/mrvl88601_netstats/ani' <NEW_LINE> dm.mrvl88601_netstats.ETH_STATS_DIR = 'testdata/mrvl88601_netstats/uni' <NEW_LINE> eth = dm.mrvl88601_netstats.NetdevStatsMrvl88601( 'testdata/mrvl88601_netstats/uni') <NEW_LINE> self.assertEqual(eth.BroadcastPacketsReceived, 100) <NEW_LINE> self.assertEqual(eth.BroadcastPacketsSent, 101) <NEW_LINE> self.assertEqual(eth.BytesReceived, 1001) <NEW_LINE> self.assertEqual(eth.BytesSent, 1002) <NEW_LINE> self.assertEqual(eth.DiscardPacketsReceived, 0) <NEW_LINE> self.assertEqual(eth.DiscardPacketsSent, 0) <NEW_LINE> self.assertEqual(eth.ErrorsReceived, 11) <NEW_LINE> self.assertEqual(eth.ErrorsSent, 0) <NEW_LINE> self.assertEqual(eth.MulticastPacketsReceived, 103) <NEW_LINE> self.assertEqual(eth.MulticastPacketsSent, 104) <NEW_LINE> self.assertEqual(eth.PacketsReceived, 500) <NEW_LINE> self.assertEqual(eth.PacketsSent, 501) <NEW_LINE> self.assertEqual(eth.UnicastPacketsReceived, 500 - 100 - 103) <NEW_LINE> self.assertEqual(eth.UnicastPacketsSent, 501 - 101 - 104) <NEW_LINE> self.assertEqual(eth.UnknownProtoPacketsReceived, 0) <NEW_LINE> pon = dm.mrvl88601_netstats.NetdevStatsMrvl88601( 'testdata/mrvl88601_netstats/ani') <NEW_LINE> self.assertEqual(pon.BroadcastPacketsReceived, 200) <NEW_LINE> self.assertEqual(pon.BroadcastPacketsSent, 201) <NEW_LINE> self.assertEqual(pon.BytesReceived, 2001) <NEW_LINE> self.assertEqual(pon.BytesSent, 2002) <NEW_LINE> self.assertEqual(pon.DiscardPacketsReceived, 0) <NEW_LINE> self.assertEqual(pon.DiscardPacketsSent, 0) <NEW_LINE> self.assertEqual(pon.ErrorsReceived, 21) <NEW_LINE> self.assertEqual(pon.ErrorsSent, 0) <NEW_LINE> self.assertEqual(pon.MulticastPacketsReceived, 203) <NEW_LINE> self.assertEqual(pon.MulticastPacketsSent, 204) <NEW_LINE> self.assertEqual(pon.PacketsReceived, 500) <NEW_LINE> self.assertEqual(pon.PacketsSent, 500) <NEW_LINE> self.assertEqual(pon.UnicastPacketsReceived, 500 - 200 - 203) <NEW_LINE> self.assertEqual(pon.UnicastPacketsSent, 500 - 201 - 204) <NEW_LINE> self.assertEqual(eth.UnknownProtoPacketsReceived, 0) | Tests for mrvl88601_netstats.py. | 625990bac4546d3d9def83af
class QueueIterator(object): <NEW_LINE> <INDENT> def __init__(self, node, count): <NEW_LINE> <INDENT> self.end = node <NEW_LINE> self.count = count <NEW_LINE> <DEDENT> def __next__(self): <NEW_LINE> <INDENT> if self.count == 0: <NEW_LINE> <INDENT> raise StopIteration <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> end = self.end.value <NEW_LINE> self.end = self.end.next <NEW_LINE> self.count -= 1 <NEW_LINE> return end | QueueIterator: Iterator for LinkedQueue. | 625990ba627d3e7fe0e0909d |
class ArticleDetailSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> category = CategorySerializer(read_only=True) <NEW_LINE> author = StringRelatedField(read_only=True) <NEW_LINE> tags = StringRelatedField(many=True, read_only=True) <NEW_LINE> comment_count = serializers.IntegerField(read_only=True) <NEW_LINE> create_time = serializers.DateTimeField(format='%Y-%m-%d') <NEW_LINE> click = StringRelatedField(read_only=True) <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = Article <NEW_LINE> fields = ('id', 'create_time', 'title', 'body', 'page_view', 'author', 'category', 'tags', 'comment_count', 'click') | Serializer for querying article details | 625990ba627d3e7fe0e0909f
class PickleRepository(DiskRepository): <NEW_LINE> <INDENT> file_ending = 'pickle' <NEW_LINE> def __init__(self, storage_path): <NEW_LINE> <INDENT> DiskRepository.__init__(self, storage_path) <NEW_LINE> self.repository_types = [PickleRepository] <NEW_LINE> <DEDENT> def load(self, fp): <NEW_LINE> <INDENT> cluster = pickle.load(fp) <NEW_LINE> cluster.repository = self <NEW_LINE> return cluster <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def dump(cluster, fp): <NEW_LINE> <INDENT> pickle.dump(cluster, fp, pickle.HIGHEST_PROTOCOL) | This implementation of :py:class:`AbstractClusterRepository` stores the
cluster on the local disc using pickle. Therefore the cluster object and
all its dependencies will be saved in a pickle (binary) file.
:param str storage_path: path to the folder to store the cluster
information | 625990bac4546d3d9def83b9 |
class HealthcareProjectsLocationsDatasetsFhirStoresFhirConditionalDeleteResourceRequest(_messages.Message): <NEW_LINE> <INDENT> parent = _messages.StringField(1, required=True) <NEW_LINE> type = _messages.StringField(2, required=True) | A HealthcareProjectsLocationsDatasetsFhirStoresFhirConditionalDeleteReso
urceRequest object.
Fields:
parent: The name of the FHIR store this resource belongs to.
type: The type of the resource to update. | 625990bac4546d3d9def83ba |
class PretrainPanoSet(Dataset): <NEW_LINE> <INDENT> def __init__(self, meta_data_path, filter_func, transform = None): <NEW_LINE> <INDENT> self.path = meta_data_path <NEW_LINE> df = pd.read_csv(self.path) <NEW_LINE> data = [] <NEW_LINE> for idx, row in df.iterrows(): <NEW_LINE> <INDENT> if filter_func(row): <NEW_LINE> <INDENT> pano_path = getAbsoluteAddress(row['Pano.Img']) <NEW_LINE> target_path = getAbsoluteAddress(row['Target.Img']) <NEW_LINE> all_path = getAbsoluteAddress(row['All.Img']) <NEW_LINE> data.append(Panorama(pano_path, target_path, all_path)) <NEW_LINE> <DEDENT> <DEDENT> self.data = data <NEW_LINE> self.meta_data = df <NEW_LINE> self.transform = transform <NEW_LINE> <DEDENT> def __getitem__(self, index, doTransform=True): <NEW_LINE> <INDENT> patch = self.data[index] <NEW_LINE> input_pano = Image.open(patch.pano_path) <NEW_LINE> input_box = createImage(input_pano.size) <NEW_LINE> target_major = Image.open(patch.target_path) <NEW_LINE> target_minor = Image.open(patch.target_path) <NEW_LINE> target_major = target_major.point(lambda p: 255 if p > 50 else 0) <NEW_LINE> target_minor = target_minor.point(lambda p: 255 if p > 50 else 0) <NEW_LINE> if self.transform is not None and doTransform: <NEW_LINE> <INDENT> input_pano, input_box, target_major, target_minor = self.transform(input_pano, input_box, target_major, target_minor) <NEW_LINE> <DEDENT> input = torch.cat([input_box, input_pano], dim=0) <NEW_LINE> target = torch.cat([target_major, target_minor], dim=0) <NEW_LINE> assert set(np.unique(target)).issubset({0,1}) <NEW_LINE> return (input, target, index) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.data) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return 'Dataset: {} [size: {}]'.format(self.__class__.__name__, len(self.data)) | Extended Dataset class for PyTorch pretraining | 625990ba627d3e7fe0e090a5
class DenormalizedText(Mutable, types.TypeDecorator): <NEW_LINE> <INDENT> impl = types.Text <NEW_LINE> def __init__(self, coerce=int, separator=" ", **kwargs): <NEW_LINE> <INDENT> self.coerce = coerce <NEW_LINE> self.separator = separator <NEW_LINE> super(DenormalizedText, self).__init__(**kwargs) <NEW_LINE> <DEDENT> def process_bind_param(self, value, dialect): <NEW_LINE> <INDENT> if value is not None: <NEW_LINE> <INDENT> items = [str(item).strip() for item in value] <NEW_LINE> value = self.separator.join(item for item in items if item) <NEW_LINE> <DEDENT> return value <NEW_LINE> <DEDENT> def process_result_value(self, value, dialect): <NEW_LINE> <INDENT> if not value: <NEW_LINE> <INDENT> return set() <NEW_LINE> <DEDENT> return set(self.coerce(item) for item in value.split(self.separator)) <NEW_LINE> <DEDENT> def copy_value(self, value): <NEW_LINE> <INDENT> return set(value) | Stores denormalized primary keys that can be
accessed as a set.
:param coerce: coercion function that ensures correct
type is returned
:param separator: separator character
Source: https://github.com/imwilsonxu/fbone/blob/master/fbone/user/models.py#L13-L45 | 625990bac4546d3d9def83bb |
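A hedged sketch of using the type as a column; values are written as a space-separated string and read back as a set of ints (the model and table names are illustrative):

```python
from sqlalchemy import Column, Integer
from sqlalchemy.ext.declarative import declarative_base

Base = declarative_base()


class Account(Base):
    __tablename__ = 'accounts'
    id = Column(Integer, primary_key=True)
    # Stored as e.g. "1 2 3" in a TEXT column, read back as {1, 2, 3}.
    follower_ids = Column(DenormalizedText())
```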
class RelatedObjectLinkMixin(object): <NEW_LINE> <INDENT> link_fields = [] <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> if self.link_fields: <NEW_LINE> <INDENT> for field_name in self.link_fields: <NEW_LINE> <INDENT> func_name = field_name + '_link' <NEW_LINE> setattr(self, func_name, self._generate_link_func(field_name)) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def _generate_link_func(self, field_name): <NEW_LINE> <INDENT> def _func(obj, *args, **kwargs): <NEW_LINE> <INDENT> related_obj = getattr(obj, field_name) <NEW_LINE> if related_obj: <NEW_LINE> <INDENT> content_type = ContentType.objects.get_for_model(related_obj.__class__) <NEW_LINE> url_name = 'admin:%s_%s_change' % (content_type.app_label, content_type.model) <NEW_LINE> url = reverse(url_name, args=[related_obj.pk]) <NEW_LINE> return format_html('<a href="{}" class="changelink">{}</a>', url, str(related_obj)) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> return _func | Generate links to related links. Add this mixin to a Django admin model. Add a 'link_fields' attribute to the admin
containing a list of related model fields and then add the attribute name with a '_link' suffix to the
list_display attribute. For Example a Student model with a 'teacher' attribute would have an Admin class like this:
class StudentAdmin(RelatedObjectLinkMixin, ...):
link_fields = ['teacher']
list_display = [
...
'teacher_link'
...
] | 625990ba627d3e7fe0e090a8 |
class PhysicalVolume: <NEW_LINE> <INDENT> def __init__(self, handle: Any) -> None: <NEW_LINE> <INDENT> self.handle = handle <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self) -> str: <NEW_LINE> <INDENT> name: bytes = lvm_pv_get_name(self.handle) <NEW_LINE> return name.decode('ascii') <NEW_LINE> <DEDENT> @property <NEW_LINE> def uuid(self) -> str: <NEW_LINE> <INDENT> uuid: bytes = lvm_pv_get_uuid(self.handle) <NEW_LINE> return uuid.decode('ascii') <NEW_LINE> <DEDENT> @property <NEW_LINE> def mda_count(self) -> int: <NEW_LINE> <INDENT> return lvm_pv_get_mda_count(self.handle) <NEW_LINE> <DEDENT> @property <NEW_LINE> def dev_size(self) -> int: <NEW_LINE> <INDENT> return lvm_pv_get_dev_size(self.handle) <NEW_LINE> <DEDENT> @property <NEW_LINE> def size(self) -> int: <NEW_LINE> <INDENT> return lvm_pv_get_size(self.handle) <NEW_LINE> <DEDENT> @property <NEW_LINE> def free(self) -> int: <NEW_LINE> <INDENT> return lvm_pv_get_free(self.handle) | A physical volume | 625990bbc4546d3d9def83be |
class SummarizeDeNovoCountsStepPart(FilterDeNovosBaseStepPart): <NEW_LINE> <INDENT> name = "summarize_counts" <NEW_LINE> @listify <NEW_LINE> def get_input_files(self, action): <NEW_LINE> <INDENT> _ = action <NEW_LINE> name_pattern = "{mapper}.{caller}.%sde_novos_hard.{index_library}" % (self.prev_token,) <NEW_LINE> tpl = "work/" + name_pattern + "/out/" + name_pattern + ".summary.txt" <NEW_LINE> for sheet in filter(is_not_background, self.parent.shortcut_sheets): <NEW_LINE> <INDENT> for pedigree in sheet.cohort.pedigrees: <NEW_LINE> <INDENT> for donor in pedigree.donors: <NEW_LINE> <INDENT> if not donor.dna_ngs_library: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> elif not donor.father or not donor.father.dna_ngs_library: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> elif not donor.mother or not donor.mother.dna_ngs_library: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> for caller in self.config["tools_variant_calling"]: <NEW_LINE> <INDENT> yield tpl.format( mapper="{mapper}", caller=caller, index_library=donor.dna_ngs_library.name, ) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> <DEDENT> @dictify <NEW_LINE> def get_output_files(self, action): <NEW_LINE> <INDENT> assert action == "run" <NEW_LINE> yield "txt", "work/{mapper}.denovo_count_summary/out/{mapper}.denovo_count_summary.txt" <NEW_LINE> yield "txt_md5", ( "work/{mapper}.denovo_count_summary/out/{mapper}.denovo_count_summary.txt.md5" ) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def get_log_file(action): <NEW_LINE> <INDENT> assert action == "run" <NEW_LINE> return "work/{mapper}.denovo_count_summary/log/{mapper}.denovo_count_summary.log" | Step part for creating summary counts. | 625990bb627d3e7fe0e090b0 |
class memoized(object): <NEW_LINE> <INDENT> def __init__(self, func): <NEW_LINE> <INDENT> self.func = func <NEW_LINE> self.cache = {} <NEW_LINE> <DEDENT> def __call__(self, *args): <NEW_LINE> <INDENT> args_key = repr(args) <NEW_LINE> if args_key in self.cache: <NEW_LINE> <INDENT> return self.cache[args_key] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> value = self.func(*args) <NEW_LINE> self.cache[args_key] = value <NEW_LINE> return value <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.func.__doc__ <NEW_LINE> <DEDENT> def __get__(self, obj, objtype): <NEW_LINE> <INDENT> return functools.partial(self.__call__, obj) | Decorator. Caches a function's return value each time it is called.
If called later with the same arguments, the cached value is returned
(not reevaluated). | 625990bb187af65679d2ad04 |
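The classic use of the recipe above: memoising a recursive function (note that `__repr__` returns the wrapped function's docstring):

```python
@memoized
def fibonacci(n):
    """Return the n-th Fibonacci number."""
    if n in (0, 1):
        return n
    return fibonacci(n - 1) + fibonacci(n - 2)

print(fibonacci(30))    # fast: each n is computed only once
print(repr(fibonacci))  # prints the docstring, per __repr__
```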
class Request: <NEW_LINE> <INDENT> def __init__(self, module): <NEW_LINE> <INDENT> self.module = module <NEW_LINE> self.data = {} <NEW_LINE> <DEDENT> def setData(self, name, value): <NEW_LINE> <INDENT> self.data[name] = value <NEW_LINE> <DEDENT> def setDataFromFile(self, name, file): <NEW_LINE> <INDENT> self.data[name] = base64.standard_b64encode( file.read()).decode('ascii') <NEW_LINE> file.close() <NEW_LINE> <DEDENT> def removeData(self, name): <NEW_LINE> <INDENT> del self.data[name] <NEW_LINE> <DEDENT> def clearData(self): <NEW_LINE> <INDENT> self.data.clear() | Request class used for prepared requests | 625990bb627d3e7fe0e090b2
class ChunkedDownload(_helpers.RequestsMixin, _download.ChunkedDownload): <NEW_LINE> <INDENT> def consume_next_chunk(self, transport): <NEW_LINE> <INDENT> method, url, payload, headers = self._prepare_request() <NEW_LINE> result = _helpers.http_request( transport, method, url, data=payload, headers=headers, retry_strategy=self._retry_strategy) <NEW_LINE> self._process_response(result) <NEW_LINE> return result | Download a resource in chunks from a Google API.
Args:
media_url (str): The URL containing the media to be downloaded.
chunk_size (int): The number of bytes to be retrieved in each
request.
stream (IO[bytes]): A write-able stream (i.e. file-like object) that
will be used to concatenate chunks of the resource as they are
downloaded.
start (int): The first byte in a range to be downloaded. If not
provided, defaults to ``0``.
end (int): The last byte in a range to be downloaded. If not
provided, will download to the end of the media.
headers (Optional[Mapping[str, str]]): Extra headers that should
be sent with each request, e.g. headers for data encryption
key headers.
Attributes:
media_url (str): The URL containing the media to be downloaded.
start (Optional[int]): The first byte in a range to be downloaded.
end (Optional[int]): The last byte in a range to be downloaded.
chunk_size (int): The number of bytes to be retrieved in each request.
Raises:
ValueError: If ``start`` is negative. | 625990bb187af65679d2ad05 |
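A hedged sketch of driving the consume loop; `media_url` is a placeholder, `transport` is assumed to be an authorised session object, and the `finished` flag is assumed to come from the base download class:

```python
import io

stream = io.BytesIO()
download = ChunkedDownload(media_url, 1024 * 1024, stream)  # 1 MiB chunks

while not download.finished:
    download.consume_next_chunk(transport)

payload = stream.getvalue()
```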
class EncoderNet2d(_EncoderNetNd): <NEW_LINE> <INDENT> def __init__(self, channel, layers, kernel_size=3, in_planes=1, out_length=2): <NEW_LINE> <INDENT> super().__init__(2, channel=channel, layers=layers, kernel_size=kernel_size, in_planes=in_planes, out_length=out_length) | 2D convolutional down-scale (encoder) network.
This module is a built-in model for convolutional networks. The network
could be used for down-scaling or classification.
The network down-samples the input data according to the network
depth. The depth is given by the length of the argument "layers". | 625990bb187af65679d2ad08