code
stringlengths 4
4.48k
| docstring
stringlengths 1
6.45k
| _id
stringlengths 24
24
|
---|---|---|
class PackageTree: <NEW_LINE> <INDENT> from bisect import bisect_left <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.root = Package('') <NEW_LINE> self.packages = {'': self.root} <NEW_LINE> self.modules = [] <NEW_LINE> self.mod_dict = {} <NEW_LINE> self.cat_dict = {} <NEW_LINE> <DEDENT> def addModule(self, module): <NEW_LINE> <INDENT> self.getPackage(module.pckg_fqn).addModule(module) <NEW_LINE> insert_pos = self.bisect_left(self.modules, module) <NEW_LINE> self.modules.insert(insert_pos, module) <NEW_LINE> self.mod_dict[module.fqn] = module <NEW_LINE> if hasattr(module, 'cat_dict'): <NEW_LINE> <INDENT> self.addCatDict(module.cat_dict) <NEW_LINE> <DEDENT> <DEDENT> def getPackage(self, fqn): <NEW_LINE> <INDENT> package = self.packages.get(fqn) <NEW_LINE> if not package: <NEW_LINE> <INDENT> parent_fqn, sep, name = fqn.rpartition('.') <NEW_LINE> parentPackage = self.getPackage(parent_fqn) <NEW_LINE> package = Package(fqn) <NEW_LINE> parentPackage.addPackage(package) <NEW_LINE> self.packages[fqn] = package <NEW_LINE> <DEDENT> return package <NEW_LINE> <DEDENT> def sortTree(self): self.sort(self.root) <NEW_LINE> def sort(self, pckg): <NEW_LINE> <INDENT> pckg.packages.sort() <NEW_LINE> pckg.modules.sort() <NEW_LINE> for subpckg in pckg.packages: <NEW_LINE> <INDENT> self.sort(subpckg) <NEW_LINE> <DEDENT> <DEDENT> def addCatDict(self, cat_dict): <NEW_LINE> <INDENT> for kind, symbol_list in cat_dict.iteritems(): <NEW_LINE> <INDENT> self.cat_dict.setdefault(kind, []).extend(symbol_list) <NEW_LINE> <DEDENT> <DEDENT> def sortCatDict(self): <NEW_LINE> <INDENT> map(list.sort, self.cat_dict.itervalues()) <NEW_LINE> <DEDENT> def listSymbols(self, kinds): <NEW_LINE> <INDENT> syms = [] <NEW_LINE> for kind in kinds: <NEW_LINE> <INDENT> if kind in self.cat_dict: <NEW_LINE> <INDENT> syms.extend(self.cat_dict[kind]) <NEW_LINE> <DEDENT> <DEDENT> syms.sort() <NEW_LINE> return syms <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def symbolsByLetter(cls, symbols): <NEW_LINE> <INDENT> 
letter_dict = {} <NEW_LINE> for sym in symbols: <NEW_LINE> <INDENT> initial_letter = sym.name[0].upper() <NEW_LINE> letter_dict.setdefault(initial_letter, []).append(sym) <NEW_LINE> <DEDENT> letter_list = letter_dict.keys() <NEW_LINE> letter_list.sort(key=unicode.lower) <NEW_LINE> return letter_dict, letter_list | Represents a tree of all packages and modules in a project. | 625990a0c4546d3d9def820a |
class AuthorDetailView(LoginRequiredMixin, generic.DetailView): <NEW_LINE> <INDENT> model = Author | Generic class-based detail view for an author. | 625990a050812a4eaa621b36 |
class String(Unit): <NEW_LINE> <INDENT> cls = MEASURE <NEW_LINE> def __init__(self, fixlen=None, encoding=None): <NEW_LINE> <INDENT> if fixlen is None and encoding is None: <NEW_LINE> <INDENT> self.fixlen = None <NEW_LINE> self.encoding = u'U8' <NEW_LINE> <DEDENT> elif isinstance(fixlen, _inttypes + (IntegerConstant,)) and encoding is None: <NEW_LINE> <INDENT> if isinstance(fixlen, IntegerConstant): <NEW_LINE> <INDENT> self.fixlen = fixlen.val <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.fixlen = fixlen <NEW_LINE> <DEDENT> self.encoding = u'U8' <NEW_LINE> <DEDENT> elif isinstance(fixlen, _strtypes + (StringConstant,)) and encoding is None: <NEW_LINE> <INDENT> self.fixlen = None <NEW_LINE> if isinstance(fixlen, StringConstant): <NEW_LINE> <INDENT> self.encoding = fixlen.val <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.encoding = unicode(fixlen) <NEW_LINE> <DEDENT> <DEDENT> elif isinstance(fixlen, _inttypes + (IntegerConstant,)) and isinstance(encoding, _strtypes + (StringConstant,)): <NEW_LINE> <INDENT> if isinstance(fixlen, IntegerConstant): <NEW_LINE> <INDENT> self.fixlen = fixlen.val <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.fixlen = fixlen <NEW_LINE> <DEDENT> if isinstance(encoding, StringConstant): <NEW_LINE> <INDENT> self.encoding = encoding.val <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.encoding = unicode(encoding) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError(('Unexpected types to String constructor ' '(%s, %s)') % (type(fixlen), type(encoding))) <NEW_LINE> <DEDENT> if not self.encoding in _canonical_string_encodings: <NEW_LINE> <INDENT> raise ValueError('Unsupported string encoding %s' % repr(self.encoding)) <NEW_LINE> <DEDENT> self.encoding = _canonical_string_encodings[self.encoding] <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> if self.fixlen is None and self.encoding == 'U8': <NEW_LINE> <INDENT> return 'string' <NEW_LINE> <DEDENT> elif self.fixlen is not None and self.encoding == 'U8': 
<NEW_LINE> <INDENT> return 'string(%i)' % self.fixlen <NEW_LINE> <DEDENT> elif self.fixlen is None and self.encoding != 'U8': <NEW_LINE> <INDENT> return 'string(%s)' % repr(self.encoding).strip('u') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 'string(%i, %s)' % (self.fixlen, repr(self.encoding).strip('u')) <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return ''.join(["ctype(\"", str(self).encode('unicode_escape').decode('ascii'), "\")"]) <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> if type(other) is String: <NEW_LINE> <INDENT> return self.fixlen == other.fixlen and self.encoding == other.encoding <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return False <NEW_LINE> <DEDENT> <DEDENT> def __hash__(self): <NEW_LINE> <INDENT> return hash((self.fixlen, self.encoding)) | String container | 625990a0091ae35668706b0f |
class CoCoContextPolicy(Policy): <NEW_LINE> <INDENT> def __init__( self, priority: int = FORM_POLICY_PRIORITY, ) -> None: <NEW_LINE> <INDENT> super().__init__( priority=priority ) <NEW_LINE> <DEDENT> def train( self, training_trackers: List[DialogueStateTracker], domain: Domain, **kwargs: Any, ) -> None: <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def predict_action_probabilities( self, tracker: DialogueStateTracker, domain: Domain ) -> List[float]: <NEW_LINE> <INDENT> prediction = [0.0] * domain.num_actions <NEW_LINE> active_component = tracker.active_form.get("name") <NEW_LINE> if tracker.latest_action_name == ACTION_LISTEN_NAME: <NEW_LINE> <INDENT> if active_component: <NEW_LINE> <INDENT> idx = domain.index_for_action(active_component) <NEW_LINE> if idx is None: <NEW_LINE> <INDENT> warnings.warn( "MappingPolicy tried to predict unknown " f"action '{active_component}'. Make sure all mapped actions are " "listed in the domain." ) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> prediction[idx] = 1 <NEW_LINE> <DEDENT> <DEDENT> if any(prediction): <NEW_LINE> <INDENT> logger.debug( "Continue component exec" " '{}' in the domain." 
"".format(active_component) ) <NEW_LINE> <DEDENT> <DEDENT> elif tracker.latest_action_name == active_component and active_component is not None: <NEW_LINE> <INDENT> latest_action = tracker.get_last_event_for(ActionExecuted) <NEW_LINE> assert latest_action.action_name == active_component <NEW_LINE> if latest_action.policy and latest_action.policy.endswith( type(self).__name__ ): <NEW_LINE> <INDENT> logger.debug( "The mapped action, '{}', for this intent, '{}', was " "executed last so MappingPolicy is returning to " "action_listen.".format(active_component, "") ) <NEW_LINE> idx = domain.index_for_action(ACTION_LISTEN_NAME) <NEW_LINE> prediction[idx] = 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> logger.debug( "The mapped action, '{}', for this intent, '{}', was " "executed last, but it was predicted by another policy, '{}', so MappingPolicy is not" "predicting any action.".format( active_component, "", latest_action.policy ) ) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> logger.debug( "There is no mapped action for the predicted intent, " "'{}'.".format("") ) <NEW_LINE> <DEDENT> return prediction <NEW_LINE> <DEDENT> def persist(self, path: Text) -> None: <NEW_LINE> <INDENT> config_file = os.path.join(path, "coco_context_policy.json") <NEW_LINE> meta = {"priority": self.priority} <NEW_LINE> rasa.utils.io.create_directory_for_file(config_file) <NEW_LINE> rasa.utils.io.dump_obj_as_json_to_file(config_file, meta) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def load(cls, path: Text) -> "CoCoContextPolicy": <NEW_LINE> <INDENT> meta = {} <NEW_LINE> if os.path.exists(path): <NEW_LINE> <INDENT> meta_path = os.path.join(path, "coco_context_policy.json") <NEW_LINE> if os.path.isfile(meta_path): <NEW_LINE> <INDENT> meta = json.loads(rasa.utils.io.read_file(meta_path)) <NEW_LINE> <DEDENT> <DEDENT> return cls(**meta) | Maintains CoCo multi-turn session by keeping active action mapped
to custom CoCo action while the Form set by CoCo to maintain the session
is active. | 625990a0d8ef3951e32c8dcb |
class Waypoint(nmeaSentence): <NEW_LINE> <INDENT> def __init__ (self, payload): <NEW_LINE> <INDENT> nmeaSentence.__init__ (self, payload) <NEW_LINE> p = payload.split (',') <NEW_LINE> self.id = p[0] <NEW_LINE> self.name = p[1] <NEW_LINE> self._mutable = False <NEW_LINE> return <NEW_LINE> <DEDENT> def defineName (self, name): <NEW_LINE> <INDENT> if name is not None: <NEW_LINE> <INDENT> payload = self.id + "," + name <NEW_LINE> result = Waypoint(payload) <NEW_LINE> pass <NEW_LINE> <DEDENT> else: result = self <NEW_LINE> return result <NEW_LINE> <DEDENT> pass | Define a waypoint maker.
The waypoint marker in the GPS data stream. It is defined to be:
PDIYWP,{name}
where the {name} is user defined. | 625990a0187af65679d2ab5a |
class CommandList(command.BaseCommand): <NEW_LINE> <INDENT> NAME = "list" <NEW_LINE> HELP = "List metrics." <NEW_LINE> def add_arguments(self, parser): <NEW_LINE> <INDENT> parser.add_argument( "glob", help="One metric name or globbing on metrics names" ) <NEW_LINE> <DEDENT> def run(self, accessor, opts): <NEW_LINE> <INDENT> accessor.connect() <NEW_LINE> for directory in accessor.glob_directory_names(opts.glob): <NEW_LINE> <INDENT> print("d %s" % directory) <NEW_LINE> <DEDENT> for metric in list_metrics(accessor, opts.glob): <NEW_LINE> <INDENT> if metric: <NEW_LINE> <INDENT> print("m %s %s" % (metric.name, metric.metadata.as_string_dict())) | List for metrics. | 625990a050812a4eaa621b39 |
class SplashScreen(QSplashScreen): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> ericPic = QPixmap(os.path.join(getConfig('ericPixDir'), 'ericSplash.png')) <NEW_LINE> self.labelAlignment = Qt.Alignment(Qt.AlignBottom | Qt.AlignRight | Qt.AlignAbsolute) <NEW_LINE> QSplashScreen.__init__(self, ericPic) <NEW_LINE> self.show() <NEW_LINE> QApplication.flush() <NEW_LINE> <DEDENT> def showMessage(self, msg): <NEW_LINE> <INDENT> logging.debug(unicode(msg)) <NEW_LINE> QSplashScreen.showMessage(self, msg, self.labelAlignment, QColor(Qt.white)) <NEW_LINE> QApplication.processEvents() <NEW_LINE> <DEDENT> def clearMessage(self): <NEW_LINE> <INDENT> QSplashScreen.clearMessage(self) <NEW_LINE> QApplication.processEvents() | Class implementing a splashscreen for eric4. | 625990a1adb09d7d5dc0c440 |
class Start_State(StateBase): <NEW_LINE> <INDENT> def __init__(self, obj_func): <NEW_LINE> <INDENT> StateBase.__init__(self, STATE_NAME_L[VD_READY], obj_func) | VD Task start state | 625990a150812a4eaa621b3a |
class ExpressRoutePortListResult(msrest.serialization.Model): <NEW_LINE> <INDENT> _attribute_map = { 'value': {'key': 'value', 'type': '[ExpressRoutePort]'}, 'next_link': {'key': 'nextLink', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, value: Optional[List["ExpressRoutePort"]] = None, next_link: Optional[str] = None, **kwargs ): <NEW_LINE> <INDENT> super(ExpressRoutePortListResult, self).__init__(**kwargs) <NEW_LINE> self.value = value <NEW_LINE> self.next_link = next_link | Response for ListExpressRoutePorts API service call.
:param value: A list of ExpressRoutePort resources.
:type value: list[~azure.mgmt.network.v2019_04_01.models.ExpressRoutePort]
:param next_link: The URL to get the next set of results.
:type next_link: str | 625990a1187af65679d2ab5d |
class UrlTestCases(LiveServerTestCase): <NEW_LINE> <INDENT> def validate_url(self, url, status_code=200, find_str=None): <NEW_LINE> <INDENT> resp = Client().get(url) <NEW_LINE> self.assertEquals(resp.status_code, status_code, "%s (check status code)" % url) <NEW_LINE> if find_str is not None: <NEW_LINE> <INDENT> self.assertTrue(find_str in resp.content, "%s (check content)" % url) <NEW_LINE> <DEDENT> <DEDENT> def test_urls(self): <NEW_LINE> <INDENT> self.validate_url('/') <NEW_LINE> self.validate_url('/accounts/login/') <NEW_LINE> self.validate_url('/accounts/register/') | Walk through a set of URLs, and validate very basic properties (status code, some text)
A good test to weed out untested view/template errors | 625990a1099cdd3c6367636f |
class TestSubClass(object): <NEW_LINE> <INDENT> name = 'testsub' <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.value = 1 | test for mkdoc | 625990a1d8ef3951e32c8dd2 |
class logged_in(object): <NEW_LINE> <INDENT> def __call__(self, method): <NEW_LINE> <INDENT> @functools.wraps(method) <NEW_LINE> def wrapper(self, *args, **kwargs): <NEW_LINE> <INDENT> if self.user is None: <NEW_LINE> <INDENT> redirect_url = self.url_for("userbase.login", force_external=True) <NEW_LINE> came_from = urllib.quote_plus(self.request.url) <NEW_LINE> self.flash(self._('Please log in.'), category="danger") <NEW_LINE> return redirect('%s?came_from=%s' %(redirect_url, came_from)) <NEW_LINE> <DEDENT> return method(self, *args, **kwargs) <NEW_LINE> <DEDENT> return wrapper | check if a valid user is present | 625990a150812a4eaa621b3f |
class Wall(Shape): <NEW_LINE> <INDENT> def __init__(self, shape, particle_type, color, material, quality, box_l, rasterize_resolution, rasterize_pointsize): <NEW_LINE> <INDENT> super().__init__(shape, particle_type, color, material, quality, box_l, rasterize_resolution, rasterize_pointsize) <NEW_LINE> self.distance = self.shape.get_parameter('dist') <NEW_LINE> self.normal = self.shape.get_parameter('normal') <NEW_LINE> self.box_diag = np.linalg.norm(self.box_l) <NEW_LINE> self.edges = self._edges_from_pn(self.distance * np.array(self.normal), self.normal, 2 * self.box_diag) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def _get_tangents(n): <NEW_LINE> <INDENT> n = np.array(n) <NEW_LINE> v1 = np.random.randn(3) <NEW_LINE> v1 -= v1.dot(n) * n / np.linalg.norm(n)**2 <NEW_LINE> v2 = np.cross(n, v1) <NEW_LINE> v1 /= np.linalg.norm(v1) <NEW_LINE> v2 /= np.linalg.norm(v2) <NEW_LINE> return v1, v2 <NEW_LINE> <DEDENT> def _edges_from_pn(self, p, n, diag): <NEW_LINE> <INDENT> v1, v2 = self._get_tangents(n) <NEW_LINE> edges = [p + diag * v1, p + diag * v2, p - diag * v1, p - diag * v2] <NEW_LINE> return edges <NEW_LINE> <DEDENT> def draw(self): <NEW_LINE> <INDENT> draw_plane(self.edges, self.color, self.material) | Drawable Shape Wall. | 625990a150812a4eaa621b40 |
class DescribeDDoSNetEvListResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.Business = None <NEW_LINE> self.Id = None <NEW_LINE> self.StartTime = None <NEW_LINE> self.EndTime = None <NEW_LINE> self.Data = None <NEW_LINE> self.Total = None <NEW_LINE> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.Business = params.get("Business") <NEW_LINE> self.Id = params.get("Id") <NEW_LINE> self.StartTime = params.get("StartTime") <NEW_LINE> self.EndTime = params.get("EndTime") <NEW_LINE> if params.get("Data") is not None: <NEW_LINE> <INDENT> self.Data = [] <NEW_LINE> for item in params.get("Data"): <NEW_LINE> <INDENT> obj = DDoSEventRecord() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.Data.append(obj) <NEW_LINE> <DEDENT> <DEDENT> self.Total = params.get("Total") <NEW_LINE> self.RequestId = params.get("RequestId") | DescribeDDoSNetEvList response structure.
| 625990a1adb09d7d5dc0c44e |
class QueryMultiCheckboxField(QuerySelectMultipleField): <NEW_LINE> <INDENT> widget = PassthroughListWidget(prefix_label=False) <NEW_LINE> option_widget = widgets.CheckboxInput() | `MultiCheckboxField` for SQLAlchemy queries. | 625990a150812a4eaa621b42 |
@register_resource <NEW_LINE> class v1_Scale(Resource): <NEW_LINE> <INDENT> __kind__ = 'v1.Scale' <NEW_LINE> __fields__ = { 'api_version': 'apiVersion', 'kind': 'kind', 'metadata': 'metadata', 'spec': 'spec', 'status': 'status', } <NEW_LINE> __types__ = { 'metadata': 'v1.ObjectMeta', 'spec': 'v1.ScaleSpec', 'status': 'v1.ScaleStatus', } <NEW_LINE> __required__ = set() <NEW_LINE> api_version = None <NEW_LINE> kind = None <NEW_LINE> metadata = None <NEW_LINE> spec = None <NEW_LINE> status = None <NEW_LINE> def __init__(self, **_kwargs_): <NEW_LINE> <INDENT> self.kind = 'Scale' <NEW_LINE> self.api_version = 'v1' <NEW_LINE> super().__init__(**_kwargs_) | Scale represents a scaling request for a resource. | 625990a1091ae35668706b27 |
class Operator_ToggleConsole(Operator): <NEW_LINE> <INDENT> bl_idname = "object.toggle_console" <NEW_LINE> bl_label = "Toggle console" <NEW_LINE> def execute(self, context): <NEW_LINE> <INDENT> bpy.ops.wm.console_toggle() <NEW_LINE> return {'FINISHED'} | Show or hide the console | 625990a1091ae35668706b29 |
class OfficeParser(Parser): <NEW_LINE> <INDENT> def parse(self, document_page, descriptor=None): <NEW_LINE> <INDENT> logger.debug('executing') <NEW_LINE> try: <NEW_LINE> <INDENT> office_converter = OfficeConverter() <NEW_LINE> document_file = document_page.document.document_save_to_temp_dir(document_page.document.checksum) <NEW_LINE> logger.debug('document_file: %s', document_file) <NEW_LINE> office_converter.convert(document_file, mimetype=document_page.document.file_mimetype) <NEW_LINE> if office_converter.exists: <NEW_LINE> <INDENT> input_filepath = office_converter.output_filepath <NEW_LINE> logger.debug('office_converter.output_filepath: %s', input_filepath) <NEW_LINE> parse_document_page(document_page, descriptor=open(input_filepath), mimetype='application/pdf') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ParserError <NEW_LINE> <DEDENT> <DEDENT> except OfficeConversionError as exception: <NEW_LINE> <INDENT> logger.error(exception) <NEW_LINE> raise ParserError | Parser for office document formats | 625990a150812a4eaa621b44 |
class GCMTimeoutError(GCMServerError): <NEW_LINE> <INDENT> code = 'Unavailable' <NEW_LINE> description = 'Timeout' | Exception for server timeout. | 625990a150812a4eaa621b45 |
@gin.register <NEW_LINE> class F0LdEvaluator(BaseEvaluator): <NEW_LINE> <INDENT> def __init__(self, sample_rate, frame_rate, run_f0_crepe=True): <NEW_LINE> <INDENT> super().__init__(sample_rate, frame_rate) <NEW_LINE> self._loudness_metrics = metrics.LoudnessMetrics( sample_rate=sample_rate, frame_rate=frame_rate) <NEW_LINE> self._f0_metrics = metrics.F0Metrics( sample_rate=sample_rate, frame_rate=frame_rate) <NEW_LINE> self._run_f0_crepe = run_f0_crepe <NEW_LINE> if self._run_f0_crepe: <NEW_LINE> <INDENT> self._f0_crepe_metrics = metrics.F0CrepeMetrics( sample_rate=sample_rate, frame_rate=frame_rate) <NEW_LINE> <DEDENT> <DEDENT> def evaluate(self, batch, outputs, losses): <NEW_LINE> <INDENT> del losses <NEW_LINE> audio_gen = outputs['audio_gen'] <NEW_LINE> self._loudness_metrics.update_state(batch, audio_gen) <NEW_LINE> if 'f0_hz' in outputs and 'f0_hz' in batch: <NEW_LINE> <INDENT> self._f0_metrics.update_state(batch, outputs['f0_hz']) <NEW_LINE> <DEDENT> elif self._run_f0_crepe: <NEW_LINE> <INDENT> self._f0_crepe_metrics.update_state(batch, audio_gen) <NEW_LINE> <DEDENT> <DEDENT> def sample(self, batch, outputs, step): <NEW_LINE> <INDENT> if 'f0_hz' in outputs and 'f0_hz' in batch: <NEW_LINE> <INDENT> summaries.f0_summary(batch['f0_hz'], outputs['f0_hz'], step, name='f0_harmonic') <NEW_LINE> <DEDENT> <DEDENT> def flush(self, step): <NEW_LINE> <INDENT> self._loudness_metrics.flush(step) <NEW_LINE> self._f0_metrics.flush(step) <NEW_LINE> if self._run_f0_crepe: <NEW_LINE> <INDENT> self._f0_crepe_metrics.flush(step) | Computes F0 and loudness metrics. | 625990a1c4546d3d9def821b |
class TransitLineProxy(): <NEW_LINE> <INDENT> DEFAULT_ATTS = set(['description', 'layover_time', 'speed', 'headway', 'data1', 'data2', 'data3']) <NEW_LINE> __MAP = {'layover_time': 'layover'} <NEW_LINE> def __init__(self, line): <NEW_LINE> <INDENT> self.id = line.id <NEW_LINE> self.vehicle = line.vehicle.number <NEW_LINE> self.description = line.description <NEW_LINE> self.headway = line.headway <NEW_LINE> self.speed = line.speed <NEW_LINE> self.layover = line.layover_time <NEW_LINE> self.data1 = line.data1 <NEW_LINE> self.data2 = line.data2 <NEW_LINE> self.data3 = line.data3 <NEW_LINE> self.exatts = {} <NEW_LINE> for attId in line.network.attributes('TRANSIT_LINE'): <NEW_LINE> <INDENT> if not attId in self.DEFAULT_ATTS: <NEW_LINE> <INDENT> self.exatts[attId] = line[attId] <NEW_LINE> <DEDENT> <DEDENT> self.segments = [TransitSegmentProxy(segment) for segment in line.segments(True)] <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> if type(key) != str and type(key) != unicode: raise TypeError("Attribute must be a string") <NEW_LINE> if key in self.__MAP: key = self.__MAP[key] <NEW_LINE> if key in self.exatts: <NEW_LINE> <INDENT> return self.exatts[key] <NEW_LINE> <DEDENT> return self.__dict__[key] <NEW_LINE> <DEDENT> def __setitem__(self, key, value): <NEW_LINE> <INDENT> if type(key) != str and type(key) != unicode: raise TypeError("Attribute must be a string") <NEW_LINE> if key in self.__MAP: key = self.__MAP[key] <NEW_LINE> if key in self.exatts: <NEW_LINE> <INDENT> self.exatts = value <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.__dict__[key] = value <NEW_LINE> <DEDENT> <DEDENT> def copyToNetwork(self, network): <NEW_LINE> <INDENT> itinerary = [segment.iNode.number for segment in self.segments] <NEW_LINE> copy = network.create_transit_line(self.id, self.vehicle, itinerary) <NEW_LINE> copy.description = self.description <NEW_LINE> copy.headway = self.headway <NEW_LINE> copy.speed = self.speed <NEW_LINE> copy.layover_time = self.layover 
<NEW_LINE> copy.data1 = self.data1 <NEW_LINE> copy.data2 = self.data2 <NEW_LINE> copy.data3 = self.data3 <NEW_LINE> for key, val in self.exatts.iteritems(): <NEW_LINE> <INDENT> copy[key] = val <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> for i, segment in enumerate(copy.segments(True)): <NEW_LINE> <INDENT> self.segments[i].copyToSegment(segment) <NEW_LINE> <DEDENT> <DEDENT> except: <NEW_LINE> <INDENT> network.delete_transit_line(self.id) <NEW_LINE> raise <NEW_LINE> <DEDENT> return copy | Data container for copying transit line data. For easy line itinerary modification,
the line's segments are stored in a simple list made up of TransitSegmentProxy
objects. This class's copyToNetwork method can then be used to 'save' the changes
to the network. If errors are encountered, this class will safely roll back all
saved changes. | 625990a150812a4eaa621b46 |
class UserProfileSerializer(serializers.ModelSerializer): <NEW_LINE> <INDENT> class Meta: <NEW_LINE> <INDENT> model = models.UserProfile <NEW_LINE> fields = ('id', 'email', 'name', 'password') <NEW_LINE> extra_kwargs = { 'password': { 'write_only': True, 'style': {'input_type': 'password'} } } <NEW_LINE> def create(self, validate_data): <NEW_LINE> <INDENT> user = models.UserProfile.objects.create_user( email=validate_data('email'), name=validate_data('name'), password=validate_data('password'), ) <NEW_LINE> return user | Serialiser a user profile object | 625990a1099cdd3c63676379 |
class MockHub(JupyterHub): <NEW_LINE> <INDENT> db_file = None <NEW_LINE> def _ip_default(self): <NEW_LINE> <INDENT> return 'localhost' <NEW_LINE> <DEDENT> def _authenticator_class_default(self): <NEW_LINE> <INDENT> return MockPAMAuthenticator <NEW_LINE> <DEDENT> def _spawner_class_default(self): <NEW_LINE> <INDENT> return MockSpawner <NEW_LINE> <DEDENT> def _admin_users_default(self): <NEW_LINE> <INDENT> return {'admin'} <NEW_LINE> <DEDENT> def start(self, argv=None): <NEW_LINE> <INDENT> self.db_file = NamedTemporaryFile() <NEW_LINE> self.db_url = 'sqlite:///' + self.db_file.name <NEW_LINE> evt = threading.Event() <NEW_LINE> @gen.coroutine <NEW_LINE> def _start_co(): <NEW_LINE> <INDENT> yield super(MockHub, self).initialize(argv=argv) <NEW_LINE> user = orm.User(name='user') <NEW_LINE> self.db.add(user) <NEW_LINE> self.db.commit() <NEW_LINE> yield super(MockHub, self).start() <NEW_LINE> self.io_loop.add_callback(evt.set) <NEW_LINE> <DEDENT> def _start(): <NEW_LINE> <INDENT> self.io_loop = IOLoop.current() <NEW_LINE> self.io_loop.add_callback(_start_co) <NEW_LINE> self.io_loop.start() <NEW_LINE> <DEDENT> self._thread = threading.Thread(target=_start) <NEW_LINE> self._thread.start() <NEW_LINE> evt.wait(timeout=5) <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> super().stop() <NEW_LINE> self._thread.join() <NEW_LINE> IOLoop().run_sync(self.cleanup) <NEW_LINE> self.cleanup = lambda : None <NEW_LINE> self.db_file.close() | Hub with various mock bits | 625990a1099cdd3c6367637a |
class Rectangle: <NEW_LINE> <INDENT> def __init__(self, width=0, height=0): <NEW_LINE> <INDENT> self.height = height <NEW_LINE> self.width = width <NEW_LINE> <DEDENT> @property <NEW_LINE> def height(self): <NEW_LINE> <INDENT> return self.__height <NEW_LINE> <DEDENT> @height.setter <NEW_LINE> def height(self, value): <NEW_LINE> <INDENT> if self.__check_arg(value, "height"): <NEW_LINE> <INDENT> self.__height = value <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def width(self): <NEW_LINE> <INDENT> return self.__width <NEW_LINE> <DEDENT> @width.setter <NEW_LINE> def width(self, value): <NEW_LINE> <INDENT> if self.__check_arg(value, "width"): <NEW_LINE> <INDENT> self.__width = value <NEW_LINE> <DEDENT> <DEDENT> def __check_arg(self, value, attribute): <NEW_LINE> <INDENT> if type(value) is not int: <NEW_LINE> <INDENT> raise TypeError("{} must be an integer".format(attribute)) <NEW_LINE> <DEDENT> if value < 0: <NEW_LINE> <INDENT> raise ValueError("{} must be >= 0".format(attribute)) <NEW_LINE> <DEDENT> return (True) | defines class Rectangle
**Instance attributes**
width: private must be a non negative int
height: private must be a non negative int
**Instance methods**
width(self)
width(self, value)
height(self)
height(self, value)
__init__(self, width=0, height=0)
__check_arg(self, value) | 625990a150812a4eaa621b48 |
class AuthLoginViewTests(ManifestTestCase): <NEW_LINE> <INDENT> user_data = ["john", "pass"] <NEW_LINE> form_data = data_dicts.LOGIN_FORM["valid"][0] <NEW_LINE> def test_auth_login_view(self): <NEW_LINE> <INDENT> response = self.client.get(reverse("auth_login")) <NEW_LINE> self.assertEqual(response.status_code, 200) <NEW_LINE> self.assertTemplateUsed(response, "manifest/auth_login.html") <NEW_LINE> <DEDENT> def test_auth_login_invalid(self): <NEW_LINE> <INDENT> response = self.client.post(reverse("auth_login"), data={}) <NEW_LINE> self.assertEqual(response.status_code, 200) <NEW_LINE> self.assertTemplateUsed(response, "manifest/auth_login.html") <NEW_LINE> self.assertEqual( response.context["form"].errors["identification"][0], _("Please enter your username or email address."), ) <NEW_LINE> <DEDENT> def test_auth_login_inactive(self): <NEW_LINE> <INDENT> user = get_user_model().objects.get(username=self.user_data[0]) <NEW_LINE> user.is_active = False <NEW_LINE> user.save() <NEW_LINE> response = self.client.post(reverse("auth_login"), data=self.form_data) <NEW_LINE> self.assertRedirects(response, reverse("auth_disabled")) <NEW_LINE> <DEDENT> def test_auth_login_success(self): <NEW_LINE> <INDENT> response = self.client.post(reverse("auth_login"), data=self.form_data) <NEW_LINE> self.assertRedirects(response, defaults.MANIFEST_LOGIN_REDIRECT_URL) <NEW_LINE> response = self.client.post( reverse("auth_login"), data={**self.form_data, "next": "/test/"} ) <NEW_LINE> self.assertRedirects(response, "/test/") <NEW_LINE> <DEDENT> def test_auth_login_success_url(self): <NEW_LINE> <INDENT> response = self.client.post( reverse("test_auth_login_success_url"), data=self.form_data ) <NEW_LINE> self.assertRedirects(response, TEST_SUCCESS_URL) | Tests for :class:`AuthLoginView <manifest.views.AuthLoginView>`.
| 625990a1099cdd3c6367637b |
class Usage: <NEW_LINE> <INDENT> def __init__(self, *, talkUsed, smsUsed, dataUsed): <NEW_LINE> <INDENT> self.talkUsed = talkUsed <NEW_LINE> self.smsUsed = smsUsed <NEW_LINE> self.dataUsed = dataUsed | Represents the total usage of an individual phone line or total phone bill for a month. | 625990a150812a4eaa621b49 |
class MailboxView(HomeAssistantView): <NEW_LINE> <INDENT> def __init__(self, mailboxes: list[Mailbox]) -> None: <NEW_LINE> <INDENT> self.mailboxes = mailboxes <NEW_LINE> <DEDENT> def get_mailbox(self, platform): <NEW_LINE> <INDENT> for mailbox in self.mailboxes: <NEW_LINE> <INDENT> if mailbox.name == platform: <NEW_LINE> <INDENT> return mailbox <NEW_LINE> <DEDENT> <DEDENT> raise HTTPNotFound | Base mailbox view. | 625990a1099cdd3c6367637c |
class UserFavViewset(viewsets.GenericViewSet, mixins.ListModelMixin, mixins.CreateModelMixin, mixins.RetrieveModelMixin, mixins.DestroyModelMixin): <NEW_LINE> <INDENT> permission_classes = (IsAuthenticated, IsOwnerOrReadOnly) <NEW_LINE> lookup_field = 'goods_id' <NEW_LINE> authentication_classes = (JSONWebTokenAuthentication, SessionAuthentication) <NEW_LINE> def get_queryset(self): <NEW_LINE> <INDENT> return UserFav.objects.filter(user=self.request.user) <NEW_LINE> <DEDENT> def get_serializer_class(self): <NEW_LINE> <INDENT> if self.action == "list": <NEW_LINE> <INDENT> return UserFavDetailSerializer <NEW_LINE> <DEDENT> elif self.action == "create": <NEW_LINE> <INDENT> return UserFavSerializer <NEW_LINE> <DEDENT> return UserFavSerializer | list:
获取用户收藏列表
retrieve:
判断某个商品是否已经收藏
create:
收藏商品 | 625990a250812a4eaa621b4a |
class NovaIdeoApplicationSchema(VisualisableElementSchema): <NEW_LINE> <INDENT> name = NameSchemaNode( editing=context_is_a_root, ) <NEW_LINE> titles = colander.SchemaNode( colander.Sequence(), colander.SchemaNode( colander.String(), name=_("Title") ), widget=SequenceWidget(), default=DEFAULT_TITLES, title=_('List of titles'), ) <NEW_LINE> comment_intention = colander.SchemaNode( colander.Sequence(), colander.SchemaNode( colander.String(), name=_("Comment intention") ), widget=SequenceWidget(), default=DEFAULT_COMMENT_INTENTIONS, title=_('Comment intentions'), ) <NEW_LINE> idea_intention = colander.SchemaNode( colander.Sequence(), colander.SchemaNode( colander.String(), name=_("Idea intention") ), widget=SequenceWidget(), default=DEFAULT_IDEA_INTENTIONS, title=_('Idea intentions'), ) <NEW_LINE> amendment_intention = colander.SchemaNode( colander.Sequence(), colander.SchemaNode( colander.String(), name=_("Amendment intention") ), widget=SequenceWidget(), default=DEFAULT_AMENDMENT_INTENTIONS, title=_('Amendment intentions'), ) <NEW_LINE> working_groups = colander.SchemaNode( colander.Sequence(), omit(WorkingGroupSchema(factory=WorkingGroup, editable=True, name=_('Working group')),['_csrf_token_']), title=_('Working groups'), ) <NEW_LINE> keywords = colander.SchemaNode( colander.Sequence(), omit(KeywordSchema(widget=LineWidget(), factory=Keyword, editable=True, name='Keyword'),['_csrf_token_']), widget=TableWidget(min_len=1), title='Keywords', ) <NEW_LINE> ideas = colander.SchemaNode( colander.Sequence(), omit(IdeaSchema(factory=Idea, name=_('Idea')),['_csrf_token_']), title=_('Ideas'), ) <NEW_LINE> participants_mini = colander.SchemaNode( colander.Integer(), title=_('Minimum number of participants for a working group'), default=3, ) <NEW_LINE> participants_maxi = colander.SchemaNode( colander.Integer(), title=_('Maximum number of participants for a working group'), default=12, ) <NEW_LINE> participations_maxi = colander.SchemaNode( colander.Integer(), 
title=_('Maximum number of working group by member'), default=5, ) <NEW_LINE> tokens_mini = colander.SchemaNode( colander.Integer(), title=_('Minimum number of tokens by member'), default=7, ) | Schema for Nova-Ideo configuration | 625990a2d8ef3951e32c8de1 |
class QPyDesignerCustomWidgetCollectionPlugin(__PyQt4_QtCore.QObject, QDesignerCustomWidgetCollectionInterface): <NEW_LINE> <INDENT> def childEvent(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def connectNotify(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def customEvent(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def disconnectNotify(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def receivers(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def sender(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def senderSignalIndex(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def timerEvent(self, *args, **kwargs): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def __init__(self, QObject_parent=None): <NEW_LINE> <INDENT> pass | QPyDesignerCustomWidgetCollectionPlugin(QObject parent=None) | 625990a2091ae35668706b3d |
class CreditCheckHistorySearch(SearchEditor): <NEW_LINE> <INDENT> title = _("Client Credit Check History Search") <NEW_LINE> editor_class = CreditCheckHistoryEditor <NEW_LINE> search_spec = CreditCheckHistoryView <NEW_LINE> size = (700, 450) <NEW_LINE> def __init__(self, store, client=None, reuse_store=False): <NEW_LINE> <INDENT> self.store = store <NEW_LINE> self.client = client <NEW_LINE> self._reuse_store = reuse_store <NEW_LINE> SearchEditor.__init__(self, store) <NEW_LINE> self.set_edit_button_label(_('Details'), Gtk.STOCK_INFO) <NEW_LINE> <DEDENT> def get_editor_model(self, credit_check_history): <NEW_LINE> <INDENT> return credit_check_history.check_history <NEW_LINE> <DEDENT> def create_filters(self): <NEW_LINE> <INDENT> if self.client: <NEW_LINE> <INDENT> self.set_text_field_columns(['identifier']) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.set_text_field_columns(['identifier', 'client_name']) <NEW_LINE> <DEDENT> self.search.set_query(self.executer_query) <NEW_LINE> <DEDENT> def get_columns(self): <NEW_LINE> <INDENT> columns = [SearchColumn('check_date', title=_('Date'), data_type=datetime.date, width=150, sorted=True), SearchColumn('identifier', title=_('Identifier'), data_type=str, width=130), Column('status', title=_('Status'), data_type=str, width=160), Column('notes', title=_('Notes'), data_type=str, width=100, expand=True), SearchColumn('user', title=_('User'), data_type=str, width=100)] <NEW_LINE> if not self.client: <NEW_LINE> <INDENT> columns.insert(1, SearchColumn('client_name', title=_('Client'), data_type=str, width=150, expand=True)) <NEW_LINE> <DEDENT> return columns <NEW_LINE> <DEDENT> def executer_query(self, store): <NEW_LINE> <INDENT> results = self.search_spec.find_by_client(self.store, self.client) <NEW_LINE> return results.order_by(CreditCheckHistoryView.check_date, CreditCheckHistoryView.identifier) <NEW_LINE> <DEDENT> def update_widgets(self, *args): <NEW_LINE> <INDENT> call_view = self.results.get_selected() <NEW_LINE> 
self.set_edit_button_sensitive(call_view is not None) <NEW_LINE> <DEDENT> def run_editor(self, obj): <NEW_LINE> <INDENT> visual_mode = obj is not None <NEW_LINE> if self._reuse_store: <NEW_LINE> <INDENT> self.store.savepoint('before_run_editor_client_history') <NEW_LINE> retval = run_dialog(self.editor_class, self, self.store, self.store.fetch(obj), self.store.fetch(self.client), visual_mode=visual_mode) <NEW_LINE> if not retval: <NEW_LINE> <INDENT> self.store.rollback_to_savepoint('before_run_editor_client_history') <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> store = api.new_store() <NEW_LINE> client = store.fetch(self.client) <NEW_LINE> retval = run_dialog(self.editor_class, self, store, store.fetch(obj), store.fetch(client), visual_mode=visual_mode) <NEW_LINE> store.confirm(retval) <NEW_LINE> store.close() <NEW_LINE> <DEDENT> return retval | A search dialog for querying the credit history for a |client|
| 625990a2099cdd3c63676380 |
class Event: <NEW_LINE> <INDENT> def __init__(self, code, name, description, sample): <NEW_LINE> <INDENT> self.code = code <NEW_LINE> self.name = name <NEW_LINE> self.description = description <NEW_LINE> self.sample = sample <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '{} - {}'.format(self.code, self.name) | HL-API event wrapper.
Example:
# Import module.
from prpl.apis.hl.com import Event as HLAPIEvent
# Create new instance.
api_event = HLAPIEvent(
'0',
'USER_ACCOUNTS_ADDED',
'Raised when a new account is added.',
'{"Header":{"Code":1,"Name":"USER_ACCOUNTS_ADDED"},"Body":{"AccountId":"User.Accounts.2"}}') | 625990a2187af65679d2ab6f |
class User(UserMixin, db.Model): <NEW_LINE> <INDENT> __tablename__ = 'users' <NEW_LINE> __table_args__ = {'schema': 'ref'} <NEW_LINE> id = db.Column(db.Integer, primary_key=True, name='users_id') <NEW_LINE> email = db.Column(db.String(64), unique=True, nullable=False, index=True) <NEW_LINE> password_hash = db.Column(db.String(128)) <NEW_LINE> first_name = db.Column(db.String(32), nullable=False) <NEW_LINE> last_name = db.Column(db.String(32), nullable=False) <NEW_LINE> @property <NEW_LINE> def password(self): <NEW_LINE> <INDENT> raise AttributeError('Password is not a readable property.') <NEW_LINE> <DEDENT> @password.setter <NEW_LINE> def password(self, password): <NEW_LINE> <INDENT> self.password_hash = generate_password_hash(password) <NEW_LINE> <DEDENT> @property <NEW_LINE> def full_name(self): <NEW_LINE> <INDENT> return self.first_name + ' ' + self.last_name <NEW_LINE> <DEDENT> def verify_password(self, password): <NEW_LINE> <INDENT> return check_password_hash(self.password_hash, password) <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def insert_users(): <NEW_LINE> <INDENT> users = { 1: {'email': '[email protected]', 'password': 'sandag', 'first_name': 'Clint', 'last_name': 'Daniels'}, 2: {'email': '[email protected]', 'password': 'sandag', 'first_name': 'Elias', 'last_name': 'Sanz'}, } <NEW_LINE> for u in users: <NEW_LINE> <INDENT> user = User.query.filter_by(id=u).first() <NEW_LINE> if user is None: <NEW_LINE> <INDENT> user = User(id=u) <NEW_LINE> <DEDENT> user.email = users[u]['email'] <NEW_LINE> user.password = users[u]['password'] <NEW_LINE> user.first_name = users[u]['first_name'] <NEW_LINE> user.last_name = users[u]['last_name'] <NEW_LINE> db.session.add(user) <NEW_LINE> <DEDENT> db.session.commit() <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<User %r-%r>' % (self.id, self.email) | SQL Alchemy Class that maps to ref.users | 625990a2adb09d7d5dc0c46c |
class ModelEmbeddings(nn.Module): <NEW_LINE> <INDENT> def __init__(self, embed_size, vocab): <NEW_LINE> <INDENT> super(ModelEmbeddings, self).__init__() <NEW_LINE> self.embed_size = embed_size <NEW_LINE> src_pad_token_idx = vocab.src["<pad>"] <NEW_LINE> tgt_pad_token_idx = vocab.tgt["<pad>"] <NEW_LINE> self.source = nn.Embedding( len(vocab.src), embedding_dim=self.embed_size, padding_idx=src_pad_token_idx ) <NEW_LINE> self.target = nn.Embedding( len(vocab.tgt), embedding_dim=self.embed_size, padding_idx=tgt_pad_token_idx ) | Class that converts input words to their embeddings. | 625990a2adb09d7d5dc0c46e |
class RandomEventGenerator: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.spraypaint_success_chance = SPRAYPAINT_SUCCESS_CHANCE <NEW_LINE> self.steal_success_chance = STEAL_SUCCESS_CHANCE <NEW_LINE> self.hack_success_chance = HACK_SUCCESS_CHANCE <NEW_LINE> self.skate_success_chance = SKATE_SUCCES_CHANCE <NEW_LINE> random.seed() <NEW_LINE> <DEDENT> def attempt_steal(self): <NEW_LINE> <INDENT> num = random.randint(1,100) <NEW_LINE> if num <= self.steal_success_chance: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def attempt_hack(self): <NEW_LINE> <INDENT> num = random.randint(1,100) <NEW_LINE> if num <= self.hack_success_chance: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def attempt_skate(self): <NEW_LINE> <INDENT> num = random.randint(1, 100) <NEW_LINE> if num <= self.skate_success_chance: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def get_random_cash_amount(self, min_amount, max_amount): <NEW_LINE> <INDENT> amount = random.randint(min_amount, max_amount) <NEW_LINE> return amount <NEW_LINE> <DEDENT> def attempt_spraypaint(self): <NEW_LINE> <INDENT> num = random.randint(1,100) <NEW_LINE> if num <= self.spraypaint_success_chance: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False <NEW_LINE> <DEDENT> def coin_flip(self): <NEW_LINE> <INDENT> num = random.randint(1,100) <NEW_LINE> if num <= 50: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> return False | Used to generate / determine random event results within the game.
Anything that is randomized in the game should be seeded/randomized and returned from here | 625990a2187af65679d2ab75 |
class CondenseInitBlock(nn.Module): <NEW_LINE> <INDENT> def __init__(self, in_channels, out_channels): <NEW_LINE> <INDENT> super(CondenseInitBlock, self).__init__() <NEW_LINE> self.conv = nn.Conv2d( in_channels=in_channels, out_channels=out_channels, kernel_size=3, stride=2, padding=1, bias=False) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> x = self.conv(x) <NEW_LINE> return x | CondenseNet specific initial block.
Parameters:
----------
in_channels : int
Number of input channels.
out_channels : int
Number of output channels. | 625990a250812a4eaa621b54 |
class RequestParser(object): <NEW_LINE> <INDENT> def __init__( self, argument_class=Argument, result_class=ParseResult, trim=False, bundle_errors=False, ): <NEW_LINE> <INDENT> self.args = [] <NEW_LINE> self.argument_class = argument_class <NEW_LINE> self.result_class = result_class <NEW_LINE> self.trim = trim <NEW_LINE> self.bundle_errors = bundle_errors <NEW_LINE> <DEDENT> def add_argument(self, *args, **kwargs): <NEW_LINE> <INDENT> if len(args) == 1 and isinstance(args[0], self.argument_class): <NEW_LINE> <INDENT> self.args.append(args[0]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.args.append(self.argument_class(*args, **kwargs)) <NEW_LINE> <DEDENT> if self.trim and self.argument_class is Argument: <NEW_LINE> <INDENT> self.args[-1].trim = kwargs.get("trim", self.trim) <NEW_LINE> <DEDENT> return self <NEW_LINE> <DEDENT> def parse_args(self, req=None, strict=False): <NEW_LINE> <INDENT> if req is None: <NEW_LINE> <INDENT> req = request <NEW_LINE> <DEDENT> result = self.result_class() <NEW_LINE> req.unparsed_arguments = ( dict(self.argument_class("").source(req)) if strict else {} ) <NEW_LINE> errors = {} <NEW_LINE> for arg in self.args: <NEW_LINE> <INDENT> value, found = arg.parse(req, self.bundle_errors) <NEW_LINE> if isinstance(value, ValueError): <NEW_LINE> <INDENT> errors.update(found) <NEW_LINE> found = None <NEW_LINE> <DEDENT> if found or arg.store_missing: <NEW_LINE> <INDENT> result[arg.dest or arg.name] = value <NEW_LINE> <DEDENT> <DEDENT> if errors: <NEW_LINE> <INDENT> abort(HTTPStatus.BAD_REQUEST, str(list(errors.values())[0]), errors=errors) <NEW_LINE> <DEDENT> if strict and req.unparsed_arguments: <NEW_LINE> <INDENT> arguments = ", ".join(req.unparsed_arguments.keys()) <NEW_LINE> msg = "Unknown arguments: {0}".format(arguments) <NEW_LINE> raise exceptions.BadRequest(msg) <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def copy(self): <NEW_LINE> <INDENT> parser_copy = self.__class__(self.argument_class, self.result_class) <NEW_LINE> 
parser_copy.args = deepcopy(self.args) <NEW_LINE> parser_copy.trim = self.trim <NEW_LINE> parser_copy.bundle_errors = self.bundle_errors <NEW_LINE> return parser_copy <NEW_LINE> <DEDENT> def replace_argument(self, name, *args, **kwargs): <NEW_LINE> <INDENT> new_arg = self.argument_class(name, *args, **kwargs) <NEW_LINE> for index, arg in enumerate(self.args[:]): <NEW_LINE> <INDENT> if new_arg.name == arg.name: <NEW_LINE> <INDENT> del self.args[index] <NEW_LINE> self.args.append(new_arg) <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> return self <NEW_LINE> <DEDENT> def remove_argument(self, name): <NEW_LINE> <INDENT> for index, arg in enumerate(self.args[:]): <NEW_LINE> <INDENT> if name == arg.name: <NEW_LINE> <INDENT> del self.args[index] <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> return self <NEW_LINE> <DEDENT> @property <NEW_LINE> def __schema__(self): <NEW_LINE> <INDENT> params = [] <NEW_LINE> locations = set() <NEW_LINE> for arg in self.args: <NEW_LINE> <INDENT> param = arg.__schema__ <NEW_LINE> if param: <NEW_LINE> <INDENT> params.append(param) <NEW_LINE> locations.add(param["in"]) <NEW_LINE> <DEDENT> <DEDENT> if "body" in locations and "formData" in locations: <NEW_LINE> <INDENT> raise SpecsError("Can't use formData and body at the same time") <NEW_LINE> <DEDENT> return params | Enables adding and parsing of multiple arguments in the context of a single request.
Ex::
from flask_restplus import RequestParser
parser = RequestParser()
parser.add_argument('foo')
parser.add_argument('int_bar', type=int)
args = parser.parse_args()
:param bool trim: If enabled, trims whitespace on all arguments in this parser
:param bool bundle_errors: If enabled, do not abort when first error occurs,
return a dict with the name of the argument and the error message to be
bundled and return all validation errors | 625990a2c4546d3d9def822a |
class UserProfile(models.Model): <NEW_LINE> <INDENT> user = models.OneToOneField(User) <NEW_LINE> code = models.CharField(max_length=200, blank=True, null=True) <NEW_LINE> access_token = models.CharField(max_length=200, blank=True, null=True) <NEW_LINE> token_expiration = models.DateTimeField(blank=True, null=True) <NEW_LINE> token_type = models.CharField(max_length=20, blank=True, null=True) <NEW_LINE> id_token = models.CharField(max_length=1000, blank=True, null=True) <NEW_LINE> last_message_processed = models.IntegerField(blank=True, null=True) <NEW_LINE> last_message_on_server = models.IntegerField(blank=True, null=True) <NEW_LINE> def token_is_current(self): <NEW_LINE> <INDENT> token_expiration = self.token_expiration <NEW_LINE> if token_expiration: <NEW_LINE> <INDENT> if token_expiration > django.utils.timezone.now(): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return "%s's profile" % self.user | Keeps track of extra information about the user, specifically
credentials for logging into other cloud services. | 625990a2187af65679d2ab77 |
class ROMS_Grid(object): <NEW_LINE> <INDENT> def __init__(self, name, hgrid=CGrid, vgrid=s_coordinate): <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.hgrid = hgrid <NEW_LINE> self.vgrid = vgrid | grd = ROMS_Grid(hgrid, vgrid)
ROMS Grid object combining horizontal and vertical grid | 625990a2187af65679d2ab78 |
class PullRequestCommentEvent(PullRequestEvent): <NEW_LINE> <INDENT> name = 'pullrequest-comment' <NEW_LINE> display_name = lazy_ugettext('pullrequest commented') <NEW_LINE> def __init__(self, pullrequest, comment): <NEW_LINE> <INDENT> super(PullRequestCommentEvent, self).__init__(pullrequest) <NEW_LINE> self.comment = comment <NEW_LINE> <DEDENT> def as_dict(self): <NEW_LINE> <INDENT> from rhodecode.model.comment import ChangesetCommentsModel <NEW_LINE> data = super(PullRequestCommentEvent, self).as_dict() <NEW_LINE> status = None <NEW_LINE> if self.comment.status_change: <NEW_LINE> <INDENT> status = self.comment.status_change[0].status <NEW_LINE> <DEDENT> data.update({ 'comment': { 'status': status, 'text': self.comment.text, 'url': ChangesetCommentsModel().get_url(self.comment) } }) <NEW_LINE> return data | An instance of this class is emitted as an :term:`event` after a pull
request comment is created. | 625990a2adb09d7d5dc0c47c |
class InvalidPacketError(CorruptEvohomeError): <NEW_LINE> <INDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super().__init__(*args, **kwargs) <NEW_LINE> self.message = args[0] if args else None <NEW_LINE> <DEDENT> def __str__(self) -> str: <NEW_LINE> <INDENT> err_msg = "Corrupt packet" <NEW_LINE> err_tip = "" <NEW_LINE> if self.message: <NEW_LINE> <INDENT> return f"{err_msg}: {self.message}{err_tip}" <NEW_LINE> <DEDENT> return f"{err_msg} {err_tip}" | Raised when the packet is inconsistent. | 625990a2c4546d3d9def822d |
class NodeResolveFingerprintStrategy(DefaultFingerprintHashingMixin, FingerprintStrategy): <NEW_LINE> <INDENT> _package_manager_lockfiles = { 'yarn': ['package.json', 'yarn.lock'], 'npm': ['package.json', 'package-lock.json', 'npm-shrinkwrap.json'] } <NEW_LINE> def _get_files_to_watch(self, target): <NEW_LINE> <INDENT> package_manager = target.payload.get_field_value("package_manager", '') <NEW_LINE> lockfiles = self._package_manager_lockfiles.get(package_manager, []) <NEW_LINE> paths = [os.path.join(target.address.spec_path, name) for name in lockfiles] <NEW_LINE> return paths <NEW_LINE> <DEDENT> def compute_fingerprint(self, target): <NEW_LINE> <INDENT> if NodeResolve.can_resolve_target(target): <NEW_LINE> <INDENT> hasher = sha1() <NEW_LINE> for lockfile_path in self._get_files_to_watch(target): <NEW_LINE> <INDENT> absolute_lockfile_path = os.path.join(get_buildroot(), lockfile_path) <NEW_LINE> if os.path.exists(absolute_lockfile_path): <NEW_LINE> <INDENT> with open(absolute_lockfile_path, 'r') as lockfile: <NEW_LINE> <INDENT> contents = lockfile.read().encode() <NEW_LINE> hasher.update(contents) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return hasher.hexdigest() <NEW_LINE> <DEDENT> return None | Fingerprint package lockfiles (e.g. package.json, yarn.lock...),
so that we don't automatically run this if none of those have changed.
We read every file and add its contents to the hash. | 625990a3d8ef3951e32c8def |
class _RegisterBase(object): <NEW_LINE> <INDENT> def __init__(self, assign_defaults=(), method_name=None, overwrite=False): <NEW_LINE> <INDENT> if isinstance(assign_defaults, str): <NEW_LINE> <INDENT> self._assign_defaults = [assign_defaults] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self._assign_defaults = assign_defaults <NEW_LINE> <DEDENT> self._method_name = method_name <NEW_LINE> self._overwrite = overwrite <NEW_LINE> _valid_defaults.update(self._assign_defaults) <NEW_LINE> default_args = sorted(_valid_defaults) <NEW_LINE> default_values = [None] * len(_valid_defaults) <NEW_LINE> if six.PY2: <NEW_LINE> <INDENT> default_func = PrettyTensor.with_defaults.__func__ <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> default_func = PrettyTensor.with_defaults <NEW_LINE> <DEDENT> _set_ipython_string(default_func, default_args, default_values, _original_set_defaults_doc) <NEW_LINE> _set_ipython_string(defaults_scope, default_args, default_values, _original_defaults_scope_doc) <NEW_LINE> <DEDENT> def __new__(cls, *args, **kwargs): <NEW_LINE> <INDENT> if len(args) == 1 and isinstance(args[0], collections.Callable): <NEW_LINE> <INDENT> assert not kwargs <NEW_LINE> register = super(_RegisterBase, cls).__new__(cls) <NEW_LINE> register.__init__() <NEW_LINE> return register(args[0]) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return super(_RegisterBase, cls).__new__(cls) <NEW_LINE> <DEDENT> <DEDENT> def fill_kwargs(self, input_layer, kwargs): <NEW_LINE> <INDENT> return input_layer._replace_args_with_defaults(_args=self._assign_defaults, **kwargs) <NEW_LINE> <DEDENT> def __call__(self, obj): <NEW_LINE> <INDENT> if inspect.isclass(obj): <NEW_LINE> <INDENT> cls = obj <NEW_LINE> doc = cls.__call__.__doc__ <NEW_LINE> self._name = cls.__name__ <NEW_LINE> method = self.create_method(obj) <NEW_LINE> argspec = inspect.getargspec(obj.__call__) <NEW_LINE> args = argspec.args[2:] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> func = obj <NEW_LINE> self._name = func.__name__ <NEW_LINE> 
doc = func.__doc__ <NEW_LINE> method = self.create_method(func) <NEW_LINE> argspec = inspect.getargspec(func) <NEW_LINE> args = argspec.args[1:] <NEW_LINE> <DEDENT> name = self._method_name if self._method_name else self._name <NEW_LINE> self._name = name <NEW_LINE> method.__module__ = obj.__module__ <NEW_LINE> method.__name__ = name <NEW_LINE> _set_ipython_string(method, args, argspec.defaults, doc) <NEW_LINE> _remove_first_arg_from_doc(method) <NEW_LINE> self._has_name_param = 'name' in argspec.args <NEW_LINE> if not self._overwrite: <NEW_LINE> <INDENT> assert not hasattr(Layer, name), 'Method already defined: %s' % name <NEW_LINE> assert not hasattr(SequentialLayerBuilder, name), 'Clash with Sequential: %s' % name <NEW_LINE> <DEDENT> setattr(PrettyTensor, name, method) <NEW_LINE> for default in self._assign_defaults: <NEW_LINE> <INDENT> _defaults_to_methods[default].append(name) <NEW_LINE> <DEDENT> return method <NEW_LINE> <DEDENT> def create_method(self, func): <NEW_LINE> <INDENT> raise NotImplementedError('Abstract class') | Base class for the Register* decorators. | 625990a3c4546d3d9def8231 |
class GetFaceInfo(QQAIClass): <NEW_LINE> <INDENT> api = 'https://api.ai.qq.com/fcgi-bin/face/face_getfaceinfo' <NEW_LINE> def make_params(self, face_id): <NEW_LINE> <INDENT> params = {'app_id': self.app_id, 'time_stamp': int(time.time()), 'nonce_str': int(time.time()), 'person_id': face_id } <NEW_LINE> params['sign'] = self.get_sign(params) <NEW_LINE> return params <NEW_LINE> <DEDENT> def run(self, face_id): <NEW_LINE> <INDENT> params = self.make_params(face_id) <NEW_LINE> response = self.call_api(params) <NEW_LINE> result = json.loads(response.text) <NEW_LINE> return result | 获取人脸信息 | 625990a350812a4eaa621b5d |
class EmissionBudgetLevelNode(DjangoNode): <NEW_LINE> <INDENT> date = graphene.Date() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = EmissionBudgetLevel <NEW_LINE> fields = [ 'identifier', 'name', 'carbon_footprint', 'year' ] | Mobility emission budget to reach different prize levels | 625990a350812a4eaa621b5f |
class JoinPath(BaseInterface): <NEW_LINE> <INDENT> input_spec = JoinPathInputSpec <NEW_LINE> output_spec = JoinPathOutputSpec <NEW_LINE> def _list_outputs(self): <NEW_LINE> <INDENT> outputs = self._outputs().get() <NEW_LINE> outputs['path'] = op.join(self.inputs.dirname, self.inputs.filename) <NEW_LINE> return outputs <NEW_LINE> <DEDENT> def _run_interface(self, runtime): <NEW_LINE> <INDENT> return runtime | Joins a filename to a directory name | 625990a3adb09d7d5dc0c48c |
class Service(BaseService): <NEW_LINE> <INDENT> def __init__(self, executable_path, port=0, service_args=None, log_path=None, env=None): <NEW_LINE> <INDENT> super(Service, self).__init__(executable_path, port=port) <NEW_LINE> self.service_args = service_args or [] <NEW_LINE> if log_path: <NEW_LINE> <INDENT> self.service_args.append('--log-path=%s' % log_path) <NEW_LINE> <DEDENT> self.env = env or os.environ <NEW_LINE> <DEDENT> @property <NEW_LINE> def _start_args(self): <NEW_LINE> <INDENT> return [self.path, "--port=%d" % self.port] + self.service_args <NEW_LINE> <DEDENT> @property <NEW_LINE> def _start_kwargs(self): <NEW_LINE> <INDENT> return dict(env=self.env, stdout=PIPE, stderr=PIPE) <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> if self.process is None: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> six.moves.urllib.request.urlopen("http://127.0.0.1:%d/shutdown" % self.port) <NEW_LINE> self.wait_for_close_or_force() | Object that manages the starting and stopping of the ChromeDriver | 625990a3d8ef3951e32c8df5 |
class DBConnectionProducer: <NEW_LINE> <INDENT> @staticmethod <NEW_LINE> def get_connection(engine): <NEW_LINE> <INDENT> if (engine.lower() == 'mongo'): <NEW_LINE> <INDENT> return MongoConnection(engine) <NEW_LINE> <DEDENT> elif (engine.lower() == 'postgre'): <NEW_LINE> <INDENT> return PostgreConnection(engine) | Returns a new db connection according to the setting.py information
Uses the factory method to create instances of connection | 625990a3187af65679d2ab83 |
class PriorBox(object): <NEW_LINE> <INDENT> def __init__(self, cfg): <NEW_LINE> <INDENT> super(PriorBox, self).__init__() <NEW_LINE> self.size = cfg.MODEL.SIZE <NEW_LINE> if self.size == '300': <NEW_LINE> <INDENT> size_cfg = cfg.SMALL <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> size_cfg = cfg.BIG <NEW_LINE> <DEDENT> self.img_wh = size_cfg.IMG_WH <NEW_LINE> self.num_priors = len(size_cfg.ASPECT_RATIOS) <NEW_LINE> self.feature_maps = size_cfg.FEATURE_MAPS <NEW_LINE> self.variance = size_cfg.VARIANCE or [0.1] <NEW_LINE> self.min_sizes = size_cfg.MIN_SIZES <NEW_LINE> self.use_max_sizes = size_cfg.USE_MAX_SIZE <NEW_LINE> if self.use_max_sizes: <NEW_LINE> <INDENT> self.max_sizes = size_cfg.MAX_SIZES <NEW_LINE> <DEDENT> self.steps = size_cfg.STEPS <NEW_LINE> self.aspect_ratios = size_cfg.ASPECT_RATIOS <NEW_LINE> self.clip = size_cfg.CLIP <NEW_LINE> for v in self.variance: <NEW_LINE> <INDENT> if v <= 0: <NEW_LINE> <INDENT> raise ValueError('Variances must be greater than 0') <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def forward(self): <NEW_LINE> <INDENT> mean = [] <NEW_LINE> for k, f in enumerate(self.feature_maps): <NEW_LINE> <INDENT> grid_h, grid_w = f[1], f[0] <NEW_LINE> for i in range(grid_h): <NEW_LINE> <INDENT> for j in range(grid_w): <NEW_LINE> <INDENT> f_k_h = self.img_wh[1] / self.steps[k][1] <NEW_LINE> f_k_w = self.img_wh[0] / self.steps[k][0] <NEW_LINE> cx = (j + 0.5) / f_k_w <NEW_LINE> cy = (i + 0.5) / f_k_h <NEW_LINE> s_k_h = self.min_sizes[k] / self.img_wh[1] <NEW_LINE> s_k_w = self.min_sizes[k] / self.img_wh[0] <NEW_LINE> mean += [cx, cy, s_k_w, s_k_h] <NEW_LINE> if self.use_max_sizes: <NEW_LINE> <INDENT> s_k_prime_w = sqrt( s_k_w * (self.max_sizes[k] / self.img_wh[0])) <NEW_LINE> s_k_prime_h = sqrt( s_k_h * (self.max_sizes[k] / self.img_wh[1])) <NEW_LINE> mean += [cx, cy, s_k_prime_w, s_k_prime_h] <NEW_LINE> <DEDENT> for ar in self.aspect_ratios[k]: <NEW_LINE> <INDENT> mean += [cx, cy, s_k_w * sqrt(ar), s_k_h / sqrt(ar)] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> 
<DEDENT> output = torch.Tensor(mean).view(-1, 4) <NEW_LINE> if self.clip: <NEW_LINE> <INDENT> output.clamp_(max=1, min=0) <NEW_LINE> <DEDENT> return output | Compute priorbox coordinates in center-offset form for each source
feature map.
Note:
This 'layer' has changed between versions of the original SSD
paper, so we include both versions, but note v2 is the most tested and most
recent version of the paper. | 625990a3c4546d3d9def8238 |
class LabelFrame(Frame): <NEW_LINE> <INDENT> def __init__(self, master, controller): <NEW_LINE> <INDENT> self._controller = controller <NEW_LINE> Frame.__init__(self, master) <NEW_LINE> self.printed = None <NEW_LINE> self._prettyPrint = Label(self, text=self.printed) <NEW_LINE> self._prettyPrint.pack(side=LEFT, pady=5) | This class is the Frame at the top which holds the label for
displaying time, tem, date, sunlight etc. | 625990a350812a4eaa621b64 |
class VolumeManageController(wsgi.Controller): <NEW_LINE> <INDENT> _view_builder_class = volume_views.ViewBuilder <NEW_LINE> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(VolumeManageController, self).__init__(*args, **kwargs) <NEW_LINE> self.volume_api = cinder_volume.API() <NEW_LINE> self._list_manageable_view = list_manageable_view.ViewBuilder() <NEW_LINE> <DEDENT> @wsgi.response(http_client.ACCEPTED) <NEW_LINE> def create(self, req, body): <NEW_LINE> <INDENT> context = req.environ['cinder.context'] <NEW_LINE> authorize_manage(context) <NEW_LINE> self.assert_valid_body(body, 'volume') <NEW_LINE> volume = body['volume'] <NEW_LINE> self.validate_name_and_description(volume) <NEW_LINE> if 'ref' not in volume: <NEW_LINE> <INDENT> raise exception.MissingRequired(element='ref') <NEW_LINE> <DEDENT> cluster_name, host = common.get_cluster_host(req, volume, '3.16') <NEW_LINE> LOG.debug('Manage volume request body: %s', body) <NEW_LINE> kwargs = {} <NEW_LINE> req_volume_type = volume.get('volume_type', None) <NEW_LINE> if req_volume_type: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> kwargs['volume_type'] = volume_types.get_by_name_or_id( context, req_volume_type) <NEW_LINE> <DEDENT> except exception.VolumeTypeNotFound: <NEW_LINE> <INDENT> msg = _("Cannot find requested '%s' " "volume type") % req_volume_type <NEW_LINE> raise exception.InvalidVolumeType(reason=msg) <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> kwargs['volume_type'] = {} <NEW_LINE> <DEDENT> kwargs['name'] = volume.get('name', None) <NEW_LINE> kwargs['description'] = volume.get('description', None) <NEW_LINE> kwargs['metadata'] = volume.get('metadata', None) <NEW_LINE> kwargs['availability_zone'] = volume.get('availability_zone', None) <NEW_LINE> kwargs['bootable'] = utils.get_bool_param('bootable', volume) <NEW_LINE> utils.check_metadata_properties(kwargs['metadata']) <NEW_LINE> try: <NEW_LINE> <INDENT> new_volume = self.volume_api.manage_existing(context, host, cluster_name, 
volume['ref'], **kwargs) <NEW_LINE> <DEDENT> except exception.ServiceNotFound: <NEW_LINE> <INDENT> msg = _("Host '%s' not found") % volume['host'] <NEW_LINE> raise exception.ServiceUnavailable(message=msg) <NEW_LINE> <DEDENT> utils.add_visible_admin_metadata(new_volume) <NEW_LINE> return self._view_builder.detail(req, new_volume) <NEW_LINE> <DEDENT> @wsgi.extends <NEW_LINE> def index(self, req): <NEW_LINE> <INDENT> context = req.environ['cinder.context'] <NEW_LINE> authorize_list_manageable(context) <NEW_LINE> return resource_common_manage.get_manageable_resources( req, False, self.volume_api.get_manageable_volumes, self._list_manageable_view) <NEW_LINE> <DEDENT> @wsgi.extends <NEW_LINE> def detail(self, req): <NEW_LINE> <INDENT> context = req.environ['cinder.context'] <NEW_LINE> authorize_list_manageable(context) <NEW_LINE> return resource_common_manage.get_manageable_resources( req, True, self.volume_api.get_manageable_volumes, self._list_manageable_view) | The /os-volume-manage controller for the OpenStack API. | 625990a3d8ef3951e32c8df8 |
class ErrorDetail(str): <NEW_LINE> <INDENT> code = None <NEW_LINE> def __new__(cls, string, code=None): <NEW_LINE> <INDENT> self = super(ErrorDetail, cls).__new__(cls, string) <NEW_LINE> self.code = code <NEW_LINE> return self | A string-like object that can additionally | 625990a350812a4eaa621b65 |
class SE_kernel(Kernel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> Kernel.__init__(self) <NEW_LINE> <DEDENT> def set_params(self, params): <NEW_LINE> <INDENT> self.weight = params[0] <NEW_LINE> self.l = params[1] <NEW_LINE> <DEDENT> def squared_distance(self, x1, x2): <NEW_LINE> <INDENT> return np.sum((x1-x2) * (x1-x2))/self.l**2 <NEW_LINE> <DEDENT> def kernel(self, x1, x2): <NEW_LINE> <INDENT> return self.weight**2*np.exp(-0.5 * self.squared_distance(x1, x2)) <NEW_LINE> <DEDENT> def dK_dweight(self, x1, x2): <NEW_LINE> <INDENT> return 2*self.weight*np.exp(-0.5 * self.squared_distance(x1, x2)) <NEW_LINE> <DEDENT> def dK_dl(self, x1, x2): <NEW_LINE> <INDENT> return self.kernel*la.norm(x1-x2)**2/self.l**3 | Squared exponential kernel without derivatives | 625990a3c4546d3d9def823a |
class Thread (app.web.pastes.PasteListRequestHandler): <NEW_LINE> <INDENT> def get (self, paste_slug): <NEW_LINE> <INDENT> self.set_module(__name__ + ".__init__") <NEW_LINE> self.paste_slug = paste_slug <NEW_LINE> self.pastes = self.get_pastes(paste_slug) <NEW_LINE> self.path.add("Pastes", app.url("pastes/")) <NEW_LINE> if len(self.pastes) > 0: <NEW_LINE> <INDENT> self.get_200() <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.get_404() <NEW_LINE> <DEDENT> <DEDENT> def get_200 (self): <NEW_LINE> <INDENT> self.path.add(self.paste_slug, app.url("%s", self.paste_slug)) <NEW_LINE> self.path.add("Thread", app.url("threads/%s", self.paste_slug)) <NEW_LINE> tpl_pastes = self.templatize_pastes(self.pastes) <NEW_LINE> global_size = 0 <NEW_LINE> global_loc = 0 <NEW_LINE> for o_paste in self.pastes: <NEW_LINE> <INDENT> if o_paste.characters: <NEW_LINE> <INDENT> global_size += o_paste.characters <NEW_LINE> <DEDENT> if o_paste.lines: <NEW_LINE> <INDENT> global_loc += o_paste.lines <NEW_LINE> <DEDENT> <DEDENT> self.content["u_atom"] = app.url("threads/%s.atom", self.paste_slug) <NEW_LINE> self.content["thread_slug"] = self.paste_slug <NEW_LINE> self.content["thread_size"] = app.util.make_filesize_readable(global_size) <NEW_LINE> self.content["thread_loc"] = global_loc <NEW_LINE> self.content["pastes"] = tpl_pastes <NEW_LINE> self.content["paste_count"] = len(self.pastes) <NEW_LINE> self.write_out("./200.html") <NEW_LINE> <DEDENT> def get_404 (self): <NEW_LINE> <INDENT> self.error(404) <NEW_LINE> self.write_out("./404.html") <NEW_LINE> <DEDENT> def get_pastes (self, paste_slug): <NEW_LINE> <INDENT> qry_pastes = app.model.Pasty.all() <NEW_LINE> qry_pastes.filter("thread =", paste_slug) <NEW_LINE> qry_pastes.order("-posted_at") <NEW_LINE> return qry_pastes.fetch(1000, 0) | Show a table of all the pastes in the thread. | 625990a3091ae35668706b6e |
class Analyzer(): <NEW_LINE> <INDENT> def __init__(self, positives, negatives): <NEW_LINE> <INDENT> self.pwords=set() <NEW_LINE> self.nwords=set() <NEW_LINE> pfile=open(positives,"r") <NEW_LINE> for line in pfile: <NEW_LINE> <INDENT> if not line.startswith(";" or " "): <NEW_LINE> <INDENT> self.pwords.add(line.strip()) <NEW_LINE> <DEDENT> <DEDENT> pfile.close() <NEW_LINE> nfile=open(negatives,"r") <NEW_LINE> for line in nfile: <NEW_LINE> <INDENT> if not line.startswith(";" or " "): <NEW_LINE> <INDENT> self.nwords.add(line.strip()) <NEW_LINE> <DEDENT> <DEDENT> nfile.close() <NEW_LINE> <DEDENT> def analyze(self, text): <NEW_LINE> <INDENT> tokenizer = nltk.tokenize.TweetTokenizer() <NEW_LINE> tokens = tokenizer.tokenize(text) <NEW_LINE> total=0 <NEW_LINE> for word in tokens: <NEW_LINE> <INDENT> if word.lower() in self.pwords: <NEW_LINE> <INDENT> total+=1 <NEW_LINE> <DEDENT> elif word.lower() in self.nwords: <NEW_LINE> <INDENT> total-=1 <NEW_LINE> <DEDENT> <DEDENT> return total | Implements sentiment analysis. | 625990a350812a4eaa621b67 |
class IngredientSerializer(serializers.ModelSerializer):
    """Serializer for ingredient object."""

    class Meta:
        # Expose only id and name; id is never client-writable.
        model = Ingredient
        fields = ('id', 'name')
        read_only_fields = ('id',)
class _RectangularProjection(Projection, metaclass=ABCMeta):
    """The abstract superclass of projections with a rectangular domain
    which is symmetric about the origin.
    """

    def __init__(self, proj4_params, half_width, half_height, globe=None):
        self._half_width = half_width
        self._half_height = half_height
        super().__init__(proj4_params, globe=globe)

    @property
    def boundary(self):
        # Closed ring tracing the rectangle, repeating the start point.
        w, h = self._half_width, self._half_height
        return sgeom.LinearRing(
            [(-w, -h), (-w, h), (w, h), (w, -h), (-w, -h)])

    @property
    def x_limits(self):
        return (-self._half_width, self._half_width)

    @property
    def y_limits(self):
        return (-self._half_height, self._half_height)
class PV_MagAbove(PV_ChainUGen):
    """Passes magnitudes above threshold.

    ::

        >>> pv_chain = supriya.ugens.FFT(
        ...     source=supriya.ugens.WhiteNoise.ar(),
        ...     )
        >>> pv_mag_above = supriya.ugens.PV_MagAbove.new(
        ...     pv_chain=pv_chain,
        ...     threshold=0,
        ...     )
        >>> pv_mag_above
        PV_MagAbove.kr()
    """

    # Input order matters to the UGen wire format: chain first, then threshold.
    _ordered_input_names = collections.OrderedDict(
        [("pv_chain", None), ("threshold", 0)]
    )
class GetRecentMediaForLocationResultSet(ResultSet):
    """Result set for the GetRecentMediaForLocation choreography."""

    def get_Response(self):
        """Retrieve the value for the "Response" output from this
        choreography execution. ((json) The response from Instagram.)
        """
        return self._output.get('Response', None)
class GetUplinkStatusCommand(CommandWithErrors):
    """Describes the cxoem fabric get uplink_status IPMI command."""

    name = "Retrieve fabric Uplink status"
    result_type = str
    ipmitool_args = ['cxoem', 'fabric', 'get', 'uplink_status']

    def parse_response(self, out, err):
        """Return stdout as a string; any stderr output is a hard failure."""
        if err:
            raise IpmiError(err)
        return str(out)
class Result(IntEnum):
    """Packet parsing results.

    Parsing starts at UNDETERMINED and moves to either SUCCESS or FAIL.
    """

    SUCCESS = auto()
    FAIL = auto()
    UNDETERMINED = auto()
class Emoji(RedditBase):
    """An individual Emoji object.

    **Typical Attributes**

    This table describes attributes that typically belong to objects of this
    class. Since attributes are dynamically provided (see
    :ref:`determine-available-attributes-of-an-object`), there is not a
    guarantee that these attributes will always be present, nor is this list
    necessarily comprehensive.

    ======================= ===================================================
    Attribute               Description
    ======================= ===================================================
    ``name``                The name of the emoji.
    ``url``                 The URL of the emoji image.
    ======================= ===================================================
    """

    STR_FIELD = 'name'

    def __eq__(self, other):
        # Plain strings compare against the emoji name; other Emoji objects
        # must match on both name and subreddit.
        if isinstance(other, str):
            return other == str(self)
        return (isinstance(other, self.__class__)
                and str(self) == str(other)
                and other.subreddit == self.subreddit)

    def __hash__(self):
        return (hash(self.__class__.__name__)
                ^ hash(str(self))
                ^ hash(self.subreddit))

    def __init__(self, reddit, subreddit, name, _data=None):
        """Construct an instance of the Emoji object."""
        self.name = name
        self.subreddit = subreddit
        super(Emoji, self).__init__(reddit, _data)

    def _fetch(self):
        # Populate attributes from the subreddit's emoji listing.
        for emoji in self.subreddit.emoji:
            if emoji.name == self.name:
                self.__dict__.update(emoji.__dict__)
                self._fetched = True
                return
        raise ClientException('/r/{} does not have the emoji {}'
                              .format(self.subreddit, self.name))

    def delete(self):
        """Delete this emoji from its subreddit."""
        url = API_PATH['emoji_delete'].format(
            emoji_name=self.name, subreddit=self.subreddit)
        self._reddit.request('DELETE', url)
class RackspaceOnMetalDebianImage(OnMetalImage):
    """A Rackspace OnMetal image object representation."""

    # Every Debian OnMetal image shares the same minimum requirements;
    # Image.image_id()/image_size() are evaluated once per entry, exactly
    # as the original literal dict did.
    images = {
        name: {"minRam": 512,
               "minDisk": 20,
               "id": Image.image_id(),
               "OS-EXT-IMG-SIZE:size": Image.image_size()}
        for name in ("OnMetal - Debian Testing (Stretch)",
                     "OnMetal - Debian Unstable (Sid)",
                     "OnMetal - Debian 8 (Jessie)",
                     "OnMetal - Debian 7 (Wheezy)")
    }

    def metadata_json(self):
        """Return the fixed metadata dict for this image family."""
        return {
            "flavor_classes": "onmetal",
            "image_type": "base",
            "os_type": "linux",
            "org.openstack__1__os_distro": "org.debian",
            "vm_mode": "metal",
            "auto_disk_config": "disabled"
        }
class csr(ArrayConvertible, sp.csr_matrix):
    """Wrapper for CSR matrices to be array-convertible."""

    # No extra behavior: the mixin supplies array conversion.
    pass
class AccessSequence:
    """Stores a sequence of Access objects keyed by timestamp."""

    def __init__(self):
        # Insertion order is significant, hence OrderedDict.
        self._accesses = OrderedDict()

    def add(self, timestamp, access):
        """Record *access* under *timestamp*."""
        self._accesses[timestamp] = access

    def get(self):
        """Yield (timestamp, access) pairs in insertion order."""
        yield from self._accesses.items()

    def __len__(self):
        return len(self._accesses)

    def __str__(self):
        body = "".join(
            "({} : {}, {}),".format(timestamp, access.type, access.address)
            for timestamp, access in self._accesses.items())
        return "(" + body + ")"
class Moment(MutableDate):
    """A class to abstract date difficulties."""

    def __init__(self, *args):
        if args:
            date, formula = parse_date_and_formula(*args)
        else:
            date, formula = (None, None)
        self._date = date
        self._formula = formula

    def now(self):
        """Set to the present local datetime."""
        self._date = datetime.now()
        self._formula = "%Y-%m-%d"
        return self

    def utc(self, *args):
        """Parse *args* and localize the result to UTC."""
        date, formula = parse_date_and_formula(*args)
        self._date = pytz.timezone('UTC').localize(date)
        self._formula = formula
        return self

    def utcnow(self):
        """Set to the present UTC datetime."""
        self._date = pytz.timezone('UTC').localize(datetime.utcnow())
        self._formula = "%Y-%m-%d"
        return self

    def unix(self, timestamp, utc=False):
        """Load a Unix timestamp given in seconds (milliseconds accepted)."""
        if utc:
            fromtimestamp = datetime.utcfromtimestamp
        else:
            fromtimestamp = datetime.fromtimestamp
        try:
            self._date = fromtimestamp(timestamp)
        except ValueError:
            # Out-of-range values are assumed to be in milliseconds.
            self._date = fromtimestamp(timestamp / 1000)
        self._formula = "%Y-%m-%d"
        return self

    def locale(self, zone=None):
        """Explicitly set the time zone you want to work with."""
        if not zone:
            self._date = datetime.fromtimestamp(timegm(self._date.timetuple()))
        else:
            try:
                self._date = pytz.timezone(zone).localize(self._date)
            except ValueError:
                self._date = self._date.replace(tzinfo=pytz.timezone(zone))
        return self

    def timezone(self, zone):
        """Convert the stored date into the given timezone."""
        date = self._date
        try:
            date = times.to_local(times.to_universal(date), zone)
        # FIX: was a bare `except:` that also swallowed KeyboardInterrupt
        # and SystemExit; narrow to Exception.
        except Exception:
            date = times.to_local(date, zone)
        finally:
            self._date = date
        return self

    def format(self, formula):
        """Format the date using a Moment.js-style formula."""
        formula = parse_js_date(formula)
        return self._date.strftime(formula)

    def strftime(self, formula):
        return self._date.strftime(formula)

    def diff(self, moment, measurement=None):
        # NOTE(review): `measurement` is accepted but currently unused.
        return self - moment

    def done(self):
        """Return the underlying datetime object."""
        return self._date

    def clone(self):
        """Return a copy preserving the formatting formula."""
        c = Moment(self._date)
        c._formula = self._formula
        return c

    def copy(self):
        """Return a copy (formula is reset by the constructor)."""
        return Moment(self._date)

    def __repr__(self):
        if self._date is not None:
            return "<Moment(%s)>" % (self._date.strftime(self._formula))
        return "<Moment>"

    def __str__(self):
        formatted = self._date.strftime('%Y-%m-%dT%H:%M:%S')
        tz = str.format('{0:+06.2f}', -float(timezone) / 3600)
        return formatted + tz
class ErrorResponse(msrest.serialization.Model):
    """ErrorResponse.

    All required parameters must be populated in order to send to Azure.

    :ivar error: Required. Document Error.
    :vartype error: ~azure.ai.textanalytics.v3_2_preview_2.models.TextAnalyticsError
    """

    _validation = {
        'error': {'required': True},
    }

    _attribute_map = {
        'error': {'key': 'error', 'type': 'TextAnalyticsError'},
    }

    def __init__(self, **kwargs):
        super(ErrorResponse, self).__init__(**kwargs)
        # KeyError here signals the missing required parameter.
        self.error = kwargs['error']
class CoralDocFile():
    """Each doc file has an object of this type."""

    def __init__(self, local_host, doc_dir, language, relative_path):
        self.cdf_local_host = local_host
        self.cdf_doc_dir = doc_dir
        self.cdf_language = language
        self.cdf_relative_path = relative_path
        # Lazily computed and cached by cdf_line_number().
        self._cdf_line_number = None
        self.cdf_fpath = "%s/%s/%s" % (doc_dir, language, relative_path)

    def cdf_line_number(self, log):
        """Return the file's line count (cached), or -1 on error."""
        if self._cdf_line_number is not None:
            return self._cdf_line_number
        command = "cat %s | wc -l" % self.cdf_fpath
        retval = self.cdf_local_host.sh_run(log, command)
        if retval.cr_exit_status:
            log.cl_error("failed to run command [%s] on host [%s], "
                         "ret = [%d], stdout = [%s], stderr = [%s]",
                         command,
                         self.cdf_local_host.sh_hostname,
                         retval.cr_exit_status,
                         retval.cr_stdout,
                         retval.cr_stderr)
            return -1
        line_number_str = retval.cr_stdout.strip()
        try:
            line_number = int(line_number_str)
        # FIX: was a bare `except:`; only a malformed integer is expected here.
        except ValueError:
            log.cl_error("invalid output [%s] of command [%s] on host [%s]",
                         retval.cr_stdout, command,
                         self.cdf_local_host.sh_hostname)
            return -1
        self._cdf_line_number = line_number
        return self._cdf_line_number
class TestFaIr(unittest.TestCase):
    """Tests colors in the fa_IR locale."""

    def setUp(self):
        # Seed for deterministic fake data.
        self.fake = Faker('fa_IR')
        Faker.seed(0)

    def test_color_name(self):
        color_name = self.fake.color_name()
        assert isinstance(color_name, str)
        assert color_name in FaIrProvider.all_colors.keys()

    def test_safe_color_name(self):
        safe_color_name = self.fake.safe_color_name()
        assert isinstance(safe_color_name, str)
        assert safe_color_name in FaIrProvider.safe_colors
class StateCounter(object):
    """Domain object that keeps counts associated with states.

    All methods and properties in this file should be independent of the
    specific storage model used.
    """

    def __init__(self, fec, sec, rac, aac):
        self.first_entry_count = fec
        self.subsequent_entries_count = sec
        self.resolved_answer_count = rac
        self.active_answer_count = aac

    @property
    def total_entry_count(self):
        """Total number of entries (first plus subsequent)."""
        return self.first_entry_count + self.subsequent_entries_count

    @property
    def no_answer_count(self):
        """Entries that produced neither a resolved nor an active answer."""
        return (self.total_entry_count
                - self.resolved_answer_count
                - self.active_answer_count)

    @classmethod
    def get(cls, exploration_id, state_id):
        """Build a StateCounter from the stored counter model."""
        state_counter_model = stats_models.StateCounterModel.get_or_create(
            exploration_id, state_id)
        return cls(
            state_counter_model.first_entry_count,
            state_counter_model.subsequent_entries_count,
            state_counter_model.resolved_answer_count,
            state_counter_model.active_answer_count)
class FireAnt(Ant):
    """FireAnt cooks any Bee in its Place when it expires."""

    name = 'Fire'
    damage = 3
    food_cost = 5
    armor = 1
    implemented = True

    def reduce_armor(self, amount):
        """Take damage; on death, damage every bee here, then leave."""
        self.armor -= amount
        if self.armor <= 0:
            # Snapshot the bees first: reduce_armor may remove them from
            # self.place.bees while we iterate.
            for bee in list(self.place.bees):
                Ant.reduce_armor(bee, self.damage)
            self.place.remove_insect(self)
@tf_export("data.experimental.service.DispatcherConfig") <NEW_LINE> class DispatcherConfig( collections.namedtuple("DispatcherConfig", [ "port", "protocol", "work_dir", "fault_tolerant_mode", "job_gc_check_interval_ms", "job_gc_timeout_ms" ])): <NEW_LINE> <INDENT> def __new__(cls, port=0, protocol=None, work_dir=None, fault_tolerant_mode=False, job_gc_check_interval_ms=None, job_gc_timeout_ms=None): <NEW_LINE> <INDENT> if protocol is None: <NEW_LINE> <INDENT> protocol = _pywrap_utils.TF_DATA_DefaultProtocol() <NEW_LINE> <DEDENT> if job_gc_check_interval_ms is None: <NEW_LINE> <INDENT> job_gc_check_interval_ms = 10 * 60 * 1000 <NEW_LINE> <DEDENT> if job_gc_timeout_ms is None: <NEW_LINE> <INDENT> job_gc_timeout_ms = 5 * 60 * 1000 <NEW_LINE> <DEDENT> return super(DispatcherConfig, cls).__new__(cls, port, protocol, work_dir, fault_tolerant_mode, job_gc_check_interval_ms, job_gc_timeout_ms) | Configuration class for tf.data service dispatchers.
Fields:
port: Specifies the port to bind to. A value of 0 indicates that the server
may bind to any available port.
protocol: The protocol to use for communicating with the tf.data service,
e.g. "grpc".
work_dir: A directory to store dispatcher state in. This
argument is required for the dispatcher to be able to recover from
restarts.
fault_tolerant_mode: Whether the dispatcher should write its state to a
journal so that it can recover from restarts. Dispatcher state, including
registered datasets and created jobs, is synchronously written to the
journal before responding to RPCs. If `True`, `work_dir` must also be
specified.
job_gc_check_interval_ms: How often the dispatcher should scan through to
delete old and unused jobs, in milliseconds. If not set, the runtime will
select a reasonable default. A higher value will reduce load on the
dispatcher, while a lower value will reduce the time it takes for the
dispatcher to garbage collect expired jobs.
job_gc_timeout_ms: How long a job needs to be unused before it becomes a
candidate for garbage collection, in milliseconds. A value of -1 indicates
that jobs should never be garbage collected. If not set, the runtime will
select a reasonable default. A higher value will cause jobs to stay around
longer with no consumers. This is useful if there is a large gap in
time between when consumers read from the job. A lower value will reduce
the time it takes to reclaim the resources from expired jobs. | 625990a4187af65679d2ab91 |
class NumAudioBuses(InfoUGenBase):
    """A number of audio buses info unit generator.

    ::

        >>> ugentools.NumAudioBuses.ir()
        NumAudioBuses.ir()
    """

    __documentation_section__ = 'Info UGens'

    __slots__ = ()

    def __init__(self, calculation_rate=None):
        InfoUGenBase.__init__(self, calculation_rate=calculation_rate)
class GuiParameters(object):
    """This represents the parameters to the gui. This is used to initialize
    the ImageGenerator and CodeGenerator objects to a consistent set of
    parameters.
    """

    def __init__(self, prefix, template, width, height, accent, boring,
                 light, language, replace_images, replace_code, update_code,
                 name=None):
        self.prefix = prefix
        self.template = template
        self.width = width
        self.height = height
        # Uniform scale factor relative to the reference resolution.
        self.scale = min(1.0 * width / WIDTH, 1.0 * height / HEIGHT)
        self.accent_color = Color(accent)
        self.boring_color = Color(boring)
        self.light = light
        # Derived hover/muted shades differ between light and dark themes.
        if light:
            self.hover_color = self.accent_color
            self.muted_color = self.accent_color.tint(.6)
            self.hover_muted_color = self.accent_color.tint(.4)
        else:
            self.hover_color = self.accent_color.tint(.6)
            self.muted_color = self.accent_color.shade(.4)
            self.hover_muted_color = self.accent_color.shade(.6)
        self.menu_color = self.accent_color.replace_hsv_saturation(.25).replace_value(.5)
        self.title_color = self.accent_color.replace_hsv_saturation(.5).replace_value(1.0)
        if light:
            self.selected_color = Color("#555555")
            self.idle_color = Color("#aaaaaa")
            self.idle_small_color = Color("#888888")
            self.text_color = Color("#404040")
            self.choice_color = Color("#cccccc")
        else:
            self.selected_color = Color("#ffffff")
            self.idle_color = Color("#888888")
            self.idle_small_color = Color("#aaaaaa")
            self.text_color = Color("#ffffff")
            self.choice_color = Color("#cccccc")
        self.insensitive_color = self.idle_color.replace_opacity(.5)
        self.language = language
        # Replacing code implies updating it.
        if replace_code:
            update_code = True
        self.replace_images = replace_images
        self.replace_code = replace_code
        self.update_code = update_code
        self.skip_backup = False
        name = name or ''
        self.name = name
        GOOD_CHARACTERS = "0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ-_"
        # Sanitize the project name down to filesystem-safe characters.
        simple_name = "".join(i for i in name if i in GOOD_CHARACTERS)
        if not simple_name:
            simple_name = "game"
        self.simple_name = simple_name
        # Timestamped save directory keeps successive runs distinct.
        self.savedir = self.simple_name + "-" + str(int(time.time()))
class ConfigWriter(object):
    """The JujuConfig writer specific to Juju 2.x."""

    def filenames(self, controllers):
        """Return the config filenames that write() would produce."""
        names = ['clouds.yaml', 'credentials.yaml']
        for controller in controllers:
            names.append("bootstrap-{}.yaml".format(controller.name))
        return names

    def write(self, controllers, cfgdir):
        """Write clouds/credentials/bootstrap YAML files under *cfgdir*.

        Returns a mapping of controller name to bootstrap filename.
        """
        configs = self._as_dicts(controllers)
        clouds_filename = os.path.join(cfgdir, "clouds.yaml")
        with open(clouds_filename, "w") as fd:
            yaml.safe_dump({"clouds": configs["clouds"]}, fd)
        credentials_filename = os.path.join(cfgdir, "credentials.yaml")
        with open(credentials_filename, "w") as fd:
            yaml.safe_dump({"credentials": configs["credentials"]}, fd)
        bootstrap_filenames = {}
        for name, config in configs["bootstrap"].items():
            bootstrap_filename = os.path.join(
                cfgdir, "bootstrap-{}.yaml".format(name))
            with open(bootstrap_filename, "w") as fd:
                yaml.safe_dump(config, fd)
            bootstrap_filenames[name] = bootstrap_filename
        return bootstrap_filenames

    def _as_dicts(self, controllers):
        # Build clouds/credentials/bootstrap config dicts from controllers.
        configs = {"clouds": {}, "credentials": {}, "bootstrap": {}}
        if not controllers:
            return configs
        clouds = configs["clouds"] = {}
        credentials = configs["credentials"] = {}
        bootstrap = configs["bootstrap"] = {}
        for controller in controllers:
            self._update_clouds(controller, clouds)
            self._update_credentials(controller, credentials)
            self._update_bootstrap(controller, bootstrap)
        return configs

    def _update_clouds(self, controller, clouds):
        cloud = controller.cloud
        if cloud.name in clouds:
            # Merging duplicate clouds is not supported yet.
            raise NotImplementedError
        config = {"type": cloud.driver}
        if cloud.endpoint:
            config["endpoint"] = str(cloud.endpoint)
        clouds[cloud.name] = config

    def _update_credentials(self, controller, credentials):
        # Credential serialization is not supported yet.
        if controller.cloud.credentials:
            raise NotImplementedError

    def _update_bootstrap(self, controller, bootstraps):
        if controller.name in bootstraps:
            raise NotImplementedError
        bootstrap = controller.bootstrap
        config = {}
        if bootstrap.default_series:
            config["default-series"] = bootstrap.default_series
        if bootstrap.admin_secret:
            # Admin secret is intentionally not written out.
            pass
        bootstraps[controller.name] = config
class AppReleaseViewSet(BaseAppViewSet):
    """RESTful views for :class:`~api.models.Release`."""

    model = models.Release
    serializer_class = serializers.ReleaseSerializer

    def get_object(self, *args, **kwargs):
        """Look up a release by its version within the app."""
        return self.get_queryset(**kwargs).get(version=self.kwargs['version'])

    def rollback(self, request, *args, **kwargs):
        """Create a new release reusing an older release's build/config."""
        app = get_object_or_404(models.App, id=self.kwargs['id'])
        release = app.release_set.latest()
        last_version = release.version
        # Default target: the version just before the latest.
        version = int(request.DATA.get('version', last_version - 1))
        if version < 1:
            return Response(status=status.HTTP_404_NOT_FOUND)
        summary = "{} rolled back to v{}".format(request.user, version)
        prev = app.release_set.get(version=version)
        new_release = release.new(
            request.user,
            build=prev.build,
            config=prev.config,
            summary=summary,
            source_version='v{}'.format(version))
        app.deploy(new_release)
        response = {'version': new_release.version}
        return Response(response, status=status.HTTP_201_CREATED)
class RedisKeySchemaTest(unittest.TestCase):
    """This class is an example of testing a Redis Key Schema in Python."""

    def setUp(self):
        self.test_db = redis.StrictRedis()
        # BUG FIX: was `test_db.dbsize()` — an undefined global that raised
        # NameError instead of checking the instance's connection.
        if self.test_db.dbsize() > 0:
            raise ValueError("Redis Test Instance must be empty")

    def tearDown(self):
        # Leave the test instance empty for the next run.
        self.test_db.flushall()
class DataParallelWithCallback(DataParallel):
    """Data Parallel with a replication callback.

    A replication callback ``__data_parallel_replicate__`` of each module will
    be invoked after being created by the original ``replicate`` function.
    The callback will be invoked with arguments
    ``__data_parallel_replicate__(ctx, copy_id)``.

    Examples:
        > sync_bn = SynchronizedBatchNorm1d(10, eps=1e-5, affine=False)
        > sync_bn = DataParallelWithCallback(sync_bn, device_ids=[0, 1])
        # sync_bn.__data_parallel_replicate__ will be invoked.
    """

    def __init__(self, module, device_ids=None, output_device=None, dim=0,
                 chunk_size=None):
        super(DataParallelWithCallback, self).__init__(module)
        if not torch.cuda.is_available():
            # CPU-only fallback: run the wrapped module directly.
            self.module = module
            self.device_ids = []
            return
        if device_ids is None:
            device_ids = list(range(torch.cuda.device_count()))
        if output_device is None:
            output_device = device_ids[0]
        self.dim = dim
        self.module = module
        self.device_ids = device_ids
        self.output_device = output_device
        self.chunk_size = chunk_size
        if len(self.device_ids) == 1:
            self.module.cuda(device_ids[0])

    def forward(self, *inputs, **kwargs):
        if not self.device_ids:
            return self.module(*inputs, **kwargs)
        inputs, kwargs = self.scatter(inputs, kwargs, self.device_ids,
                                      self.chunk_size)
        if len(self.device_ids) == 1:
            return self.module(*inputs[0], **kwargs[0])
        replicas = self.replicate(self.module, self.device_ids[:len(inputs)])
        outputs = self.parallel_apply(replicas, inputs, kwargs)
        return self.gather(outputs, self.output_device)

    def scatter(self, inputs, kwargs, device_ids, chunk_size):
        # Custom scatter that honors the configured chunk size.
        return scatter_kwargs(inputs, kwargs, device_ids, dim=self.dim,
                              chunk_size=self.chunk_size)

    def replicate(self, module, device_ids):
        modules = super(DataParallelWithCallback, self).replicate(module,
                                                                  device_ids)
        # Give each replica a chance to run its replication callback.
        execute_replication_callbacks(modules)
        return modules
class BaseBackend:
    """Abstract class that should be implemented by all cache storage
    backends.
    """

    # NOTE(review): Python-2 style metaclass declaration; under Python 3
    # this attribute has no effect, so abstractness is not enforced there.
    __metaclass__ = ABCMeta

    @abstractproperty
    def is_persistent(self):
        return NotImplemented

    @abstractproperty
    def is_existing(self):
        return NotImplemented

    @abstractmethod
    def load(self):
        return NotImplemented

    @abstractmethod
    def save(self, data):
        return NotImplemented

    def __str__(self):
        # e.g. "<RedisBackend>" for a subclass named RedisBackend.
        return "<{}>".format(self.__class__.__name__)
class NetD(nn.Module):
    """Discriminator definition (DCGAN-style, expects 3x96x96 input)."""

    def __init__(self, opt):
        super(NetD, self).__init__()
        ndf = opt.ndf
        self.main = nn.Sequential(
            # 3 x 96 x 96 -> ndf x 32 x 32
            nn.Conv2d(3, ndf, 5, 3, 1, bias=False),
            nn.LeakyReLU(0.2, inplace=True),
            # -> (ndf*2) x 16 x 16
            nn.Conv2d(ndf, ndf * 2, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ndf * 2),
            nn.LeakyReLU(0.2, inplace=True),
            # -> (ndf*4) x 8 x 8
            nn.Conv2d(ndf * 2, ndf * 4, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ndf * 4),
            nn.LeakyReLU(0.2, inplace=True),
            # -> (ndf*8) x 4 x 4
            nn.Conv2d(ndf * 4, ndf * 8, 4, 2, 1, bias=False),
            nn.BatchNorm2d(ndf * 8),
            nn.LeakyReLU(0.2, inplace=True),
            # -> 1 x 1 x 1 probability via sigmoid
            nn.Conv2d(ndf * 8, 1, 4, 1, 0, bias=False),
            nn.Sigmoid()
        )

    def forward(self, input):
        # Flatten to one probability per sample.
        return self.main(input).view(-1)
class AlphaBetaAgent(MultiAgentSearchAgent):
    """Your minimax agent with alpha-beta pruning (question 3)."""

    def getAction(self, gameState):
        """Return the best action for the root state."""
        res = self.value(gameState, 0, 0, None, None)
        return res[1]

    @staticmethod
    def max_int_action(*args):
        # Max over the non-None bound values; (None, None) when all unset.
        valid_args = [x for x in args if x is not None]
        if not valid_args:
            return None, None
        return max(valid_args)

    @staticmethod
    def min_int_action(*args):
        valid_args = [x for x in args if x is not None]
        if not valid_args:
            return None, None
        return min(valid_args)

    def value(self, gameState, depth, agentIndex, alpha, beta):
        """Dispatch to max/min value, handling depth and agent wrap-around."""
        # A full round of agents has moved: descend one ply.
        if agentIndex == gameState.getNumAgents():
            depth += 1
        if depth == self.depth or gameState.isWin() or gameState.isLose():
            return self.evaluationFunction(gameState), None
        if agentIndex == gameState.getNumAgents():
            agentIndex = 0
        if self.is_max_agent(agentIndex):
            return self.max_value(gameState, depth, agentIndex, alpha, beta)
        return self.min_value(gameState, depth, agentIndex, alpha, beta)

    def max_value(self, gameState, depth, agentIndex, alpha, beta):
        v = (None, None)
        for action in gameState.getLegalActions(agentIndex):
            successor = gameState.generateSuccessor(agentIndex, action)
            new_state_value = self.value(successor, depth, agentIndex + 1,
                                         alpha, beta)[0]
            v = self.max_action(v, (new_state_value, action))
            alpha = self.max_int_action(alpha, v[0])
            # Prune: the minimizer above will never allow this branch.
            if beta is not None and alpha is not None and alpha > beta:
                return v
        return v

    def min_value(self, gameState, depth, agentIndex, alpha, beta):
        v = (None, None)
        for action in gameState.getLegalActions(agentIndex):
            successor = gameState.generateSuccessor(agentIndex, action)
            new_state_value = self.value(successor, depth, agentIndex + 1,
                                         alpha, beta)[0]
            v = self.min_action(v, (new_state_value, action))
            beta = self.min_int_action(beta, v[0])
            # Prune: the maximizer above will never allow this branch.
            if beta is not None and alpha is not None and alpha > beta:
                return v
        return v
class AiAnalysisTaskClassificationInput(AbstractModel):
    """Input type for the intelligent-classification task.

    (Translated from the original: 智能分类任务输入类型.)
    """

    def __init__(self):
        # Definition: presumably the classification template id — TODO
        # confirm against the API documentation.
        self.Definition = None

    def _deserialize(self, params):
        self.Definition = params.get("Definition")
class Pomkl(Pompi, IntelMKL, IntelFFTW):
    """Compiler toolchain with PGI compilers, OpenMPI,
    Intel Math Kernel Library (MKL) and Intel FFTW wrappers.
    """

    NAME = 'pomkl'
    SUBTOOLCHAIN = [Pompi.NAME, Pmkl.NAME]
class PeerList(object): <NEW_LINE> <INDENT> __slots__ = [ "peers", "pids" ] <NEW_LINE> def __init__(self, *args): <NEW_LINE> <INDENT> self.peers = [] <NEW_LINE> for row in args: <NEW_LINE> <INDENT> self.append(row) <NEW_LINE> <DEDENT> <DEDENT> def append(self, peer, status='DONE', score=None): <NEW_LINE> <INDENT> self.peers.append((peer, status, score)) <NEW_LINE> <DEDENT> def merge_single(self, new_peer, new_status='DONE', new_score=1.0): <NEW_LINE> <INDENT> new_peer_list = PeerList() <NEW_LINE> found = False <NEW_LINE> for (peer, status, score) in self.peers: <NEW_LINE> <INDENT> if peer.pid == new_peer.pid: <NEW_LINE> <INDENT> found = True <NEW_LINE> if peer.older_than(new_peer): <NEW_LINE> <INDENT> peer = new_peer <NEW_LINE> <DEDENT> if score < new_score: <NEW_LINE> <INDENT> score = new_score <NEW_LINE> <DEDENT> if status == 'TODO' and new_status != 'TODO': <NEW_LINE> <INDENT> status = new_status <NEW_LINE> <DEDENT> <DEDENT> new_peer_list.append(peer, status, score) <NEW_LINE> <DEDENT> if not found: <NEW_LINE> <INDENT> new_peer_list.append(new_peer, new_status, new_score) <NEW_LINE> <DEDENT> self.peers = new_peer_list.peers <NEW_LINE> <DEDENT> def merge(self, peer_list): <NEW_LINE> <INDENT> for (peer, status, score) in peer_list: <NEW_LINE> <INDENT> self.merge_single(peer, status, score) <NEW_LINE> <DEDENT> <DEDENT> def __getitem__(self, k): <NEW_LINE> <INDENT> return self.peers.__getitem__(k) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self.peers.__iter__() <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.peers) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> result = "" <NEW_LINE> count = 0 <NEW_LINE> for (peer, status, score) in self.peers: <NEW_LINE> <INDENT> if count < 250: <NEW_LINE> <INDENT> count += 1 <NEW_LINE> if result != "": <NEW_LINE> <INDENT> result += ", " <NEW_LINE> <DEDENT> result += repr(peer) <NEW_LINE> <DEDENT> <DEDENT> return "PeerList(" + result + ")" | A PeerList is a ranked list 
of Snippet objects.
| 625990a5adb09d7d5dc0c4d6 |
class NvencAdvancedSettingsSignal: <NEW_LINE> <INDENT> def __init__(self, nvenc_handlers, inputs_page_handlers): <NEW_LINE> <INDENT> self.nvenc_handlers = nvenc_handlers <NEW_LINE> self.inputs_page_handlers = inputs_page_handlers <NEW_LINE> <DEDENT> def on_nvenc_advanced_settings_switch_state_set(self, nvenc_advanced_settings_switch, user_data=None): <NEW_LINE> <INDENT> advanced_settings_enabled = nvenc_advanced_settings_switch.get_active() <NEW_LINE> self.nvenc_handlers.set_advanced_settings_state(advanced_settings_enabled) <NEW_LINE> self.nvenc_handlers.update_qp_from_advanced_settings() <NEW_LINE> if self.nvenc_handlers.is_widgets_setting_up: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> threading.Thread(target=self.nvenc_handlers.update_settings, args=()).start() | Handles the signal emitted when NVENC advanced settings are toggled. | 625990a5187af65679d2aba5 |
class Config(MutableMapping): <NEW_LINE> <INDENT> def __init__(self, filename, makedirs=True): <NEW_LINE> <INDENT> self._filename = filename <NEW_LINE> if makedirs: <NEW_LINE> <INDENT> os.makedirs(os.path.dirname(self._filename), exist_ok=True) <NEW_LINE> <DEDENT> if os.path.exists(self._filename): <NEW_LINE> <INDENT> with open(self._filename) as s: <NEW_LINE> <INDENT> self._data = yaml.safe_load(s) <NEW_LINE> <DEDENT> if self._data is None: <NEW_LINE> <INDENT> self._data = {} <NEW_LINE> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> self._data = {} <NEW_LINE> <DEDENT> <DEDENT> def __getitem__(self, item): <NEW_LINE> <INDENT> return self._data[item] <NEW_LINE> <DEDENT> def __setitem__(self, key, value): <NEW_LINE> <INDENT> self._data[key] = value <NEW_LINE> with open(self._filename, 'wt') as s: <NEW_LINE> <INDENT> yaml.safe_dump(self._data, s) <NEW_LINE> <DEDENT> <DEDENT> def __delitem__(self, key): <NEW_LINE> <INDENT> del self._data[key] <NEW_LINE> with open(self._filename, 'wt') as s: <NEW_LINE> <INDENT> yaml.safe_dump(self._data, s) <NEW_LINE> <DEDENT> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return self._data.__iter__() <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return self._data.__len__() <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return f"<emat.Config {self._filename}>\n"+self._data.__repr__() <NEW_LINE> <DEDENT> def get_subdir(self, key, *subdirs, makedirs=True, normpath=True): <NEW_LINE> <INDENT> directory = self.get(key, None) <NEW_LINE> if directory is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> directory = os.path.join(directory, *subdirs) <NEW_LINE> if normpath: <NEW_LINE> <INDENT> directory = os.path.normpath(directory) <NEW_LINE> <DEDENT> if makedirs: <NEW_LINE> <INDENT> os.makedirs(directory, exist_ok=True) <NEW_LINE> <DEDENT> return directory | A configuration dictionary-like object.
Args:
filename (str): Initial values for this dictionary are
loaded from this file using `yaml.safe_load`. Changes
to this dictionary are immediately written to disk in
the same file.
makedirs (bool, default True): If true, any intermediate
directories are created as needed. | 625990a5c4546d3d9def825a |
class Universe(object): <NEW_LINE> <INDENT> LTP = Ola_pb2.LTP <NEW_LINE> HTP = Ola_pb2.HTP <NEW_LINE> def __init__(self, universe_id, name, merge_mode, input_ports, output_ports): <NEW_LINE> <INDENT> self._id = universe_id <NEW_LINE> self._name = name <NEW_LINE> self._merge_mode = merge_mode <NEW_LINE> self._input_ports = sorted(input_ports) <NEW_LINE> self._output_ports = sorted(output_ports) <NEW_LINE> <DEDENT> @property <NEW_LINE> def id(self): <NEW_LINE> <INDENT> return self._id <NEW_LINE> <DEDENT> @property <NEW_LINE> def name(self): <NEW_LINE> <INDENT> return self._name <NEW_LINE> <DEDENT> @property <NEW_LINE> def merge_mode(self): <NEW_LINE> <INDENT> return self._merge_mode <NEW_LINE> <DEDENT> @property <NEW_LINE> def input_ports(self): <NEW_LINE> <INDENT> return self._input_ports <NEW_LINE> <DEDENT> @property <NEW_LINE> def output_ports(self): <NEW_LINE> <INDENT> return self._output_ports <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def FromProtobuf(universe_pb): <NEW_LINE> <INDENT> input_ports = [Port.FromProtobuf(x) for x in universe_pb.input_ports] <NEW_LINE> output_ports = [Port.FromProtobuf(x) for x in universe_pb.output_ports] <NEW_LINE> return Universe(universe_pb.universe, universe_pb.name, universe_pb.merge_mode, input_ports, output_ports) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> merge_mode = 'LTP' if self.merge_mode == Universe.LTP else 'HTP' <NEW_LINE> s = 'Universe(id={id}, name="{name}", merge_mode={merge_mode})' <NEW_LINE> return s.format(id=self.id, name=self.name, merge_mode=merge_mode) <NEW_LINE> <DEDENT> def __lt__(self, other): <NEW_LINE> <INDENT> return self.id < other.id <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.id == other.id <NEW_LINE> <DEDENT> def __le__(self, other): <NEW_LINE> <INDENT> return self.id <= other.id <NEW_LINE> <DEDENT> def __gt__(self, other): <NEW_LINE> <INDENT> return self.id > other.id <NEW_LINE> <DEDENT> def __ge__(self, other): <NEW_LINE> <INDENT> return 
self.id >= other.id <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return self.id != other.id | Represents a universe.
Attributes:
id: the integer universe id
name: the name of this universe
merge_mode: the merge mode this universe is using | 625990a550812a4eaa621b8a |
class BaseHTTPResponse: <NEW_LINE> <INDENT> __slots__ = ( "asgi", "body", "content_type", "stream", "status", "headers", "_cookies", ) <NEW_LINE> _dumps = json_dumps <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> self.asgi: bool = False <NEW_LINE> self.body: Optional[bytes] = None <NEW_LINE> self.content_type: Optional[str] = None <NEW_LINE> self.stream: Optional[Union[Http, ASGIApp]] = None <NEW_LINE> self.status: int = None <NEW_LINE> self.headers = Header({}) <NEW_LINE> self._cookies: Optional[CookieJar] = None <NEW_LINE> <DEDENT> def _encode_body(self, data: Optional[AnyStr]): <NEW_LINE> <INDENT> if data is None: <NEW_LINE> <INDENT> return b"" <NEW_LINE> <DEDENT> return ( data.encode() if hasattr(data, "encode") else data ) <NEW_LINE> <DEDENT> @property <NEW_LINE> def cookies(self) -> CookieJar: <NEW_LINE> <INDENT> if self._cookies is None: <NEW_LINE> <INDENT> self._cookies = CookieJar(self.headers) <NEW_LINE> <DEDENT> return self._cookies <NEW_LINE> <DEDENT> @property <NEW_LINE> def processed_headers(self) -> Iterator[Tuple[bytes, bytes]]: <NEW_LINE> <INDENT> if self.status in (304, 412): <NEW_LINE> <INDENT> self.headers = remove_entity_headers(self.headers) <NEW_LINE> <DEDENT> if has_message_body(self.status): <NEW_LINE> <INDENT> self.headers.setdefault("content-type", self.content_type) <NEW_LINE> <DEDENT> return ( (name.encode("ascii"), f"{value}".encode(errors="surrogateescape")) for name, value in self.headers.items() ) <NEW_LINE> <DEDENT> async def send( self, data: Optional[AnyStr] = None, end_stream: Optional[bool] = None, ) -> None: <NEW_LINE> <INDENT> if data is None and end_stream is None: <NEW_LINE> <INDENT> end_stream = True <NEW_LINE> <DEDENT> if self.stream is None: <NEW_LINE> <INDENT> raise SanicException( "No stream is connected to the response object instance." 
) <NEW_LINE> <DEDENT> if self.stream.send is None: <NEW_LINE> <INDENT> if end_stream and not data: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> raise ServerError( "Response stream was ended, no more response data is " "allowed to be sent." ) <NEW_LINE> <DEDENT> data = ( data.encode() if hasattr(data, "encode") else data or b"" ) <NEW_LINE> await self.stream.send(data, end_stream=end_stream) | The base class for all HTTP Responses | 625990a5187af65679d2abab |
class Detail(View): <NEW_LINE> <INDENT> def get(self, request, pk): <NEW_LINE> <INDENT> common_return_dict = utils.refresh(request) <NEW_LINE> article = get_object_or_404(Blog, id=pk) <NEW_LINE> if article.password != utils.PARAS().BLOG_DEFAULT_PASSWORD: <NEW_LINE> <INDENT> if not request.GET.get('psd') or request.GET.get('psd') != article.password: <NEW_LINE> <INDENT> return HttpResponse("阿欧~密码不正确咩~") <NEW_LINE> <DEDENT> <DEDENT> user_image_list = Image.objects.filter(type_of_user_icon=True) <NEW_LINE> comment_list = Comment.objects.filter(Q(blog=article), Q(display=True)) <NEW_LINE> group_comment_dict = utils.group_comment(comment_list) <NEW_LINE> common_return_dict['article'] = article <NEW_LINE> common_return_dict['group_comment_dict'] = group_comment_dict <NEW_LINE> common_return_dict['image_list'] = [[i % 10 if i != 0 else 1, img] for i, img in enumerate(user_image_list)] <NEW_LINE> common_return_dict['category'] = article.category.name <NEW_LINE> common_return_dict['random_image'] = choice(user_image_list) <NEW_LINE> common_return_dict['default_password'] = utils.PARAS().BLOG_DEFAULT_PASSWORD <NEW_LINE> article.read_num += 1 <NEW_LINE> article.save() <NEW_LINE> return render(request, 'blog/detail.html', common_return_dict) <NEW_LINE> <DEDENT> def post(self, request, pk): <NEW_LINE> <INDENT> article = get_object_or_404(Blog, id=pk) <NEW_LINE> if request.POST['type'] == utils.PARAS().TYPE_FOR_COMMENT: <NEW_LINE> <INDENT> response = utils.record_comment(request, article) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> response = HttpResponse(utils.PARAS().UNKNOWN_RETURN_INFO) <NEW_LINE> <DEDENT> return response | 博客详情页 | 625990a550812a4eaa621b8b |
class SugarConfigurationException(SugarException): <NEW_LINE> <INDENT> __prefix__ = "Configuration error" | Used when configuration exception occurs (wrong or not found). | 625990a5187af65679d2abac |
class TournamentWebsocketMixin(TournamentFromUrlMixin): <NEW_LINE> <INDENT> group_prefix = None <NEW_LINE> def get_url_kwargs(self): <NEW_LINE> <INDENT> return self.scope["url_route"]["kwargs"] <NEW_LINE> <DEDENT> def group_name(self): <NEW_LINE> <INDENT> if self.group_prefix is None: <NEW_LINE> <INDENT> raise ImproperlyConfigured("group_prefix must be specified on subclasses of TournamentWebsocketMixin") <NEW_LINE> <DEDENT> return self.group_prefix + '_' + self.tournament.slug <NEW_LINE> <DEDENT> def send_error(self, error, message, original_content): <NEW_LINE> <INDENT> self.send_json({ 'error': force_str(error), 'message': force_str(message), 'original_content': original_content, 'component_id': original_content['component_id'], }) <NEW_LINE> return super() <NEW_LINE> <DEDENT> def connect(self): <NEW_LINE> <INDENT> async_to_sync(self.channel_layer.group_add)( self.group_name(), self.channel_name, ) <NEW_LINE> super().connect() <NEW_LINE> <DEDENT> def disconnect(self, message): <NEW_LINE> <INDENT> async_to_sync(self.channel_layer.group_discard)( self.group_name(), self.channel_name, ) <NEW_LINE> super().disconnect(message) | Mixin for websocket consumers that listen for changes relating to a
particular tournament, as specified in the URL.
Subclasses must provide a `group_prefix` that serves as a name for the
stream; the name of the group is a concatenation of this and the tournament
slug. | 625990a6c4546d3d9def8262 |
class UserUrl(MainHandler): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> if not self.user: <NEW_LINE> <INDENT> self.write_json({'error':'not logged in'}) <NEW_LINE> return <NEW_LINE> <DEDENT> s = self.request.get('s') <NEW_LINE> if s.isdigit():s=int(s) <NEW_LINE> else:s=0 <NEW_LINE> n = self.request.get('n') <NEW_LINE> if n.isdigit():n=int(n) <NEW_LINE> else:n=10 <NEW_LINE> urls = Url.by_user(self.local_user.key.id(),s=s,n=n) <NEW_LINE> if urls: <NEW_LINE> <INDENT> urls_json = gql_json_parser(urls) <NEW_LINE> self.write_json(urls_json) <NEW_LINE> return <NEW_LINE> <DEDENT> self.write_json({'no_urls':True}) | get user stored URLs | 625990a6adb09d7d5dc0c4ec |
class Map(DaeObject): <NEW_LINE> <INDENT> def __init__(self, sampler, texcoord, xmlnode=None): <NEW_LINE> <INDENT> self.sampler = sampler <NEW_LINE> self.texcoord = texcoord <NEW_LINE> if xmlnode != None: <NEW_LINE> <INDENT> self.xmlnode = xmlnode <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.xmlnode = E.texture(texture=self.sampler.id, texcoord=self.texcoord) <NEW_LINE> <DEDENT> <DEDENT> @staticmethod <NEW_LINE> def load( collada, localscope, node ): <NEW_LINE> <INDENT> samplerid = node.get('texture') <NEW_LINE> texcoord = node.get('texcoord') <NEW_LINE> sampler = localscope.get(samplerid) <NEW_LINE> if sampler is None: <NEW_LINE> <INDENT> for s2d in localscope.values(): <NEW_LINE> <INDENT> if type(s2d) is Sampler2D: <NEW_LINE> <INDENT> if s2d.surface.image.id == samplerid: <NEW_LINE> <INDENT> sampler = s2d <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> <DEDENT> if sampler is None or type(sampler) != Sampler2D: <NEW_LINE> <INDENT> err = DaeMissingSampler2D('Missing sampler ' + samplerid + ' in node ' + node.tag) <NEW_LINE> err.samplerid = samplerid <NEW_LINE> raise err <NEW_LINE> <DEDENT> return Map(sampler, texcoord, xmlnode = node) <NEW_LINE> <DEDENT> def save(self): <NEW_LINE> <INDENT> self.xmlnode.set('texture', self.sampler.id) <NEW_LINE> self.xmlnode.set('texcoord', self.texcoord) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '<Map sampler=%s texcoord=%s>' % (self.sampler.id, self.texcoord) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return str(self) | Class containing data coming from <texture> tag inside material.
When a material defines its properties like `diffuse`, it can give you
a color or a texture. In the latter, the texture is mapped with a
sampler and a texture coordinate channel. If a material defined a texture
for one of its properties, you'll find an object of this class in the
corresponding attribute. | 625990a6091ae35668706bc0 |
class XNATCopyInputSpec(CommandLineInputSpec): <NEW_LINE> <INDENT> project = traits.Str(mandatory=True, desc='The XNAT project id') <NEW_LINE> subject = traits.Str(mandatory=True, desc='The XNAT subject name') <NEW_LINE> session = traits.Str(mandatory=True, argstr='%s', position=-2, desc='The XNAT session name') <NEW_LINE> reconstruction = traits.Str(desc='The XNAT reconstruction name') <NEW_LINE> assessor = traits.Str(desc='The XNAT assessor name') <NEW_LINE> resource = traits.Str(desc='The XNAT resource name (scan default is NIFTI)') <NEW_LINE> inout = traits.Str(desc='The XNAT reconstruction or assessor resource' ' in/out qualifier') <NEW_LINE> scan = traits.Either(traits.Int, traits.Str, desc='The XNAT scan name') <NEW_LINE> in_files = InputMultiPath(File(exists=True), argstr='%s', position=-3, desc='The files to upload') <NEW_LINE> modality = traits.Str(argstr='--modality %s', desc="The XNAT scan modality, e.g. 'MR'") <NEW_LINE> force = traits.Bool(argstr='--force', desc='Flag indicating whether to replace an existing' ' XNAT file') <NEW_LINE> skip_existing = traits.Bool(argstr='--skip-existing', desc='Flag indicating whether to skip upload' ' to an existing target XNAT file') <NEW_LINE> dest = traits.Str(argstr='%s', position=-1, desc='The download directory') | The input spec with arguments in the following order:
* options
* the input files, for an upload
* the XNAT object path
* the destination directory, for a download | 625990a6187af65679d2abb0 |
class RealeseCommand(Command): <NEW_LINE> <INDENT> description = 'Release the package.' <NEW_LINE> user_options = [] <NEW_LINE> @staticmethod <NEW_LINE> def status(s): <NEW_LINE> <INDENT> print('\033[1m{0}\033[0m'.format(s)) <NEW_LINE> <DEDENT> def initialize_options(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def finalize_options(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def run(self): <NEW_LINE> <INDENT> self.status('Release git tags…') <NEW_LINE> os.system('git tag v{0}'.format(about['__version__'])) <NEW_LINE> os.system('git push --tags') <NEW_LINE> sys.exit() | Support setup.py upload. | 625990a650812a4eaa621b90 |
class HybridServicesAdminNegativeTestJSON(ServicesAdminNegativeTest.ServicesAdminNegativeTestJSON): <NEW_LINE> <INDENT> pass | Tests Services API. List and Enable/Disable require admin privileges. | 625990a6adb09d7d5dc0c4f2 |
class StsUtil: <NEW_LINE> <INDENT> def __init__(self, device_serial_or_address, logger, device_access_timeout_secs=60): <NEW_LINE> <INDENT> self.device_serial_or_address = device_serial_or_address <NEW_LINE> self.logger = logger <NEW_LINE> self.device_access_timeout_secs = device_access_timeout_secs <NEW_LINE> try: <NEW_LINE> <INDENT> self.device_fingerprint = self.read_device_fingerprint() <NEW_LINE> <DEDENT> except subprocess.CalledProcessError: <NEW_LINE> <INDENT> self.device_fingerprint = None <NEW_LINE> <DEDENT> <DEDENT> def read_device_fingerprint(self): <NEW_LINE> <INDENT> fingerprint = subprocess.check_output( [ "adb", "-s", self.device_serial_or_address, "shell", "getprop", "ro.build.fingerprint", ], universal_newlines=True, timeout=self.device_access_timeout_secs, ).rstrip() <NEW_LINE> self.logger.debug("Device reports fingerprint '%s'", fingerprint) <NEW_LINE> return fingerprint <NEW_LINE> <DEDENT> def fix_result_file_fingerprints(self, result_dir): <NEW_LINE> <INDENT> if self.device_fingerprint is None: <NEW_LINE> <INDENT> self.device_fingerprint = self.read_device_fingerprint() <NEW_LINE> <DEDENT> test_result_path = os.path.join(result_dir, "test_result.xml") <NEW_LINE> test_result_path_orig = test_result_path + ".orig" <NEW_LINE> shutil.move(test_result_path, test_result_path_orig) <NEW_LINE> test_result_failures_path = os.path.join( result_dir, "test_result_failures.html" ) <NEW_LINE> test_result_failures_path_orig = test_result_failures_path + ".orig" <NEW_LINE> shutil.move(test_result_failures_path, test_result_failures_path_orig) <NEW_LINE> test_result_tree = ET.parse(test_result_path_orig) <NEW_LINE> result_build_node = test_result_tree.getroot().find("Build") <NEW_LINE> manipulated_fingerprint = result_build_node.get("build_fingerprint") <NEW_LINE> self.logger.debug( "Reverting STS manipulated device fingerprint: '%s' -> '%s'", manipulated_fingerprint, self.device_fingerprint, ) <NEW_LINE> result_build_node.set("build_fingerprint", 
self.device_fingerprint) <NEW_LINE> test_result_tree.write(test_result_path) <NEW_LINE> with open(test_result_failures_path_orig, "r") as test_result_failures_file: <NEW_LINE> <INDENT> test_result_failures = test_result_failures_file.read().replace( manipulated_fingerprint, self.device_fingerprint ) <NEW_LINE> <DEDENT> with open(test_result_failures_path, "w") as test_result_failures_file: <NEW_LINE> <INDENT> test_result_failures_file.write(test_result_failures) | Interface for STS related workarounds when automating TradeFed.
For applying StsUtil, use one instance per TradeFed STS invocation. Ideally,
construct it before running any tests, so when the passed device is in a
good known state. Call fix_result_file_fingerprints() after each completed
run, before rerunning.
Applying StsUtil to non-STS TradeFed runs does not help, but should also not
affect the results in any way. | 625990a6091ae35668706bc6 |
class HumanPlayer(Player): <NEW_LINE> <INDENT> def __init__(self,name): <NEW_LINE> <INDENT> super().__init__(name) <NEW_LINE> <DEDENT> def is_playable(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def pick_card(self, putdown_pile): <NEW_LINE> <INDENT> return None | Subclass of Player
Player that selects cards to play using the GUI | 625990a650812a4eaa621b93 |
class BaseDyadic(Dyadic, AtomicExpr): <NEW_LINE> <INDENT> def __new__(cls, vector1, vector2): <NEW_LINE> <INDENT> from sympy.vector.vector import Vector, BaseVector, VectorZero <NEW_LINE> if not isinstance(vector1, (BaseVector, VectorZero)) or not isinstance(vector2, (BaseVector, VectorZero)): <NEW_LINE> <INDENT> raise TypeError("BaseDyadic cannot be composed of non-base "+ "vectors") <NEW_LINE> <DEDENT> elif vector1 == Vector.zero or vector2 == Vector.zero: <NEW_LINE> <INDENT> return Dyadic.zero <NEW_LINE> <DEDENT> obj = super(BaseDyadic, cls).__new__(cls, vector1, vector2) <NEW_LINE> obj._base_instance = obj <NEW_LINE> obj._measure_number = 1 <NEW_LINE> obj._components = {obj: S(1)} <NEW_LINE> obj._sys = vector1._sys <NEW_LINE> return obj <NEW_LINE> <DEDENT> def __str__(self, printer=None): <NEW_LINE> <INDENT> return "(" + str(self.args[0]) + "|" + str(self.args[1]) + ")" <NEW_LINE> <DEDENT> _sympystr = __str__ <NEW_LINE> _sympyrepr = _sympystr | Class to denote a base dyadic tensor component. | 625990a6adb09d7d5dc0c4f6 |
class AddComentsView(View): <NEW_LINE> <INDENT> def post(self, request): <NEW_LINE> <INDENT> if not request.user.is_authenticated(): <NEW_LINE> <INDENT> return HttpResponse('{"status":"fail", "msg":"用户未登录"}', content_type='application/json') <NEW_LINE> <DEDENT> course_id = request.POST.get("course_id", 0) <NEW_LINE> comments = request.POST.get("comments", "") <NEW_LINE> if int(course_id) > 0 and comments: <NEW_LINE> <INDENT> course_comments = CourseComments() <NEW_LINE> course = Course.objects.get(id=int(course_id)) <NEW_LINE> course_comments.course = course <NEW_LINE> course_comments.comments = comments <NEW_LINE> course_comments.user = request.user <NEW_LINE> course_comments.save() <NEW_LINE> return HttpResponse('{"status":"success", "msg":"添加成功"}', content_type='application/json') <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return HttpResponse('{"status":"fail", "msg":"添加失败"}', content_type='application/json') | 用户添加课程评论 | 625990a6091ae35668706bcc |