code (string, 4 to 4.48k chars) | docstring (string, 1 to 6.45k chars) | _id (string, 24 chars)
---|---|---
class AddDistroTests(DatabaseTestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> super(AddDistroTests, self).setUp() <NEW_LINE> session = Session() <NEW_LINE> self.user = models.User(email="[email protected]", username="user") <NEW_LINE> user_social_auth = social_models.UserSocialAuth( user_id=self.user.id, user=self.user ) <NEW_LINE> session.add(self.user) <NEW_LINE> session.add(user_social_auth) <NEW_LINE> self.admin = models.User(email="[email protected]", username="admin") <NEW_LINE> admin_social_auth = social_models.UserSocialAuth( user_id=self.admin.id, user=self.admin ) <NEW_LINE> session.add_all([admin_social_auth, self.admin]) <NEW_LINE> session.commit() <NEW_LINE> self.client = self.flask_app.test_client() <NEW_LINE> <DEDENT> def test_no_csrf_token(self): <NEW_LINE> <INDENT> with login_user(self.flask_app, self.user): <NEW_LINE> <INDENT> output = self.client.post("/distro/add", data={"name": "Fedora"}) <NEW_LINE> self.assertEqual(200, output.status_code) <NEW_LINE> self.assertEqual(0, models.Distro.query.count()) <NEW_LINE> <DEDENT> <DEDENT> def test_invalid_csrf_token(self): <NEW_LINE> <INDENT> with login_user(self.flask_app, self.user): <NEW_LINE> <INDENT> output = self.client.post( "/distro/add", data={"csrf_token": "abc", "name": "Fedora"} ) <NEW_LINE> self.assertEqual(200, output.status_code) <NEW_LINE> self.assertEqual(0, models.Distro.query.count()) <NEW_LINE> <DEDENT> <DEDENT> def test_add_distro(self): <NEW_LINE> <INDENT> with login_user(self.flask_app, self.user): <NEW_LINE> <INDENT> output = self.client.get("/distro/add") <NEW_LINE> csrf_token = output.data.split(b'name="csrf_token" type="hidden" value="')[ 1 ].split(b'">')[0] <NEW_LINE> data = {"name": "Fedora", "csrf_token": csrf_token} <NEW_LINE> with fml_testing.mock_sends(anitya_schema.DistroCreated): <NEW_LINE> <INDENT> output = self.client.post( "/distro/add", data=data, follow_redirects=True ) <NEW_LINE> <DEDENT> self.assertEqual(200, output.status_code) <NEW_LINE> self.assertTrue(b"Distribution added" in output.data) <NEW_LINE> <DEDENT> <DEDENT> def test_duplicate_distro(self): <NEW_LINE> <INDENT> with login_user(self.flask_app, self.user): <NEW_LINE> <INDENT> output = self.client.get("/distro/add") <NEW_LINE> csrf_token = output.data.split(b'name="csrf_token" type="hidden" value="')[ 1 ].split(b'">')[0] <NEW_LINE> data = {"name": "Fedora", "csrf_token": csrf_token} <NEW_LINE> with fml_testing.mock_sends(anitya_schema.DistroCreated): <NEW_LINE> <INDENT> create_output = self.client.post( "/distro/add", data=data, follow_redirects=True ) <NEW_LINE> self.assertEqual(200, output.status_code) <NEW_LINE> <DEDENT> dup_output = self.client.post( "/distro/add", data=data, follow_redirects=True ) <NEW_LINE> self.assertEqual(200, output.status_code) <NEW_LINE> self.assertTrue(b"Distribution added" in create_output.data) <NEW_LINE> self.assertTrue(b"Could not add this distro" in dup_output.data) | Tests for the :func:`anitya.admin.add_distro` view function. | 625990a6c4546d3d9def826c |
class TestEnv(Env): <NEW_LINE> <INDENT> @property <NEW_LINE> def action_space(self): <NEW_LINE> <INDENT> return Box(low=0, high=0, shape=(0,), dtype=np.float32) <NEW_LINE> <DEDENT> @property <NEW_LINE> def observation_space(self): <NEW_LINE> <INDENT> return Box(low=0, high=0, shape=(0,), dtype=np.float32) <NEW_LINE> <DEDENT> def _apply_rl_actions(self, rl_actions): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> def compute_reward(self, rl_actions, **kwargs): <NEW_LINE> <INDENT> if "reward_fn" in self.env_params.additional_params: <NEW_LINE> <INDENT> return self.env_params.additional_params["reward_fn"](self) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 0 <NEW_LINE> <DEDENT> <DEDENT> def get_state(self, **kwargs): <NEW_LINE> <INDENT> return np.array([]) | Test environment used to run simulations in the absence of autonomy.
Required from env_params
None
Optional from env_params
reward_fn : A reward function which takes as input the environment
class and returns a real number.
States
States are an empty list.
Actions
No actions are provided to any RL agent.
Rewards
The reward is zero at every step.
Termination
A rollout is terminated if the time horizon is reached or if two
vehicles collide into one another. | 625990a6c4546d3d9def826d |
class Author(models.Model): <NEW_LINE> <INDENT> first_name = models.CharField(max_length=100) <NEW_LINE> last_name = models.CharField(max_length=100) <NEW_LINE> date_of_birth = models.DateField(null=True, blank=True) <NEW_LINE> date_of_death = models.DateField('Died', null=True, blank=True) <NEW_LINE> def get_absolute_url(self): <NEW_LINE> <INDENT> return reverse('author-detail', args=[str(self.id)]) <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return '%s, %s' % (self.first_name, self.last_name) <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> ordering = ['last_name'] | Model representing an author. | 625990a6187af65679d2abb8 |
class OnePointElementCrossover(ElementCrossover): <NEW_LINE> <INDENT> def __init__(self, element_pool, max_diff_elements=None, min_percentage_elements=None, verbose=False, rng=np.random): <NEW_LINE> <INDENT> ElementCrossover.__init__(self, element_pool, max_diff_elements, min_percentage_elements, verbose, rng=rng) <NEW_LINE> self.descriptor = 'OnePointElementCrossover' <NEW_LINE> <DEDENT> def get_new_individual(self, parents): <NEW_LINE> <INDENT> f, m = parents <NEW_LINE> indi = self.initialize_individual(f) <NEW_LINE> indi.info['data']['parents'] = [i.info['confid'] for i in parents] <NEW_LINE> cut_choices = [i for i in range(1, len(f) - 1)] <NEW_LINE> self.rng.shuffle(cut_choices) <NEW_LINE> for cut in cut_choices: <NEW_LINE> <INDENT> fsyms = f.get_chemical_symbols() <NEW_LINE> msyms = m.get_chemical_symbols() <NEW_LINE> syms = fsyms[:cut] + msyms[cut:] <NEW_LINE> ok = True <NEW_LINE> for i, e in enumerate(self.element_pools): <NEW_LINE> <INDENT> elems = e[:] <NEW_LINE> elems_in, indices_in = zip(*[(a.symbol, a.index) for a in f if a.symbol in elems]) <NEW_LINE> max_diff_elem = self.max_diff_elements[i] <NEW_LINE> min_percent_elem = self.min_percentage_elements[i] <NEW_LINE> if min_percent_elem == 0: <NEW_LINE> <INDENT> min_percent_elem = 1. / len(elems_in) <NEW_LINE> <DEDENT> if max_diff_elem is None: <NEW_LINE> <INDENT> max_diff_elem = len(elems_in) <NEW_LINE> <DEDENT> syms_in = [syms[i] for i in indices_in] <NEW_LINE> for s in set(syms_in): <NEW_LINE> <INDENT> percentage = syms_in.count(s) / float(len(syms_in)) <NEW_LINE> if percentage < min_percent_elem: <NEW_LINE> <INDENT> ok = False <NEW_LINE> break <NEW_LINE> <DEDENT> num_diff = len(set(syms_in)) <NEW_LINE> if num_diff > max_diff_elem: <NEW_LINE> <INDENT> ok = False <NEW_LINE> break <NEW_LINE> <DEDENT> <DEDENT> if not ok: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> if ok: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> <DEDENT> for a in f[:cut] + m[cut:]: <NEW_LINE> <INDENT> indi.append(a) <NEW_LINE> <DEDENT> parent_message = ':Parents {0} {1}'.format(f.info['confid'], m.info['confid']) <NEW_LINE> return (self.finalize_individual(indi), self.descriptor + parent_message) | Crossover of the elements in the atoms objects. Point of cross
is chosen randomly.
Parameters:
element_pool: List of elements in the phase space. The elements can be
grouped if the individual consists of different types of elements.
The list should then be a list of lists e.g. [[list1], [list2]]
max_diff_elements: The maximum number of different elements in the
individual. Default is infinite. If the elements are grouped
max_diff_elements should be supplied as a list with each input
corresponding to the elements specified in the same input in
element_pool.
min_percentage_elements: The minimum percentage of any element in
the individual. Default is that any number is allowed. If the elements
are grouped min_percentage_elements should be supplied as a list
with each input corresponding to the elements specified in the
same input in element_pool.
Example: element_pool=[[A,B,C,D],[x,y,z]], max_diff_elements=[3,2],
min_percentage_elements=[.25, .5]
An individual could be "D,B,B,C,x,x,x,x,z,z,z,z"
rng: Random number generator
By default numpy.random. | 625990a6c4546d3d9def826f |
class Server(_AsyncioServer): <NEW_LINE> <INDENT> def __init__(self, targets, id_parameters=None): <NEW_LINE> <INDENT> _AsyncioServer.__init__(self) <NEW_LINE> self.targets = targets <NEW_LINE> self.id_parameters = id_parameters <NEW_LINE> <DEDENT> @asyncio.coroutine <NEW_LINE> def _handle_connection_cr(self, reader, writer): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> line = yield from reader.readline() <NEW_LINE> if line != _init_string: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> obj = { "targets": sorted(self.targets.keys()), "parameters": self.id_parameters } <NEW_LINE> line = pyon.encode(obj) + "\n" <NEW_LINE> writer.write(line.encode()) <NEW_LINE> line = yield from reader.readline() <NEW_LINE> if not line: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> target_name = line.decode()[:-1] <NEW_LINE> try: <NEW_LINE> <INDENT> target = self.targets[target_name] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> while True: <NEW_LINE> <INDENT> line = yield from reader.readline() <NEW_LINE> if not line: <NEW_LINE> <INDENT> break <NEW_LINE> <DEDENT> obj = pyon.decode(line.decode()) <NEW_LINE> try: <NEW_LINE> <INDENT> if obj["action"] == "get_rpc_method_list": <NEW_LINE> <INDENT> members = inspect.getmembers(target, inspect.ismethod) <NEW_LINE> methods = {} <NEW_LINE> for name, method in members: <NEW_LINE> <INDENT> if name.startswith("_"): <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> method = getattr(target, name) <NEW_LINE> argspec = inspect.getfullargspec(method) <NEW_LINE> methods[name] = (dict(argspec.__dict__), inspect.getdoc(method)) <NEW_LINE> <DEDENT> obj = {"status": "ok", "ret": methods} <NEW_LINE> <DEDENT> elif obj["action"] == "call": <NEW_LINE> <INDENT> logger.debug("calling %s", _PrettyPrintCall(obj)) <NEW_LINE> method = getattr(target, obj["name"]) <NEW_LINE> ret = method(*obj["args"], **obj["kwargs"]) <NEW_LINE> obj = {"status": "ok", "ret": ret} <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError("Unknown action: {}" .format(obj["action"])) <NEW_LINE> <DEDENT> <DEDENT> except Exception: <NEW_LINE> <INDENT> obj = {"status": "failed", "message": traceback.format_exc()} <NEW_LINE> <DEDENT> line = pyon.encode(obj) + "\n" <NEW_LINE> writer.write(line.encode()) <NEW_LINE> <DEDENT> <DEDENT> finally: <NEW_LINE> <INDENT> writer.close() | This class creates a TCP server that handles requests coming from
``Client`` objects.
The server is designed using ``asyncio`` so that it can easily support
multiple connections without the locking issues that arise in
multi-threaded applications. Multiple connection support is useful even in
simple cases: it allows new connections to be accepted even when the
previous client failed to properly shut down its connection.
:param targets: A dictionary of objects providing the RPC methods to be
exposed to the client. Keys are names identifying each object.
Clients select one of these objects using its name upon connection.
:param id_parameters: An optional human-readable string giving more
information about the parameters of the server. | 625990a6187af65679d2abbb |
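A minimal server-side sketch for the entry above: ``targets`` maps a chosen name to a plain Python object whose public methods become remotely callable through the "get_rpc_method_list" and "call" actions. The ``Thermometer`` class, the ``"thermo"`` key and the ``id_parameters`` string are illustrative assumptions; starting the underlying ``_AsyncioServer`` event loop is not shown in this excerpt, so it is omitted here.

```python
# Hypothetical target object: every public method (no leading underscore)
# is exposed to connected clients.
class Thermometer:
    def read_celsius(self):
        return 21.5

# Clients pick a target by its key ("thermo") right after connecting.
server = Server({"thermo": Thermometer()}, id_parameters="lab-1 thermometer")
```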
class AutoDiffException(Exception): <NEW_LINE> <INDENT> pass | Base class for all exceptions related to automatic differentiation failures. | 625990a6187af65679d2abbc |
class BackendAddressPool(SubResource): <NEW_LINE> <INDENT> _validation = { 'etag': {'readonly': True}, 'type': {'readonly': True}, 'backend_ip_configurations': {'readonly': True}, 'load_balancing_rules': {'readonly': True}, 'outbound_rule': {'readonly': True}, 'outbound_rules': {'readonly': True}, 'provisioning_state': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'backend_ip_configurations': {'key': 'properties.backendIPConfigurations', 'type': '[NetworkInterfaceIPConfiguration]'}, 'load_balancing_rules': {'key': 'properties.loadBalancingRules', 'type': '[SubResource]'}, 'outbound_rule': {'key': 'properties.outboundRule', 'type': 'SubResource'}, 'outbound_rules': {'key': 'properties.outboundRules', 'type': '[SubResource]'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, id: Optional[str] = None, name: Optional[str] = None, **kwargs ): <NEW_LINE> <INDENT> super(BackendAddressPool, self).__init__(id=id, **kwargs) <NEW_LINE> self.name = name <NEW_LINE> self.etag = None <NEW_LINE> self.type = None <NEW_LINE> self.backend_ip_configurations = None <NEW_LINE> self.load_balancing_rules = None <NEW_LINE> self.outbound_rule = None <NEW_LINE> self.outbound_rules = None <NEW_LINE> self.provisioning_state = None | Pool of backend IP addresses.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within the set of backend address pools
used by the load balancer. This name can be used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Type of the resource.
:vartype type: str
:ivar backend_ip_configurations: An array of references to IP addresses defined in network
interfaces.
:vartype backend_ip_configurations:
list[~azure.mgmt.network.v2019_12_01.models.NetworkInterfaceIPConfiguration]
:ivar load_balancing_rules: An array of references to load balancing rules that use this
backend address pool.
:vartype load_balancing_rules: list[~azure.mgmt.network.v2019_12_01.models.SubResource]
:ivar outbound_rule: A reference to an outbound rule that uses this backend address pool.
:vartype outbound_rule: ~azure.mgmt.network.v2019_12_01.models.SubResource
:ivar outbound_rules: An array of references to outbound rules that use this backend address
pool.
:vartype outbound_rules: list[~azure.mgmt.network.v2019_12_01.models.SubResource]
:ivar provisioning_state: The provisioning state of the backend address pool resource. Possible
values include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2019_12_01.models.ProvisioningState | 625990a7091ae35668706bdb |
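A hedged construction sketch for the generated model above; the pool name is illustrative, and it assumes the surrounding azure-mgmt-network package context (the ``SubResource`` base class and the msrest ``Model`` machinery are not shown in this excerpt).

```python
# Only id/name are writable on creation; the remaining properties are
# read-only and stay None until the service returns them.
pool = BackendAddressPool(name="web-backend")
print(pool.name)                # "web-backend"
print(pool.provisioning_state)  # None until populated by the service
```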
class MailRequest(object): <NEW_LINE> <INDENT> def __init__(self, Peer, From, To, Data): <NEW_LINE> <INDENT> self.Peer = Peer <NEW_LINE> self.Data = Data <NEW_LINE> try: <NEW_LINE> <INDENT> self.From = _decode_header_randomness(From).pop() <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> self.From = None <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> self.To = _decode_header_randomness(To).pop() <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> self.To = None <NEW_LINE> <DEDENT> self.base = encoding.from_string(self.Data) <NEW_LINE> if 'from' not in self.base: <NEW_LINE> <INDENT> self.base['from'] = self.From <NEW_LINE> <DEDENT> if 'to' not in self.base: <NEW_LINE> <INDENT> self.base['to'] = self.To <NEW_LINE> <DEDENT> self.From = self.From or self.base['from'] <NEW_LINE> self.To = self.To or self.base[ROUTABLE_TO_HEADER] <NEW_LINE> self.bounce = None <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return "From: %r" % [self.Peer, self.From, self.To] <NEW_LINE> <DEDENT> def all_parts(self): <NEW_LINE> <INDENT> return self.base.parts <NEW_LINE> <DEDENT> def body(self): <NEW_LINE> <INDENT> if self.base.parts: <NEW_LINE> <INDENT> return self.base.parts[0].body <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.base.body <NEW_LINE> <DEDENT> <DEDENT> def __contains__(self, key): <NEW_LINE> <INDENT> return self.base.__contains__(key) <NEW_LINE> <DEDENT> def __getitem__(self, name): <NEW_LINE> <INDENT> return self.base.__getitem__(name) <NEW_LINE> <DEDENT> def __setitem__(self, name, val): <NEW_LINE> <INDENT> self.base.__setitem__(name, val) <NEW_LINE> <DEDENT> def __delitem__(self, name): <NEW_LINE> <INDENT> del self.base[name] <NEW_LINE> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return encoding.to_string(self.base) <NEW_LINE> <DEDENT> def items(self): <NEW_LINE> <INDENT> return self.base.items() <NEW_LINE> <DEDENT> def keys(self): <NEW_LINE> <INDENT> return self.base.keys() <NEW_LINE> <DEDENT> def to_message(self): <NEW_LINE> <INDENT> return encoding.to_message(self.base) <NEW_LINE> <DEDENT> def walk(self): <NEW_LINE> <INDENT> for x in self.base.walk(): <NEW_LINE> <INDENT> yield x <NEW_LINE> <DEDENT> <DEDENT> def is_bounce(self, threshold=0.3): <NEW_LINE> <INDENT> if not self.bounce: <NEW_LINE> <INDENT> self.bounce = bounce.detect(self) <NEW_LINE> <DEDENT> return self.bounce.score > threshold <NEW_LINE> <DEDENT> def get_bcc(self): <NEW_LINE> <INDENT> bcc_pos = re.search(b"\nBcc:", self.Data) <NEW_LINE> if bcc_pos is not None: <NEW_LINE> <INDENT> start = bcc_pos.start() + 5 <NEW_LINE> date_pos = re.search(b"\nDate:", self.Data) <NEW_LINE> end = date_pos.end() - 6 <NEW_LINE> address = self.Data[start:end].decode("utf-8") <NEW_LINE> address_list = address.split(",") <NEW_LINE> bcc_address_list = [x.strip() for x in address_list] <NEW_LINE> return bcc_address_list <NEW_LINE> <DEDENT> return [] <NEW_LINE> <DEDENT> @property <NEW_LINE> def original(self): <NEW_LINE> <INDENT> warnings.warn("MailRequest.original is deprecated, use MailRequest.Data instead", category=DeprecationWarning, stacklevel=2) <NEW_LINE> return self.Data | This is what is given to your message handlers. The information you get out
of this is *ALWAYS* in Python str (unicode in Python 2.7) and should be
usable by any API. Modifying this object will cause other handlers that
deal with it to get your modifications, but in general you don't want to do
more than maybe tag a few headers. | 625990a7adb09d7d5dc0c508 |
class LinksFacets(): <NEW_LINE> <INDENT> pass | The links array is an optional child property of the items
array. It contains one or more anonymous objects, each with five
possible properties:
href (REQUIRED),
rel (REQUIRED),
name (OPTIONAL),
render (OPTIONAL),
prompt (OPTIONAL)
// sample links array
{
"collection" :
{
"version" : "1.0",
"href" : URI,
"items" :
[
{
"href" : URI,
"data" : [ARRAY],
"links" :
[
{"href" : URI, "rel" : STRING, "prompt" : STRING, "name" : STRING, "render" : "image"},
{"href" : URI, "rel" : STRING, "prompt" : STRING, "name" : STRING}, "render" : "link",
...
{"href" : URI, "rel" : STRING, "prompt" : STRING, "name" : STRING}
]
}
]
} | 625990a7c4546d3d9def8274 |
class DesktopChromeDriver(Driver): <NEW_LINE> <INDENT> def __init__(self, properties): <NEW_LINE> <INDENT> Driver.__init__(self, properties) <NEW_LINE> if ((properties.get_remote_url() is None) or (properties.get_remote_url() == "")): <NEW_LINE> <INDENT> executable_path = properties.get_executable_path() <NEW_LINE> print("Picking up Chrome executable at " + executable_path) <NEW_LINE> chrome_options = Options() <NEW_LINE> chrome_options.add_argument("--disable-extensions") <NEW_LINE> chrome_options.add_argument("--disable-logging") <NEW_LINE> driver = webdriver.Chrome(executable_path, chrome_options=chrome_options) <NEW_LINE> self.set_web_driver(driver) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> capabilities = DesiredCapabilities.CHROME.copy() <NEW_LINE> capabilities['platform'] = properties.get_platform() <NEW_LINE> capabilities['version'] = properties.get_browser_version() <NEW_LINE> webdriver.Remote(command_executor=properties.get_remote_url(), desired_capabilities=capabilities) | Init desktop Chrome driver. | 625990a7091ae35668706bdf |
class TingYunObjectWrapperBase(_ObjectProxy): <NEW_LINE> <INDENT> @property <NEW_LINE> def _previous_object(self): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self._self_previous_object <NEW_LINE> <DEDENT> except AttributeError: <NEW_LINE> <INDENT> self._self_previous_object = getattr(self.__wrapped__, '_previous_object', self.__wrapped__) <NEW_LINE> return self._self_previous_object | For usage details, see http://wrapt.readthedocs.io/en/latest/
| 625990a7c4546d3d9def8276 |
class UnversionedListMeta(object): <NEW_LINE> <INDENT> def __init__(self, self_link=None, resource_version=None): <NEW_LINE> <INDENT> self.swagger_types = { 'self_link': 'str', 'resource_version': 'str' } <NEW_LINE> self.attribute_map = { 'self_link': 'selfLink', 'resource_version': 'resourceVersion' } <NEW_LINE> self._self_link = self_link <NEW_LINE> self._resource_version = resource_version <NEW_LINE> <DEDENT> @property <NEW_LINE> def self_link(self): <NEW_LINE> <INDENT> return self._self_link <NEW_LINE> <DEDENT> @self_link.setter <NEW_LINE> def self_link(self, self_link): <NEW_LINE> <INDENT> self._self_link = self_link <NEW_LINE> <DEDENT> @property <NEW_LINE> def resource_version(self): <NEW_LINE> <INDENT> return self._resource_version <NEW_LINE> <DEDENT> @resource_version.setter <NEW_LINE> def resource_version(self, resource_version): <NEW_LINE> <INDENT> self._resource_version = resource_version <NEW_LINE> <DEDENT> def to_dict(self): <NEW_LINE> <INDENT> result = {} <NEW_LINE> for attr, _ in iteritems(self.swagger_types): <NEW_LINE> <INDENT> value = getattr(self, attr) <NEW_LINE> if isinstance(value, list): <NEW_LINE> <INDENT> result[attr] = list(map( lambda x: x.to_dict() if hasattr(x, "to_dict") else x, value )) <NEW_LINE> <DEDENT> elif hasattr(value, "to_dict"): <NEW_LINE> <INDENT> result[attr] = value.to_dict() <NEW_LINE> <DEDENT> elif isinstance(value, dict): <NEW_LINE> <INDENT> result[attr] = dict(map( lambda item: (item[0], item[1].to_dict()) if hasattr(item[1], "to_dict") else item, value.items() )) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> result[attr] = value <NEW_LINE> <DEDENT> <DEDENT> return result <NEW_LINE> <DEDENT> def to_str(self): <NEW_LINE> <INDENT> return pformat(self.to_dict()) <NEW_LINE> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return self.to_str() <NEW_LINE> <DEDENT> def __eq__(self, other): <NEW_LINE> <INDENT> return self.__dict__ == other.__dict__ <NEW_LINE> <DEDENT> def __ne__(self, other): <NEW_LINE> <INDENT> return not self == other | NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually. | 625990a7adb09d7d5dc0c514 |
class Student: <NEW_LINE> <INDENT> def __init__(self, first_name, last_name, age): <NEW_LINE> <INDENT> self.first_name = first_name <NEW_LINE> self.last_name = last_name <NEW_LINE> self.age = age <NEW_LINE> <DEDENT> def to_json(self, attrs=None): <NEW_LINE> <INDENT> dic = {} <NEW_LINE> if attrs is None: <NEW_LINE> <INDENT> return self.__dict__ <NEW_LINE> <DEDENT> for item in attrs: <NEW_LINE> <INDENT> if item in self.__dict__.keys(): <NEW_LINE> <INDENT> dic[item] = self.__dict__[item] <NEW_LINE> <DEDENT> <DEDENT> return dic | Public instance attributes:
first_name
last_name
age | 625990a7091ae35668706beb |
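A short usage sketch of the ``to_json`` filtering above; the attribute values are illustrative. Passing no filter returns the full instance dictionary, while unknown attribute names are silently skipped.

```python
# Illustrative values; to_json(None) returns every instance attribute,
# to_json(attrs) keeps only the attributes that actually exist.
s = Student("Ada", "Lovelace", 36)
print(s.to_json())                       # {'first_name': 'Ada', 'last_name': 'Lovelace', 'age': 36}
print(s.to_json(["first_name", "age"]))  # {'first_name': 'Ada', 'age': 36}
print(s.to_json(["height"]))             # {} -- unknown names are ignored
```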
class TsfPageApiGroupInfo(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.TotalCount = None <NEW_LINE> self.Content = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.TotalCount = params.get("TotalCount") <NEW_LINE> if params.get("Content") is not None: <NEW_LINE> <INDENT> self.Content = [] <NEW_LINE> for item in params.get("Content"): <NEW_LINE> <INDENT> obj = ApiGroupInfo() <NEW_LINE> obj._deserialize(item) <NEW_LINE> self.Content.append(obj) <NEW_LINE> <DEDENT> <DEDENT> memeber_set = set(params.keys()) <NEW_LINE> for name, value in vars(self).items(): <NEW_LINE> <INDENT> if name in memeber_set: <NEW_LINE> <INDENT> memeber_set.remove(name) <NEW_LINE> <DEDENT> <DEDENT> if len(memeber_set) > 0: <NEW_LINE> <INDENT> warnings.warn("%s fields are useless." % ",".join(memeber_set)) | Paginated ApiGroupInfo structure
| 625990a7adb09d7d5dc0c518 |
class EEG_Report: <NEW_LINE> <INDENT> def __init__(self, path=None): <NEW_LINE> <INDENT> self.path = path <NEW_LINE> <DEDENT> def get_age_sex(self): <NEW_LINE> <INDENT> import eegreportparser as erp <NEW_LINE> ageStr,sex = erp.GetDemographics(self.path) <NEW_LINE> try: <NEW_LINE> <INDENT> age = float(ageStr) <NEW_LINE> <DEDENT> except ValueError: <NEW_LINE> <INDENT> age = None <NEW_LINE> <DEDENT> if sex == "Undetermined": <NEW_LINE> <INDENT> sex = None <NEW_LINE> <DEDENT> return age,sex | Each instance of this class represents an EEG recording session in the dataset.
| 625990a7187af65679d2abc6 |
class PlotlyDisplay(IPython.core.display.HTML): <NEW_LINE> <INDENT> def __init__(self, url, width, height): <NEW_LINE> <INDENT> self.resource = url <NEW_LINE> self.embed_code = get_embed(url, width=width, height=height) <NEW_LINE> super(PlotlyDisplay, self).__init__(data=self.embed_code) <NEW_LINE> <DEDENT> def _repr_html_(self): <NEW_LINE> <INDENT> return self.embed_code | An IPython display object for use with plotly urls
PlotlyDisplay objects should be instantiated with a url for a plot.
IPython will *choose* the proper display representation from any
Python object, using provided methods if they exist. By defining
the following, if an HTML display is unusable, the PlotlyDisplay
object can provide alternate representations. | 625990a7187af65679d2abc7 |
class Subscriber: <NEW_LINE> <INDENT> def __init__(self, context, address, tick_filter=""): <NEW_LINE> <INDENT> self.filter = tick_filter <NEW_LINE> self.context = context <NEW_LINE> self.address = address <NEW_LINE> self.socket = None <NEW_LINE> self.handler = None <NEW_LINE> self.quit_event = threading.Event() <NEW_LINE> self.quit_event.clear() <NEW_LINE> <DEDENT> def start(self, callback=None): <NEW_LINE> <INDENT> if callback and not hasattr(callback, "__call__"): <NEW_LINE> <INDENT> print("%s cannot be invoked" % str(callback)) <NEW_LINE> return <NEW_LINE> <DEDENT> if self.handler and not self.quit_event: <NEW_LINE> <INDENT> self.quit_event.set() <NEW_LINE> self.handler.join() <NEW_LINE> <DEDENT> self.quit_event.clear() <NEW_LINE> self.handler = threading.Thread(target=Subscriber.work_thread, args=(None, self.context, self.address, self.quit_event, callback)) <NEW_LINE> self.handler.start() <NEW_LINE> <DEDENT> def stop(self): <NEW_LINE> <INDENT> if self.handler: <NEW_LINE> <INDENT> self.quit_event.set() <NEW_LINE> self.handler.join() <NEW_LINE> self.handler = None <NEW_LINE> <DEDENT> <DEDENT> def work_thread(self, *args): <NEW_LINE> <INDENT> socket = args[0].socket(zmq.SUB) <NEW_LINE> socket.connect(args[1]) <NEW_LINE> socket.setsockopt_string(zmq.SUBSCRIBE, '') <NEW_LINE> print("Subscriber is collecting tick information......") <NEW_LINE> quit_event = args[2] <NEW_LINE> callback = args[3] <NEW_LINE> while not quit_event.is_set(): <NEW_LINE> <INDENT> tick_info = socket.recv_string() <NEW_LINE> if callback: <NEW_LINE> <INDENT> callback(tick_info) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> print(tick_info) <NEW_LINE> <DEDENT> <DEDENT> socket.close() <NEW_LINE> quit_event.clear() <NEW_LINE> print("Subscriber has stopped, no more tick information will be collected.") | Subscriber | 625990a7c4546d3d9def827d
class PoxCliDriver(Emulator): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(Emulator, self).__init__() <NEW_LINE> self.handle = self <NEW_LINE> self.wrapped = sys.modules[__name__] <NEW_LINE> <DEDENT> def connect(self, **connectargs): <NEW_LINE> <INDENT> for key in connectargs: <NEW_LINE> <INDENT> vars(self)[key] = connectargs[key] <NEW_LINE> <DEDENT> self.name = self.options['name'] <NEW_LINE> poxLibPath = 'default' <NEW_LINE> copy = super(PoxCliDriver, self).secureCopy(self.user_name, self.ip_address,'/home/openflow/pox/pox/core.py', self.pwd,path+'/lib/pox/') <NEW_LINE> self.handle = super(PoxCliDriver, self).connect(user_name = self.user_name, ip_address = self.ip_address,port = None, pwd = self.pwd) <NEW_LINE> if self.handle: <NEW_LINE> <INDENT> self.handle.expect("openflow") <NEW_LINE> command = self.getcmd(self.options) <NEW_LINE> main.log.info("Entering into POX hierarchy") <NEW_LINE> if self.options['pox_lib_location'] != 'default': <NEW_LINE> <INDENT> self.execute(cmd="cd "+self.options['pox_lib_location'],prompt="/pox\$",timeout=120) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.execute(cmd="cd ~/TestON/lib/pox/",prompt="/pox\$",timeout=120) <NEW_LINE> <DEDENT> main.log.info("launching POX controller with given components") <NEW_LINE> self.execute(cmd=command,prompt="DEBUG:",timeout=120) <NEW_LINE> return main.TRUE <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> main.log.error("Connection failed to the host "+self.user_name+"@"+self.ip_address) <NEW_LINE> main.log.error("Failed to connect to the POX controller") <NEW_LINE> return main.FALSE <NEW_LINE> <DEDENT> <DEDENT> def disconnect(self,handle): <NEW_LINE> <INDENT> if self.handle: <NEW_LINE> <INDENT> self.execute(cmd="exit()",prompt="/pox\$",timeout=120) <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> main.log.error("Connection failed to the host") <NEW_LINE> <DEDENT> <DEDENT> def get_version(self): <NEW_LINE> <INDENT> file_input = path+'/lib/pox/core.py' <NEW_LINE> version = super(PoxCliDriver, self).get_version() <NEW_LINE> pattern = '\s*self\.version(.*)' <NEW_LINE> import re <NEW_LINE> for line in open(file_input,'r').readlines(): <NEW_LINE> <INDENT> result = re.match(pattern, line) <NEW_LINE> if result: <NEW_LINE> <INDENT> version = result.group(0) <NEW_LINE> version = re.sub("\s*self\.version\s*=\s*|\(|\)",'',version) <NEW_LINE> version = re.sub(",",'.',version) <NEW_LINE> version = "POX "+version <NEW_LINE> <DEDENT> <DEDENT> return version <NEW_LINE> <DEDENT> def getcmd(self,options): <NEW_LINE> <INDENT> command = "./pox.py " <NEW_LINE> for item in options.keys(): <NEW_LINE> <INDENT> if isinstance(options[item],dict): <NEW_LINE> <INDENT> command = command + item <NEW_LINE> for items in options[item].keys(): <NEW_LINE> <INDENT> if options[item][items] == "None": <NEW_LINE> <INDENT> command = command + " --" + items + " " <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> command = command + " --" + items + "=" + options[item][items] + " " <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> else: <NEW_LINE> <INDENT> if item == 'pox_lib_location': <NEW_LINE> <INDENT> poxLibPath = options[item] <NEW_LINE> <DEDENT> elif item == 'type' or item == 'name': <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> else : <NEW_LINE> <INDENT> command = command + item <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> return command | PoxCliDriver driver provides the basic functions of POX controller | 625990a8c4546d3d9def8283 |
class LinearCalibration(AbstractCalibration): <NEW_LINE> <INDENT> def __init__(self, y_intercept, slope): <NEW_LINE> <INDENT> super(LinearCalibration, self).__init__() <NEW_LINE> self.constant = y_intercept <NEW_LINE> self.slope = slope <NEW_LINE> <DEDENT> def __call__(self, x): <NEW_LINE> <INDENT> return self.constant + self.slope * x <NEW_LINE> <DEDENT> def is_affine(self): <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> def get_slope(self): <NEW_LINE> <INDENT> return self.slope | Linear calibration :math:`x \mapsto a + b x`,
where *a* is the y-intercept and *b* is the slope.
:param y_intercept: y-intercept
:param slope: Slope of the affine transformation | 625990a8c4546d3d9def8284 |
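A worked example of the affine map x -> a + b*x described above; the numbers are illustrative, and it assumes the unshown ``AbstractCalibration`` base class allows direct instantiation.

```python
# Illustrative values: a = 2.0 (y-intercept), b = 0.5 (slope).
cal = LinearCalibration(y_intercept=2.0, slope=0.5)
assert cal(4.0) == 2.0 + 0.5 * 4.0 == 4.0
assert cal.is_affine() and cal.get_slope() == 0.5
```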
class Chanel(object): <NEW_LINE> <INDENT> def __init__(self, api): <NEW_LINE> <INDENT> self.api = api <NEW_LINE> <DEDENT> @property <NEW_LINE> def session(self): <NEW_LINE> <INDENT> return self.api.get_session() <NEW_LINE> <DEDENT> def send_wss_request(self, name, msg): <NEW_LINE> <INDENT> return self.api.send_wss_request(name, msg) | Class for base IQ option websocket channel. | 625990a8091ae35668706c01
class AmphoraFinalize(BaseAmphoraTask): <NEW_LINE> <INDENT> def execute(self, amphora): <NEW_LINE> <INDENT> self.amphora_driver.finalize_amphora(amphora) <NEW_LINE> LOG.debug("Finalized the amphora.") <NEW_LINE> <DEDENT> def revert(self, result, amphora, *args, **kwargs): <NEW_LINE> <INDENT> if isinstance(result, failure.Failure): <NEW_LINE> <INDENT> return <NEW_LINE> <DEDENT> LOG.warning("Reverting amphora finalize.") <NEW_LINE> self.task_utils.mark_amphora_status_error(amphora.id) | Task to finalize the amphora before any listeners are configured. | 625990a8c4546d3d9def8286 |
class ObjectNameCase(unittest.TestCase): <NEW_LINE> <INDENT> def testSimple(self): <NEW_LINE> <INDENT> name = 'object1' <NEW_LINE> obj = QObject() <NEW_LINE> obj.setObjectName(name) <NEW_LINE> self.assertEqual(name, obj.objectName()) <NEW_LINE> <DEDENT> def testEmpty(self): <NEW_LINE> <INDENT> name = '' <NEW_LINE> obj = QObject() <NEW_LINE> obj.setObjectName(name) <NEW_LINE> self.assertEqual(name, obj.objectName()) <NEW_LINE> <DEDENT> def testDefault(self): <NEW_LINE> <INDENT> obj = QObject() <NEW_LINE> self.assertEqual('', obj.objectName()) <NEW_LINE> <DEDENT> def testUnicode(self): <NEW_LINE> <INDENT> name = py3k.unicode_('não') <NEW_LINE> obj = QObject() <NEW_LINE> obj.setObjectName(name) <NEW_LINE> self.assertEqual(obj.objectName(), name) | Tests related to QObject object name | 625990a8091ae35668706c03 |
class SeleniumSessionWorkflowPopulator(SeleniumSessionGetPostMixin, populators.BaseWorkflowPopulator, ImporterGalaxyInterface): <NEW_LINE> <INDENT> def __init__(self, selenium_context: GalaxySeleniumContext): <NEW_LINE> <INDENT> self.selenium_context = selenium_context <NEW_LINE> self.dataset_populator = SeleniumSessionDatasetPopulator(selenium_context) <NEW_LINE> self.dataset_collection_populator = SeleniumSessionDatasetCollectionPopulator(selenium_context) <NEW_LINE> <DEDENT> def import_workflow(self, workflow: dict, **kwds) -> dict: <NEW_LINE> <INDENT> workflow_str = json.dumps(workflow, indent=4) <NEW_LINE> data = { 'workflow': workflow_str, } <NEW_LINE> data.update(**kwds) <NEW_LINE> upload_response = self._post("workflows", data=data) <NEW_LINE> upload_response.raise_for_status() <NEW_LINE> return upload_response.json() <NEW_LINE> <DEDENT> def upload_yaml_workflow(self, has_yaml, **kwds) -> str: <NEW_LINE> <INDENT> workflow = convert_and_import_workflow(has_yaml, galaxy_interface=self, **kwds) <NEW_LINE> return workflow["id"] | Implementation of BaseWorkflowPopulator backed by bioblend. | 625990a8c4546d3d9def828c |
class STD_ANON_7 (pyxb.binding.datatypes.string, pyxb.binding.basis.enumeration_mixin): <NEW_LINE> <INDENT> _ExpandedName = None <NEW_LINE> _XSDLocation = pyxb.utils.utility.Location('/home/claudio/Applications/eudat/b2safe/manifest/mets.xsd', 1483, 8) <NEW_LINE> _Documentation = None | An atomic simple type. | 625990a8c4546d3d9def8290 |
class VirtualMachineScaleSetStorageProfile(Model): <NEW_LINE> <INDENT> _attribute_map = { 'image_reference': {'key': 'imageReference', 'type': 'ImageReference'}, 'os_disk': {'key': 'osDisk', 'type': 'VirtualMachineScaleSetOSDisk'}, } <NEW_LINE> def __init__(self, image_reference=None, os_disk=None): <NEW_LINE> <INDENT> self.image_reference = image_reference <NEW_LINE> self.os_disk = os_disk | Describes a virtual machine scale set storage profile.
:param image_reference: the image reference.
:type image_reference: :class:`ImageReference
<azure.mgmt.compute.models.ImageReference>`
:param os_disk: the OS disk.
:type os_disk: :class:`VirtualMachineScaleSetOSDisk
<azure.mgmt.compute.models.VirtualMachineScaleSetOSDisk>` | 625990a9187af65679d2abde |
class ProfileWriter(ProfileParser): <NEW_LINE> <INDENT> def set_rule(self, rule): <NEW_LINE> <INDENT> action = rule[0] <NEW_LINE> target = rule[1] <NEW_LINE> sub = rule[2] <NEW_LINE> rule_list = [] <NEW_LINE> if not self.has_section(target): <NEW_LINE> <INDENT> self.add_section(target) <NEW_LINE> <DEDENT> if self.has_option(target, action): <NEW_LINE> <INDENT> rule_list = self.get_list(target, action) <NEW_LINE> <DEDENT> rule_list.append(sub) <NEW_LINE> text_rules = "\n\t".join(rule_list) <NEW_LINE> self.set(target, action, text_rules) | Object that writes basic CoPilot profiles | 625990a9091ae35668706c1f |
class HostStats(object): <NEW_LINE> <INDENT> def __init__(self, host, target, method = '', baseline = None, measures = None, baselineMeasures = None): <NEW_LINE> <INDENT> self.host = host <NEW_LINE> self.target = target <NEW_LINE> if measures is not None: <NEW_LINE> <INDENT> self.measures = measures <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.measures = [] <NEW_LINE> <DEDENT> self.baseline = baseline <NEW_LINE> self.baselineMeasures = baselineMeasures if baselineMeasures is not None else [] <NEW_LINE> self.method = method <NEW_LINE> <DEDENT> def subtrackBaseline(self, ping): <NEW_LINE> <INDENT> return DelayStats(ping.timestamp, ping.step, ping.sent, ping.received, ping.rttmin - self.baseline.rttmin, ping.rttavg - self.baseline.rttavg, ping.rttmax - self.baseline.rttmax, ping.rttdev) <NEW_LINE> <DEDENT> def computeBaseline(self): <NEW_LINE> <INDENT> sent = 0 <NEW_LINE> received = 0 <NEW_LINE> rttmin = 0 <NEW_LINE> rttavg = 0 <NEW_LINE> rttmax = 0 <NEW_LINE> rttdev = 0 <NEW_LINE> for measure in self.baselineMeasures: <NEW_LINE> <INDENT> sent += measure.sent <NEW_LINE> received += measure.received <NEW_LINE> rttmin = min(rttmin, measure.rttmin) if rttmin is not 0 else measure.rttmin <NEW_LINE> rttmax = max(rttmax, measure.rttmax) <NEW_LINE> rttavg += measure.rttavg <NEW_LINE> rttdev += measure.rttdev ** 2 <NEW_LINE> <DEDENT> rttdev = math.sqrt(rttdev / len(self.baselineMeasures)) <NEW_LINE> rttavg /= len(self.baselineMeasures) <NEW_LINE> self.baseline = DelayStats( sent = sent, received = received, rttmin = rttmin, rttavg = rttavg, rttmax = rttmax, rttdev = rttdev ) <NEW_LINE> <DEDENT> def getPair(self): <NEW_LINE> <INDENT> return self.host.name, self.target.name <NEW_LINE> <DEDENT> def getStrPair(self): <NEW_LINE> <INDENT> return "%s,%s" % self.getPair() <NEW_LINE> <DEDENT> def printAll(self): <NEW_LINE> <INDENT> return "\n%s -> %s\nbaseline for %s: \n %s\nmeasures : \n %s\n" % (self.host.name, self.target.name, self.method, self.baseline.printAll() if self.baseline is not None else '', "\n ".join([m.printAll() for m in self.measures])) <NEW_LINE> <DEDENT> def toDict(self): <NEW_LINE> <INDENT> return {'host': self.host.name, 'target': self.target.name, 'baseline': self.baseline.toDict(), 'baselineMeasures': [m.toDict() for m in self.baselineMeasures], 'measures': [m.toDict() for m in self.measures]} | Storage for delay results | 625990a9187af65679d2abe5 |
class LineMemoryMonitor(LineMonitor): <NEW_LINE> <INDENT> def __init__(self, recorder, record_type=None): <NEW_LINE> <INDENT> if record_type is None: <NEW_LINE> <INDENT> record_type = LineMemoryRecord <NEW_LINE> <DEDENT> super(LineMemoryMonitor, self).__init__(recorder, record_type) <NEW_LINE> self._process = None <NEW_LINE> <DEDENT> def enable(self): <NEW_LINE> <INDENT> if self._call_tracker('ping'): <NEW_LINE> <INDENT> self._process = psutil.Process(os.getpid()) <NEW_LINE> self._recorder.prepare(self._record_type) <NEW_LINE> self._tracer.replace(self.on_line_event) <NEW_LINE> <DEDENT> <DEDENT> def disable(self): <NEW_LINE> <INDENT> if self._call_tracker('pong'): <NEW_LINE> <INDENT> self._tracer.recover() <NEW_LINE> self._recorder.finalize() <NEW_LINE> self._process = None <NEW_LINE> <DEDENT> <DEDENT> def gather_info(self, frame): <NEW_LINE> <INDENT> rss, vms = self._process.memory_info() <NEW_LINE> filename, lineno, function, line, _ = inspect.getframeinfo(frame, context=1) <NEW_LINE> if line is None: <NEW_LINE> <INDENT> line = ['<compiled string>'] <NEW_LINE> <DEDENT> return ( self._index, function, lineno, rss, vms, line[0].rstrip(), filename) | Record process memory on python line events.
The class hooks into the settrace function to receive trace events and
record the current process memory when a line of code is about to be
executed. | 625990a9187af65679d2abec |
class User_Event_Entity: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> unique_users = set() <NEW_LINE> unique_events = set() <NEW_LINE> events_for_user = defaultdict(set) <NEW_LINE> users_for_event = defaultdict(set) <NEW_LINE> for filename in ['data/train.csv', 'data/test.csv']: <NEW_LINE> <INDENT> with open(filename, 'r') as in_f: <NEW_LINE> <INDENT> in_f.readline().strip().split(',') <NEW_LINE> for line in in_f.readlines(): <NEW_LINE> <INDENT> cols = line.strip().split(',') <NEW_LINE> unique_users.add(cols[0]) <NEW_LINE> unique_events.add(cols[1]) <NEW_LINE> events_for_user[cols[0]].add(cols[1]) <NEW_LINE> users_for_event[cols[1]].add(cols[0]) <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.user_event_scores = ss.dok_matrix((len(unique_users), len(unique_events))) <NEW_LINE> self.user_index = dict() <NEW_LINE> self.event_index = dict() <NEW_LINE> for i, user in enumerate(unique_users): <NEW_LINE> <INDENT> self.user_index[user] = i <NEW_LINE> <DEDENT> for i, event in enumerate(unique_events): <NEW_LINE> <INDENT> self.event_index[event] = i <NEW_LINE> <DEDENT> with open('data/train.csv', 'r') as train_f: <NEW_LINE> <INDENT> train_f.readline() <NEW_LINE> for line in train_f.readlines(): <NEW_LINE> <INDENT> cols = line.strip().split(',') <NEW_LINE> user_i = self.user_index[cols[0]] <NEW_LINE> event_j = self.event_index[cols[1]] <NEW_LINE> self.user_event_scores[user_i, event_j] = int(cols[4]) - int(cols[5]) <NEW_LINE> <DEDENT> <DEDENT> sio.mmwrite('prep_data/matrix_user_event_scores', self.user_event_scores) <NEW_LINE> self.unique_user_pairs = set() <NEW_LINE> self.unique_event_pairs = set() <NEW_LINE> for event in unique_events: <NEW_LINE> <INDENT> users = users_for_event[event] <NEW_LINE> if len(users) > 2: <NEW_LINE> <INDENT> self.unique_user_pairs.update(itertools.combinations(users, 2)) <NEW_LINE> <DEDENT> <DEDENT> for user in unique_users: <NEW_LINE> <INDENT> events = events_for_user[user] <NEW_LINE> if len(events) > 2: <NEW_LINE> <INDENT> self.unique_event_pairs.update(itertools.combinations(events, 2)) <NEW_LINE> <DEDENT> <DEDENT> pickle.dump(self.user_index, open("prep_data/user_index.pkl", 'wb')) <NEW_LINE> pickle.dump(self.event_index, open("prep_data/event_index.pkl", 'wb')) | User-event information:
user_index : user id
event_index: event id
user_event_scores: user --> event interest level
unique_user_pairs: pairs of related users
unique_event_pairs: pairs of related events | 625990a9091ae35668706c3b
class Solution: <NEW_LINE> <INDENT> def winSum(self, nums, k): <NEW_LINE> <INDENT> if not nums: <NEW_LINE> <INDENT> return [] <NEW_LINE> <DEDENT> s = sum(nums[:k]) <NEW_LINE> result = [s] <NEW_LINE> for i in range(len(nums) - k): <NEW_LINE> <INDENT> j = i + k <NEW_LINE> s = s - nums[i] + nums[j] <NEW_LINE> result.append(s) <NEW_LINE> <DEDENT> return result | @param: nums: a list of integers.
@param: k: length of window.
@return: the sum of the elements inside the window at each move. | 625990aa091ae35668706c45
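A quick check of the sliding-window sum above, with illustrative inputs; each new output is the previous sum minus the element leaving the window plus the element entering it.

```python
# Window of size 3 over [1, 2, 7, 8, 5]: 1+2+7, 2+7+8, 7+8+5.
s = Solution()
print(s.winSum([1, 2, 7, 8, 5], 3))  # [10, 17, 20]
print(s.winSum([], 0))               # [] -- empty input is handled up front
```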
class EncoderDecoder(nn.Module): <NEW_LINE> <INDENT> def __init__(self, encoder, decoder, src_embed, tgt_embed, generator): <NEW_LINE> <INDENT> super(EncoderDecoder, self).__init__() <NEW_LINE> self.encoder = encoder <NEW_LINE> self.decoder = decoder <NEW_LINE> self.src_embed = src_embed <NEW_LINE> self.tgt_embed = tgt_embed <NEW_LINE> self.generator = generator <NEW_LINE> <DEDENT> def forward(self, src, tgt, src_mask, tgt_mask): <NEW_LINE> <INDENT> print("ed forward") <NEW_LINE> return self.decode(self.encode(src, src_mask), src_mask, tgt, tgt_mask) <NEW_LINE> <DEDENT> def encode(self, src, src_mask): <NEW_LINE> <INDENT> print("ed enc") <NEW_LINE> return self.encoder(self.src_embed(src), src_mask) <NEW_LINE> <DEDENT> def decode(self, memory, src_mask, tgt, tgt_mask): <NEW_LINE> <INDENT> print("ed dec") <NEW_LINE> return self.decoder(self.tgt_embed(tgt), memory, src_mask, tgt_mask) | Standard Encoder-Decoder Architecture. Base class for the transformer model | 625990aa091ae35668706c47 |
class KeyDefaultdict(collections.defaultdict): <NEW_LINE> <INDENT> def __missing__(self, key): <NEW_LINE> <INDENT> if self.default_factory is None: <NEW_LINE> <INDENT> raise KeyError(key) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> ret = self[key] = self.default_factory(key) <NEW_LINE> return ret | Default dict where KeyDefaultdict(C)[x] = C(x)
| 625990aa091ae35668706c49 |
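A brief usage sketch of the key-aware default factory above: unlike a plain ``collections.defaultdict``, the factory receives the missing key itself, and the computed value is stored back into the mapping.

```python
# The default_factory is called with the missing key, and the result is cached.
squares = KeyDefaultdict(lambda k: k * k)
print(squares[4])     # 16
print(dict(squares))  # {4: 16}
```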
class TenantDiscovery(plugin.DiscoveryBase): <NEW_LINE> <INDENT> def discover(self, manager, param=None): <NEW_LINE> <INDENT> tenants = manager.keystone.projects.list() <NEW_LINE> return tenants or [] | Discovery that supplies keystone tenants.
This discovery should be used when the pollster's work can't be divided
into smaller pieces than per tenant. An example of this is the Swift
pollster, which polls account details and does so per-project. | 625990aa187af65679d2abf5 |
class BinaryTree: <NEW_LINE> <INDENT> def __init__(self, node, left=None, right=None): <NEW_LINE> <INDENT> self.node = node <NEW_LINE> self.left = left <NEW_LINE> self.right = right <NEW_LINE> <DEDENT> @property <NEW_LINE> def isLeaf(self): <NEW_LINE> <INDENT> return self.left is None and self.right is None <NEW_LINE> <DEDENT> @property <NEW_LINE> def height(self): <NEW_LINE> <INDENT> if self.isLeaf: <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 1 + max(self.left.height, self.right.height) <NEW_LINE> <DEDENT> <DEDENT> @property <NEW_LINE> def size(self): <NEW_LINE> <INDENT> if self.isLeaf: <NEW_LINE> <INDENT> return 1 <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return 1 + self.left.size + self.right.size | Class representing full binary tree | 625990aac4546d3d9def82ac |
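A small usage sketch of the tree class above: a root node with two leaf children, checked against the derived properties.

```python
# Root node 1 with leaf children 2 and 3.
tree = BinaryTree(1, left=BinaryTree(2), right=BinaryTree(3))
print(tree.isLeaf)  # False
print(tree.height)  # 2  (1 + max height of the two leaves)
print(tree.size)    # 3  (root + two leaves)
```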
class DefaultConfig: <NEW_LINE> <INDENT> PORT = 3978 <NEW_LINE> APP_ID = os.environ.get("MicrosoftAppId", "") <NEW_LINE> APP_PASSWORD = os.environ.get("MicrosoftAppPassword", "") <NEW_LINE> LUIS_APP_ID = os.environ.get("LuisAppId", "541aa899-3c02-4e7f-bffe-45cfe953f276") <NEW_LINE> LUIS_API_KEY = os.environ.get("LuisAPIKey", "302d9294f0784ecd801181c868f80c3e") <NEW_LINE> LUIS_API_HOST_NAME = os.environ.get("LuisAPIHostName", "https://centralindia.api.cognitive.microsoft.com/") | Bot Configuration | 625990aac4546d3d9def82ad |
class BackupSchedule(base.Resource): <NEW_LINE> <INDENT> def get(self): <NEW_LINE> <INDENT> return self.manager.get(server=self.server) <NEW_LINE> <DEDENT> def delete(self): <NEW_LINE> <INDENT> self.manager.delete(server=self.server) <NEW_LINE> <DEDENT> def update(self, enabled=True, weekly='disabled', daily='disabled', rotation=0): <NEW_LINE> <INDENT> self.manager.create(self.server, enabled, weekly, daily, rotation) | Represents the daily or weekly backup schedule for some server. | 625990aac4546d3d9def82af |
@Metric.register("hit_at_k_within_d") <NEW_LINE> class HitAtKWithinD(Metric): <NEW_LINE> <INDENT> def __init__(self, k=5, d=2) -> None: <NEW_LINE> <INDENT> self._k = k <NEW_LINE> self._k = 2 <NEW_LINE> self._hit_at_k_within_d = 0.0 <NEW_LINE> self._batch_size = 0 <NEW_LINE> self._predictions = None <NEW_LINE> self._gold_labels = None <NEW_LINE> self._ttl_size = 0 <NEW_LINE> <DEDENT> def __call__(self, predictions: torch.Tensor, gold_labels: torch.Tensor, mask: Optional[torch.Tensor] = None): <NEW_LINE> <INDENT> predictions = predictions.detach() <NEW_LINE> gold_labels = gold_labels.detach() <NEW_LINE> if mask is not None: <NEW_LINE> <INDENT> predictions = predictions * mask <NEW_LINE> gold_labels = gold_labels * mask <NEW_LINE> <DEDENT> batch_size = predictions.size(0) <NEW_LINE> predictions = predictions.view(batch_size, -1) <NEW_LINE> gold_labels = gold_labels.view(batch_size, -1) <NEW_LINE> self._batch_size = batch_size <NEW_LINE> self._predictions = predictions <NEW_LINE> self._gold_labels = gold_labels <NEW_LINE> self._ttl_size += batch_size <NEW_LINE> <DEDENT> def get_metric(self, reset: bool = False): <NEW_LINE> <INDENT> top_k = self._predictions.topk(self._k)[0][:,self._k-1] <NEW_LINE> predictions = torch.ge(self._predictions,top_k.unsqueeze(1).expand(self._batch_size,self._gold_labels.size(1))).float() <NEW_LINE> gold_labels = self._gold_labels.float() <NEW_LINE> self._hit_at_5 += ((gold_labels * predictions).sum(1) / gold_labels.sum(1)).sum() <NEW_LINE> hit_at_5 = self._hit_at_5 / self._ttl_size <NEW_LINE> if reset: <NEW_LINE> <INDENT> self.reset() <NEW_LINE> <DEDENT> return hit_at_5.cpu().item() <NEW_LINE> <DEDENT> @overrides <NEW_LINE> def reset(self): <NEW_LINE> <INDENT> self._hit_at_5 = 0.0 <NEW_LINE> self._batch_size = 0 <NEW_LINE> self._predictions = None <NEW_LINE> self._gold_labels = None <NEW_LINE> self._ttl_size = 0 | Just checks batch-equality of two tensors and computes an accuracy metric based on that. This
is similar to :class:`CategoricalAccuracy`, if you've already done a ``.max()`` on your
predictions. If you have categorical output, though, you should typically just use
:class:`CategoricalAccuracy`. The reason you might want to use this instead is if you've done
some kind of constrained inference and don't have a prediction tensor that matches the API of
:class:`CategoricalAccuracy`, which assumes a final dimension of size ``num_classes``. | 625990aa091ae35668706c58 |
class AuthTokenSerializer(serializers.Serializer): <NEW_LINE> <INDENT> email = serializers.CharField() <NEW_LINE> password = serializers.CharField( style={'input_type': 'password'}, trim_whitespace=False ) <NEW_LINE> def validate(self, attrs): <NEW_LINE> <INDENT> email = attrs.get('email') <NEW_LINE> password = attrs.get('password') <NEW_LINE> user = authenticate( request=self.context.get('request'), username=email, password=password ) <NEW_LINE> if not user: <NEW_LINE> <INDENT> msg = _('Unable to authenticate with provided credentials') <NEW_LINE> raise serializers.ValidationError(msg, code='authentication') <NEW_LINE> <DEDENT> attrs['user'] = user <NEW_LINE> return attrs | Serialization for the user authentication object | 625990ab187af65679d2abfe
class FAQs(models.Model): <NEW_LINE> <INDENT> question = models.CharField(max_length=255, unique=True) <NEW_LINE> brief_response = models.TextField() <NEW_LINE> detailed_response_url = models.URLField( max_length=400, null=True, blank=True) | Questions not associated with any particular user.
This is because these questions tend to cut across almost all users. | 625990ab091ae35668706c5e
class AppTokenObtainPairView(TokenObtainPairView): <NEW_LINE> <INDENT> serializer_class = serializers.AppTokenObtainPairSerializer | Takes a set of user credentials and returns an access and refresh JSON web
token pair to prove the authentication of those credentials.
Also returns the language of the logged-in user. | 625990ab091ae35668706c64
class PutCmd(Command): <NEW_LINE> <INDENT> aliases = ('put', 'place', 'drop') <NEW_LINE> syntax = '<thing> {in|into|inside of|inside} <container>' <NEW_LINE> arg_parsers = { 'thing': MatchObject(cls=Thing, search_for='thing', show=True), 'container': 'this' } <NEW_LINE> lock = locks.all_pass <NEW_LINE> def run(self, this, actor, args): <NEW_LINE> <INDENT> thing = args['thing'] <NEW_LINE> if this.location not in (actor, actor.location): <NEW_LINE> <INDENT> actor.tell("You can't get at ", this, ".") <NEW_LINE> <DEDENT> elif thing.location not in (actor, actor.location): <NEW_LINE> <INDENT> actor.tell("You don't have ", thing, ".") <NEW_LINE> <DEDENT> elif not this.opened: <NEW_LINE> <INDENT> actor.tell(this, " is closed.") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> thing.move_to(this, by=actor) <NEW_LINE> <DEDENT> except errors.MoveError as e: <NEW_LINE> <INDENT> if e.message: <NEW_LINE> <INDENT> actor.tell('{r', e.message) <NEW_LINE> <DEDENT> this.emit_message('add_fail', actor=actor, thing=thing) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> this.emit_message('add', actor=actor, thing=thing) | put <thing> in <container>
Places a thing inside an open container. | 625990abc4546d3d9def82b8 |
class Scheduler(object): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> pass <NEW_LINE> <DEDENT> def submit(self, pbs_script, pbs_config, pbs_vars=None, python_exe=None): <NEW_LINE> <INDENT> raise NotImplementedError | Abstract scheduler class. | 625990ab187af65679d2ac02 |
class ExpressRouteCircuitAuthorization(SubResource): <NEW_LINE> <INDENT> _validation = { 'etag': {'readonly': True}, 'type': {'readonly': True}, 'provisioning_state': {'readonly': True}, } <NEW_LINE> _attribute_map = { 'id': {'key': 'id', 'type': 'str'}, 'name': {'key': 'name', 'type': 'str'}, 'etag': {'key': 'etag', 'type': 'str'}, 'type': {'key': 'type', 'type': 'str'}, 'authorization_key': {'key': 'properties.authorizationKey', 'type': 'str'}, 'authorization_use_status': {'key': 'properties.authorizationUseStatus', 'type': 'str'}, 'provisioning_state': {'key': 'properties.provisioningState', 'type': 'str'}, } <NEW_LINE> def __init__( self, *, id: Optional[str] = None, name: Optional[str] = None, authorization_key: Optional[str] = None, authorization_use_status: Optional[Union[str, "AuthorizationUseStatus"]] = None, **kwargs ): <NEW_LINE> <INDENT> super(ExpressRouteCircuitAuthorization, self).__init__(id=id, **kwargs) <NEW_LINE> self.name = name <NEW_LINE> self.etag = None <NEW_LINE> self.type = None <NEW_LINE> self.authorization_key = authorization_key <NEW_LINE> self.authorization_use_status = authorization_use_status <NEW_LINE> self.provisioning_state = None | Authorization in an ExpressRouteCircuit resource.
Variables are only populated by the server, and will be ignored when sending a request.
:param id: Resource ID.
:type id: str
:param name: The name of the resource that is unique within a resource group. This name can be
used to access the resource.
:type name: str
:ivar etag: A unique read-only string that changes whenever the resource is updated.
:vartype etag: str
:ivar type: Type of the resource.
:vartype type: str
:param authorization_key: The authorization key.
:type authorization_key: str
:param authorization_use_status: The authorization use status. Possible values include:
"Available", "InUse".
:type authorization_use_status: str or
~azure.mgmt.network.v2020_08_01.models.AuthorizationUseStatus
:ivar provisioning_state: The provisioning state of the authorization resource. Possible values
include: "Succeeded", "Updating", "Deleting", "Failed".
:vartype provisioning_state: str or ~azure.mgmt.network.v2020_08_01.models.ProvisioningState | 625990ab091ae35668706c68 |
class UserChangeForm(forms.ModelForm): <NEW_LINE> <INDENT> password = ReadOnlyPasswordHashField() <NEW_LINE> class Meta: <NEW_LINE> <INDENT> model = User <NEW_LINE> fields = ('email', 'password', 'birthdate', 'is_active') <NEW_LINE> <DEDENT> def clean_password(self): <NEW_LINE> <INDENT> return self.initial["password"] | A form for updating users. Includes all the fields on
the user, but replaces the password field with admin's
password hash display field. | 625990ab091ae35668706c6c |
class UserRegister(generics.CreateAPIView): <NEW_LINE> <INDENT> queryset = get_user_model().objects.all() <NEW_LINE> serializer_class = UserSerializer <NEW_LINE> def create(self, request, *args, **kwargs): <NEW_LINE> <INDENT> serializer = UserSerializer(data=request.data) <NEW_LINE> random_number = random_token.generate_verification_token(10) <NEW_LINE> if serializer.is_valid(): <NEW_LINE> <INDENT> serializer.validated_data['verification_token'] = random_number <NEW_LINE> self.perform_create(serializer) <NEW_LINE> headers = self.get_success_headers(serializer.data) <NEW_LINE> payload = jwt_payload_handler(serializer.data) <NEW_LINE> token = jwt_encode_handler(payload) <NEW_LINE> res = { 'status': 'success', 'message': messages.MESSAGES['REGISTER'], 'data': { 'token': token, } } <NEW_LINE> return Response(res, status=status.HTTP_201_CREATED, headers=headers) <NEW_LINE> <DEDENT> return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) | Class representing the view for creating a user | 625990ac187af65679d2ac11 |
class Chemistry(Enum): <NEW_LINE> <INDENT> tenX_v2 = "tenX_v2" <NEW_LINE> tenX_v3 = "tenX_v3" | Parameters for 10x chemistry used by the Optimus pipeline:
https://github.com/HumanCellAtlas/skylab/blob/optimus_v1.4.0/pipelines/optimus/Optimus.wdl#L39 | 625990acc4546d3d9def82ca |
class SettingsList(FormatMany): <NEW_LINE> <INDENT> def take_action(self, parsed_args): <NEW_LINE> <INDENT> super(SettingsList, self).take_action(parsed_args) <NEW_LINE> headers = ['Setting', 'Value'] <NEW_LINE> records = [] <NEW_LINE> for s, v in settings.all_settings().items(): <NEW_LINE> <INDENT> records.append([s, v]) <NEW_LINE> <DEDENT> return (tuple(headers), tuple(records)) | List current Tapis CLI settings
| 625990ac091ae35668706c8c |
class Register(RegisterFactory): <NEW_LINE> <INDENT> def identity_verify(self, identity: str) -> bool: <NEW_LINE> <INDENT> logging.info(f'verify identity {identity} ') <NEW_LINE> session = Session() <NEW_LINE> query = session.query(Auths).filter(Auths.identity == identity) <NEW_LINE> result = True <NEW_LINE> try: <NEW_LINE> <INDENT> query.one() <NEW_LINE> logging.info(f'identity {identity} exists') <NEW_LINE> <DEDENT> except NoResultFound: <NEW_LINE> <INDENT> result = False <NEW_LINE> logging.info(f'identity {identity} does not exist') <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> session.close() <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def name_verify(self, name: str) -> bool: <NEW_LINE> <INDENT> session = Session() <NEW_LINE> query = session.query(Users).filter(Users.name == name) <NEW_LINE> result = False <NEW_LINE> try: <NEW_LINE> <INDENT> query.one() <NEW_LINE> <DEDENT> except NoResultFound: <NEW_LINE> <INDENT> result = True <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> session.close() <NEW_LINE> <DEDENT> return result <NEW_LINE> <DEDENT> def register_user(self, name: str, password: str, identity: str) -> bool: <NEW_LINE> <INDENT> session = Session() <NEW_LINE> sha1_password = sha1_encrypt(password) <NEW_LINE> new_user = Users(name=name, password=sha1_password, remote_identity=identity) <NEW_LINE> result = False <NEW_LINE> try: <NEW_LINE> <INDENT> session.add(new_user) <NEW_LINE> session.commit() <NEW_LINE> result = True <NEW_LINE> <DEDENT> except Exception as e: <NEW_LINE> <INDENT> session.rollback() <NEW_LINE> logging.info(f'add user {name} failure -> {e}') <NEW_LINE> <DEDENT> finally: <NEW_LINE> <INDENT> session.close() <NEW_LINE> <DEDENT> return result | Concrete registrar | 625990ac187af65679d2ac19
class API: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._buffer = '' <NEW_LINE> self._servers = {} <NEW_LINE> return <NEW_LINE> <DEDENT> def delete(self): <NEW_LINE> <INDENT> for server in self._servers: <NEW_LINE> <INDENT> server.delete() <NEW_LINE> <DEDENT> self._servers = {} <NEW_LINE> self._buffer = '' <NEW_LINE> return <NEW_LINE> <DEDENT> def create_server(self, name): <NEW_LINE> <INDENT> pr, pw = os.pipe() <NEW_LINE> pid = os.fork() <NEW_LINE> if pid == 0: <NEW_LINE> <INDENT> r = open("/dev/null", "r") <NEW_LINE> w = open("/dev/null", "w") <NEW_LINE> sys.stderr.close() <NEW_LINE> sys.stdout.close() <NEW_LINE> sys.stdin.close() <NEW_LINE> sys.stdin = r <NEW_LINE> sys.stdout = w <NEW_LINE> sys.stderr = w <NEW_LINE> server = exsim.Server() <NEW_LINE> port = server.get_port() <NEW_LINE> os.close(pr) <NEW_LINE> pw = os.fdopen(pw, 'w') <NEW_LINE> pw.write(str(port)) <NEW_LINE> pw.flush() <NEW_LINE> pw.close() <NEW_LINE> server.run() <NEW_LINE> sys.exit(0) <NEW_LINE> <DEDENT> os.close(pw) <NEW_LINE> pr = os.fdopen(pr) <NEW_LINE> s = pr.read(10) <NEW_LINE> port = int(s) <NEW_LINE> pr.close() <NEW_LINE> sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) <NEW_LINE> sock.connect(('127.0.0.1', port)) <NEW_LINE> self._connected = True <NEW_LINE> logging.info("Connected") <NEW_LINE> s = Server(self, name, sock, pid) <NEW_LINE> self._servers[name] = s <NEW_LINE> return s <NEW_LINE> <DEDENT> def delete_server(self, name): <NEW_LINE> <INDENT> server = self._servers.get(name, None) <NEW_LINE> if not server: <NEW_LINE> <INDENT> raise KeyError("No such server: %s" % name) <NEW_LINE> <DEDENT> server.delete() <NEW_LINE> return <NEW_LINE> <DEDENT> def create_endpoint(self, name, port): <NEW_LINE> <INDENT> request = {"type": "create_endpoint", "name": name, "port": port} <NEW_LINE> reply = {} <NEW_LINE> api._send(request, reply) <NEW_LINE> result = reply["result"] <NEW_LINE> if not result: <NEW_LINE> <INDENT> raise Exception(reply["message"]) <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> def set_endpoint_engine(self, engine, endpoint): <NEW_LINE> <INDENT> request = {"type": "set_endpoint_engine", "engine": engine, "endpoint": endpoint} <NEW_LINE> reply = {} <NEW_LINE> api._send(request, reply) <NEW_LINE> result = reply["result"] <NEW_LINE> if not result: <NEW_LINE> <INDENT> raise Exception(reply["message"]) <NEW_LINE> <DEDENT> return <NEW_LINE> <DEDENT> def set_endpoint_protocol(self, endpoint, protocol): <NEW_LINE> <INDENT> request = {"type": "set_endpoint_protocol", "endpoint": endpoint, "protocol": protocol} <NEW_LINE> reply = {} <NEW_LINE> api._send(request, reply) <NEW_LINE> result = reply["result"] <NEW_LINE> if not result: <NEW_LINE> <INDENT> raise Exception(reply["message"]) <NEW_LINE> <DEDENT> return | Client API. | 625990acc4546d3d9def82d3 |
class Timer(): <NEW_LINE> <INDENT> def __init__(self, function): <NEW_LINE> <INDENT> self.function = function <NEW_LINE> <DEDENT> def __call__(self, *args, **kwargs): <NEW_LINE> <INDENT> start_time = time.time() <NEW_LINE> result = self.function(*args, **kwargs) <NEW_LINE> end_time = time.time() <NEW_LINE> print("실행시간은 {time}초입니다.".format(time=end_time-start_time)) <NEW_LINE> return result | Source: https://jupiny.com/2016/09/25/decorator-class/
__call__: a method that is executed when an instance of the class is called like a function | 625990ad091ae35668706ca2
class IPv6Address(_BaseV6, _BaseAddress): <NEW_LINE> <INDENT> __slots__ = ('_ip', '__weakref__') <NEW_LINE> def __init__(self, address): <NEW_LINE> <INDENT> if isinstance(address, _compat_int_types): <NEW_LINE> <INDENT> self._check_int_address(address) <NEW_LINE> self._ip = address <NEW_LINE> return <NEW_LINE> <DEDENT> if isinstance(address, bytes): <NEW_LINE> <INDENT> self._check_packed_address(address, 16) <NEW_LINE> bvs = _compat_bytes_to_byte_vals(address) <NEW_LINE> self._ip = _compat_int_from_byte_vals(bvs, 'big') <NEW_LINE> return <NEW_LINE> <DEDENT> addr_str = _compat_str(address) <NEW_LINE> if '/' in addr_str: <NEW_LINE> <INDENT> raise AddressValueError("Unexpected '/' in %r" % address) <NEW_LINE> <DEDENT> self._ip = self._ip_int_from_string(addr_str) <NEW_LINE> <DEDENT> @property <NEW_LINE> def packed(self): <NEW_LINE> <INDENT> return v6_int_to_packed(self._ip) <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_multicast(self): <NEW_LINE> <INDENT> return self in self._constants._multicast_network <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_reserved(self): <NEW_LINE> <INDENT> return any(self in x for x in self._constants._reserved_networks) <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_link_local(self): <NEW_LINE> <INDENT> return self in self._constants._linklocal_network <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_site_local(self): <NEW_LINE> <INDENT> return self in self._constants._sitelocal_network <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_private(self): <NEW_LINE> <INDENT> return any(self in net for net in self._constants._private_networks) <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_global(self): <NEW_LINE> <INDENT> return not self.is_private <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_unspecified(self): <NEW_LINE> <INDENT> return self._ip == 0 <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_loopback(self): <NEW_LINE> <INDENT> return self._ip == 1 <NEW_LINE> <DEDENT> @property <NEW_LINE> def ipv4_mapped(self): <NEW_LINE> <INDENT> if (self._ip >> 32) != 0xFFFF: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return IPv4Address(self._ip & 0xFFFFFFFF) <NEW_LINE> <DEDENT> @property <NEW_LINE> def teredo(self): <NEW_LINE> <INDENT> if (self._ip >> 96) != 0x20010000: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return (IPv4Address((self._ip >> 64) & 0xFFFFFFFF), IPv4Address(~self._ip & 0xFFFFFFFF)) <NEW_LINE> <DEDENT> @property <NEW_LINE> def sixtofour(self): <NEW_LINE> <INDENT> if (self._ip >> 112) != 0x2002: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> return IPv4Address((self._ip >> 80) & 0xFFFFFFFF) | Represent and manipulate single IPv6 Addresses. | 625990ad187af65679d2ac21 |
class SegmentationMetric(EvalMetric): <NEW_LINE> <INDENT> def __init__(self, nclass): <NEW_LINE> <INDENT> super(SegmentationMetric, self).__init__('pixAcc & mIoU') <NEW_LINE> self.nclass = nclass <NEW_LINE> self.lock = threading.Lock() <NEW_LINE> self.reset() <NEW_LINE> <DEDENT> def update(self, labels, preds): <NEW_LINE> <INDENT> def evaluate_worker(self, label, pred): <NEW_LINE> <INDENT> correct, labeled = batch_pix_accuracy( pred, label) <NEW_LINE> inter, union = batch_intersection_union( pred, label, self.nclass) <NEW_LINE> with self.lock: <NEW_LINE> <INDENT> self.total_correct += correct <NEW_LINE> self.total_label += labeled <NEW_LINE> self.total_inter += inter <NEW_LINE> self.total_union += union <NEW_LINE> <DEDENT> <DEDENT> if isinstance(preds, mx.nd.NDArray): <NEW_LINE> <INDENT> evaluate_worker(self, labels, preds) <NEW_LINE> <DEDENT> elif isinstance(preds, (list, tuple)): <NEW_LINE> <INDENT> threads = [threading.Thread(target=evaluate_worker, args=(self, label, pred), ) for (label, pred) in zip(labels, preds)] <NEW_LINE> for thread in threads: <NEW_LINE> <INDENT> thread.start() <NEW_LINE> <DEDENT> for thread in threads: <NEW_LINE> <INDENT> thread.join() <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def get(self): <NEW_LINE> <INDENT> pixAcc = 1.0 * self.total_correct / (np.spacing(1) + self.total_label) <NEW_LINE> IoU = 1.0 * self.total_inter / (np.spacing(1) + self.total_union) <NEW_LINE> mIoU = IoU.mean() <NEW_LINE> return pixAcc, mIoU <NEW_LINE> <DEDENT> def reset(self): <NEW_LINE> <INDENT> self.total_inter = 0 <NEW_LINE> self.total_union = 0 <NEW_LINE> self.total_correct = 0 <NEW_LINE> self.total_label = 0 | Computes pixAcc and mIoU metric scores
| 625990adc4546d3d9def82da |
class GetTopPeers(Object): <NEW_LINE> <INDENT> ID = 0xd4982db5 <NEW_LINE> def __init__(self, offset: int, limit: int, hash: int, correspondents: bool = None, bots_pm: bool = None, bots_inline: bool = None, phone_calls: bool = None, groups: bool = None, channels: bool = None): <NEW_LINE> <INDENT> self.correspondents = correspondents <NEW_LINE> self.bots_pm = bots_pm <NEW_LINE> self.bots_inline = bots_inline <NEW_LINE> self.phone_calls = phone_calls <NEW_LINE> self.groups = groups <NEW_LINE> self.channels = channels <NEW_LINE> self.offset = offset <NEW_LINE> self.limit = limit <NEW_LINE> self.hash = hash <NEW_LINE> <DEDENT> @staticmethod <NEW_LINE> def read(b: BytesIO, *args) -> "GetTopPeers": <NEW_LINE> <INDENT> flags = Int.read(b) <NEW_LINE> correspondents = True if flags & (1 << 0) else False <NEW_LINE> bots_pm = True if flags & (1 << 1) else False <NEW_LINE> bots_inline = True if flags & (1 << 2) else False <NEW_LINE> phone_calls = True if flags & (1 << 3) else False <NEW_LINE> groups = True if flags & (1 << 10) else False <NEW_LINE> channels = True if flags & (1 << 15) else False <NEW_LINE> offset = Int.read(b) <NEW_LINE> limit = Int.read(b) <NEW_LINE> hash = Int.read(b) <NEW_LINE> return GetTopPeers(offset, limit, hash, correspondents, bots_pm, bots_inline, phone_calls, groups, channels) <NEW_LINE> <DEDENT> def write(self) -> bytes: <NEW_LINE> <INDENT> b = BytesIO() <NEW_LINE> b.write(Int(self.ID, False)) <NEW_LINE> flags = 0 <NEW_LINE> flags |= (1 << 0) if self.correspondents is not None else 0 <NEW_LINE> flags |= (1 << 1) if self.bots_pm is not None else 0 <NEW_LINE> flags |= (1 << 2) if self.bots_inline is not None else 0 <NEW_LINE> flags |= (1 << 3) if self.phone_calls is not None else 0 <NEW_LINE> flags |= (1 << 10) if self.groups is not None else 0 <NEW_LINE> flags |= (1 << 15) if self.channels is not None else 0 <NEW_LINE> b.write(Int(flags)) <NEW_LINE> b.write(Int(self.offset)) <NEW_LINE> b.write(Int(self.limit)) <NEW_LINE> b.write(Int(self.hash)) <NEW_LINE> return b.getvalue() | Attributes:
ID: ``0xd4982db5``
Args:
offset: ``int`` ``32-bit``
limit: ``int`` ``32-bit``
hash: ``int`` ``32-bit``
correspondents (optional): ``bool``
bots_pm (optional): ``bool``
bots_inline (optional): ``bool``
phone_calls (optional): ``bool``
groups (optional): ``bool``
channels (optional): ``bool``
Raises:
:obj:`Error <pyrogram.Error>`
Returns:
Either :obj:`contacts.TopPeersNotModified <pyrogram.api.types.contacts.TopPeersNotModified>`, :obj:`contacts.TopPeers <pyrogram.api.types.contacts.TopPeers>` or :obj:`contacts.TopPeersDisabled <pyrogram.api.types.contacts.TopPeersDisabled>` | 625990adc4546d3d9def82de |
class DeploymentTimeout(DeploymentFailed): <NEW_LINE> <INDENT> pass | Timeout during deployment. | 625990ad091ae35668706cb4 |
class Address(models.Model): <NEW_LINE> <INDENT> address = models.TextField(max_length=100, null=False, blank=False) <NEW_LINE> def __unicode__(self): <NEW_LINE> <INDENT> return self.address <NEW_LINE> <DEDENT> class Meta: <NEW_LINE> <INDENT> verbose_name_plural = 'Addresses' | A class representing an address. | 625990ad187af65679d2ac2c |
class SchemaProperties(object): <NEW_LINE> <INDENT> def __init__(self, properties, schema, rootschema=None): <NEW_LINE> <INDENT> self._properties = properties <NEW_LINE> self._schema = schema <NEW_LINE> self._rootschema = rootschema or schema <NEW_LINE> <DEDENT> def __bool__(self): <NEW_LINE> <INDENT> return bool(self._properties) <NEW_LINE> <DEDENT> def __dir__(self): <NEW_LINE> <INDENT> return list(self._properties.keys()) <NEW_LINE> <DEDENT> def __getattr__(self, attr): <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> return self[attr] <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return super().__getattr__(attr) <NEW_LINE> <DEDENT> <DEDENT> def __getitem__(self, attr): <NEW_LINE> <INDENT> dct = self._properties[attr] <NEW_LINE> if 'definitions' in self._schema and 'definitions' not in dct: <NEW_LINE> <INDENT> dct = dict(definitions=self._schema['definitions'], **dct) <NEW_LINE> <DEDENT> return SchemaInfo(dct, self._rootschema) <NEW_LINE> <DEDENT> def __iter__(self): <NEW_LINE> <INDENT> return iter(self._properties) <NEW_LINE> <DEDENT> def items(self): <NEW_LINE> <INDENT> return ((key, self[key]) for key in self) <NEW_LINE> <DEDENT> def keys(self): <NEW_LINE> <INDENT> return self._properties.keys() <NEW_LINE> <DEDENT> def values(self): <NEW_LINE> <INDENT> return (self[key] for key in self) | A wrapper for properties within a schema | 625990ad091ae35668706cbc |
class PDF417(Barcode): <NEW_LINE> <INDENT> def __init__(self, data, rows=None, columns=None): <NEW_LINE> <INDENT> self.data = data <NEW_LINE> self.data_hex = binascii.hexlify(data.encode('ISO-8859-1')).decode('utf8') <NEW_LINE> self.rows = rows <NEW_LINE> self.columns = columns <NEW_LINE> <DEDENT> @property <NEW_LINE> def ps(self): <NEW_LINE> <INDENT> options = [] <NEW_LINE> if self.rows: <NEW_LINE> <INDENT> options.append('rows={0}'.format(self.rows)) <NEW_LINE> <DEDENT> if self.columns: <NEW_LINE> <INDENT> options.append('columns={0}'.format(self.columns)) <NEW_LINE> <DEDENT> ps_cmd = '\n\n' <NEW_LINE> ps_cmd += self.__doc__.format( hexdata = self.data_hex, options = ','.join(options) ) <NEW_LINE> return self.__lib__ + ps_cmd <NEW_LINE> <DEDENT> @property <NEW_LINE> def eps(self): <NEW_LINE> <INDENT> return self._eps() <NEW_LINE> <DEDENT> @property <NEW_LINE> def eps_filepath(self): <NEW_LINE> <INDENT> return self._eps(return_path=True) <NEW_LINE> <DEDENT> def _eps(self, return_path=False): <NEW_LINE> <INDENT> tmp = tempfile.TemporaryDirectory() <NEW_LINE> ps_fname = os.path.join(tmp.name, 'barcode.ps') <NEW_LINE> eps_fname = os.path.join(tmp.name, 'barcode.eps') <NEW_LINE> result = None <NEW_LINE> with open(ps_fname, 'w') as fh: <NEW_LINE> <INDENT> fh.write(self.ps) <NEW_LINE> <DEDENT> converter = syscall.Ps2Eps() <NEW_LINE> converter.call(ps_fname) <NEW_LINE> if return_path is True: <NEW_LINE> <INDENT> return eps_fname <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> with open(eps_fname, 'rb') as fh: <NEW_LINE> <INDENT> result = fh.read() <NEW_LINE> <DEDENT> tmp.cleanup() <NEW_LINE> return result | 0 0 moveto <{hexdata}> ({options}) /pdf417 /uk.co.terryburton.bwipp findresource exec | 625990ad091ae35668706cbe |
class Int(Saveable): <NEW_LINE> <INDENT> default = 0 <NEW_LINE> @staticmethod <NEW_LINE> def to_python(val): <NEW_LINE> <INDENT> return int(val) | A Saveable integer
Integers are a little different in JSON than Python. Strictly speaking
JSON only has "numbers", which can be integer or float, so there is a little to
do here to make sure we get an int in Python. | 625990ae091ae35668706cc2 |
class World: <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.things = {} <NEW_LINE> self.locations = {} <NEW_LINE> <DEDENT> def step(self): <NEW_LINE> <INDENT> for agent in [thing for thing in self.things if hasattr(thing, "ai")]: <NEW_LINE> <INDENT> perception = agent.percept() <NEW_LINE> action = agent.ai(perception) <NEW_LINE> agent.do_action(action) <NEW_LINE> <DEDENT> <DEDENT> def add_thing(self, thing, location): <NEW_LINE> <INDENT> if thing in self.things: <NEW_LINE> <INDENT> print("Can't add the same thing twice") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.things[thing] = numpy.array(location) <NEW_LINE> if not tuple(location) in self.locations: <NEW_LINE> <INDENT> self.locations[tuple(location)] = [] <NEW_LINE> <DEDENT> self.locations[tuple(location)].insert(0, thing) <NEW_LINE> <DEDENT> <DEDENT> def remove_thing(self, thing): <NEW_LINE> <INDENT> if thing not in self.things: <NEW_LINE> <INDENT> print("Can't remove what doesn't exist") <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> loc = tuple(self.things[thing]) <NEW_LINE> del self.things[thing] <NEW_LINE> self.locations[loc].remove(thing) <NEW_LINE> if not self.locations[loc]: <NEW_LINE> <INDENT> del self.locations[loc] <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> def move_thing(self, thing, loc): <NEW_LINE> <INDENT> if not self.things_at(loc, Obstacle): <NEW_LINE> <INDENT> old_loc = tuple(self.things[thing]) <NEW_LINE> self.things[thing] = numpy.array(loc) <NEW_LINE> self.locations[old_loc].remove(thing) <NEW_LINE> if not self.locations[old_loc]: <NEW_LINE> <INDENT> del self.locations[old_loc] <NEW_LINE> <DEDENT> if tuple(loc) not in self.locations: <NEW_LINE> <INDENT> self.locations[tuple(loc)] = [] <NEW_LINE> <DEDENT> self.locations[tuple(loc)].insert(0, thing) <NEW_LINE> <DEDENT> <DEDENT> def things_at(self, location, thing_class=Thing): <NEW_LINE> <INDENT> loc = tuple(location) <NEW_LINE> things = [thing for thing in self.locations.get(loc, []) if isinstance(thing, thing_class)] <NEW_LINE> return things <NEW_LINE> <DEDENT> def outside_world(self, location): <NEW_LINE> <INDENT> x, y = tuple(location) <NEW_LINE> return (x < min([key[0] for key in self.locations]) or y < min([key[1] for key in self.locations]) or x > max([key[0] for key in self.locations]) or y > max([key[1] for key in self.locations])) | The environment in which everything takes place. | 625990aec4546d3d9def82e9 |
class Input: <NEW_LINE> <INDENT> def __init__( self: Input, name: str, structs: List[Struct], inputs: List[Variable], subject: str, output: str, ) -> None: <NEW_LINE> <INDENT> self.name = name <NEW_LINE> self.structs = structs <NEW_LINE> self.input = inputs <NEW_LINE> self.subject = subject <NEW_LINE> self.output = output <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def from_dict(cls: T[Input], dic: Dict[str, Any]) -> Optional[Input]: <NEW_LINE> <INDENT> try: <NEW_LINE> <INDENT> variables_lookup = {} <NEW_LINE> variables_dicts = [] <NEW_LINE> structs = [] <NEW_LINE> if "structs" in dic: <NEW_LINE> <INDENT> for node in dic["structs"]: <NEW_LINE> <INDENT> struct = Struct.from_dict(node) <NEW_LINE> if struct is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> structs.append(struct) <NEW_LINE> for var in struct.fields: <NEW_LINE> <INDENT> if var.name in variables_lookup: <NEW_LINE> <INDENT> raise ValueError( f'Several struct fields are called "{var.name}"' ) <NEW_LINE> <DEDENT> variables_lookup[var.name] = var <NEW_LINE> <DEDENT> variables_dicts.extend(node["fields"]) <NEW_LINE> <DEDENT> <DEDENT> variables = [] <NEW_LINE> for node in dic["input"]: <NEW_LINE> <INDENT> variable = Variable.from_dict(node) <NEW_LINE> if variable is None: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> variables.append(variable) <NEW_LINE> if variable.name in variables_lookup: <NEW_LINE> <INDENT> raise ValueError( "Several variables or struct fields " f'are called "{variable.name}"' ) <NEW_LINE> <DEDENT> variables_lookup[variable.name] = variable <NEW_LINE> variables_dicts.append(node) <NEW_LINE> <DEDENT> for name in variables_lookup: <NEW_LINE> <INDENT> if not re.fullmatch("[a-zA-Z][a-zA-Z0-9 ]*", name): <NEW_LINE> <INDENT> raise ValueError( f'Variable name "{name}" should match [a-zA-Z][a-z0-9A-Z ]*' ) <NEW_LINE> <DEDENT> <DEDENT> set_constraints(variables_lookup, variables_dicts) <NEW_LINE> subject = dic["subject"] if "subject" in dic else "" <NEW_LINE> if "function_name" not in dic and "name" in dic: <NEW_LINE> <INDENT> print('WARNING: "name" is deprecated, use "function_name"') <NEW_LINE> dic["function_name"] = dic["name"] <NEW_LINE> <DEDENT> if not re.fullmatch("[a-z][a-z0-9 ]*", dic["function_name"]): <NEW_LINE> <INDENT> raise ValueError("Field `function_name` should match [a-z][a-z0-9 ]*") <NEW_LINE> <DEDENT> return cls(dic["function_name"], structs, variables, subject, dic["output"]) <NEW_LINE> <DEDENT> except KeyError: <NEW_LINE> <INDENT> return None <NEW_LINE> <DEDENT> <DEDENT> def get_struct(self: Input, name: str) -> Struct: <NEW_LINE> <INDENT> return next(x for x in self.structs if x.name == name) <NEW_LINE> <DEDENT> def get_var(self: Input, name: str) -> Variable: <NEW_LINE> <INDENT> return next(x for x in self.input if x.name == name) <NEW_LINE> <DEDENT> def get_all_vars(self: Input) -> List[List[Variable]]: <NEW_LINE> <INDENT> ret = [] <NEW_LINE> current = [] <NEW_LINE> for var in self.input: <NEW_LINE> <INDENT> current.append(var) <NEW_LINE> if var.format_style != FormatStyle.NO_ENDLINE: <NEW_LINE> <INDENT> ret.append(current) <NEW_LINE> current = [] <NEW_LINE> <DEDENT> <DEDENT> return ret | Represents the user input, parsed | 625990ae187af65679d2ac34 |
class ComputeInstancesDeleteAccessConfigRequest(_messages.Message): <NEW_LINE> <INDENT> accessConfig = _messages.StringField(1, required=True) <NEW_LINE> instance = _messages.StringField(2, required=True) <NEW_LINE> networkInterface = _messages.StringField(3, required=True) <NEW_LINE> project = _messages.StringField(4, required=True) <NEW_LINE> requestId = _messages.StringField(5) <NEW_LINE> zone = _messages.StringField(6, required=True) | A ComputeInstancesDeleteAccessConfigRequest object.
Fields:
accessConfig: The name of the access config to delete.
instance: The instance name for this request.
networkInterface: The name of the network interface.
project: Project ID for this request.
requestId: An optional request ID to identify requests. Specify a unique
request ID so that if you must retry your request, the server will know
to ignore the request if it has already been completed. For example,
consider a situation where you make an initial request and then the
request times out. If you make the request again with the same request
ID, the server can check if original operation with the same request ID
was received, and if so, will ignore the second request. This prevents
clients from accidentally creating duplicate commitments.
zone: The name of the zone for this request. | 625990ae187af65679d2ac35 |
def get_variable_days(self, year): <NEW_LINE> <INDENT> days = super(Hawick, self).get_variable_days(year) <NEW_LINE> first_monday = self.get_nth_weekday_in_month(year, 6, MON) <NEW_LINE> friday = first_monday + timedelta(days=4) <NEW_LINE> saturday = first_monday + timedelta(days=5) <NEW_LINE> days.append((friday, "Common Riding Day 1")) <NEW_LINE> days.append((saturday, "Common Riding Day 2")) <NEW_LINE> return days | Hawick | 625990aec4546d3d9def82ee |
class Discriminator(nn.Module): <NEW_LINE> <INDENT> def __init__(self, image_size=64, conv_dim=64, c_dim=5, repeat_num=5): <NEW_LINE> <INDENT> super(Discriminator, self).__init__() <NEW_LINE> layers = [] <NEW_LINE> layers.append(nn.Conv2d(3, conv_dim, kernel_size=4, stride=2, padding=1)) <NEW_LINE> layers.append(nn.LeakyReLU(0.01)) <NEW_LINE> curr_dim = conv_dim <NEW_LINE> for i in range(1, repeat_num): <NEW_LINE> <INDENT> layers.append(nn.Conv2d(curr_dim, curr_dim*2, kernel_size=4, stride=2, padding=1)) <NEW_LINE> layers.append(nn.LeakyReLU(0.01)) <NEW_LINE> curr_dim = curr_dim * 2 <NEW_LINE> <DEDENT> kernel_size = int(image_size / np.power(2, repeat_num)) <NEW_LINE> self.main = nn.Sequential(*layers) <NEW_LINE> self.main.to("cuda") <NEW_LINE> self.conv1 = nn.Conv2d(curr_dim, 1, kernel_size=4, stride=1, padding=1, bias=False) <NEW_LINE> self.conv2 = nn.Conv2d(curr_dim, c_dim, kernel_size=kernel_size, bias=False) <NEW_LINE> <DEDENT> def forward(self, x): <NEW_LINE> <INDENT> h = self.main(x) <NEW_LINE> out_src = self.conv1(h) <NEW_LINE> out_cls = self.conv2(h) <NEW_LINE> return out_src, out_cls.view(out_cls.size(0), out_cls.size(1)) | Discriminator network with PatchGAN. | 625990ae187af65679d2ac39 |
class DataAPI(object): <NEW_LINE> <INDENT> BASE_RPC_API_VERSION = '1.0' <NEW_LINE> def __init__(self): <NEW_LINE> <INDENT> super(DataAPI, self).__init__() <NEW_LINE> target = messaging.Target(topic=CONF.data_topic, version=self.BASE_RPC_API_VERSION) <NEW_LINE> self.client = rpc.get_client(target, version_cap='1.0') <NEW_LINE> <DEDENT> def migration_start(self, context, share_id, ignore_list, share_instance_id, dest_share_instance_id, migration_info_src, migration_info_dest, notify): <NEW_LINE> <INDENT> call_context = self.client.prepare(version='1.0') <NEW_LINE> call_context.cast( context, 'migration_start', share_id=share_id, ignore_list=ignore_list, share_instance_id=share_instance_id, dest_share_instance_id=dest_share_instance_id, migration_info_src=migration_info_src, migration_info_dest=migration_info_dest, notify=notify) <NEW_LINE> <DEDENT> def data_copy_cancel(self, context, share_id): <NEW_LINE> <INDENT> call_context = self.client.prepare(version='1.0') <NEW_LINE> call_context.call(context, 'data_copy_cancel', share_id=share_id) <NEW_LINE> <DEDENT> def data_copy_get_progress(self, context, share_id): <NEW_LINE> <INDENT> call_context = self.client.prepare(version='1.0') <NEW_LINE> return call_context.call(context, 'data_copy_get_progress', share_id=share_id) | Client side of the data RPC API.
API version history:
1.0 - Initial version,
Add migration_start(),
data_copy_cancel(),
data_copy_get_progress() | 625990aec4546d3d9def82f1 |
class TagPopularity(db.Model): <NEW_LINE> <INDENT> tag = db.ReferenceProperty(Tag, required = True) <NEW_LINE> date = db.DateProperty(required = True) <NEW_LINE> number_of_posts = db.IntegerProperty(required = True, default = 0) | The number of posts that refer to a given tag on a given date
Used for "popular tags" boxes | 625990aec4546d3d9def82f5 |
class Solution: <NEW_LINE> <INDENT> def reverseWords(self, s: str) -> str: <NEW_LINE> <INDENT> s_split = s.split(' ') <NEW_LINE> if len(s_split) < 1: <NEW_LINE> <INDENT> return s <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return ' '.join([item[::-1] for item in s_split]) <NEW_LINE> <DEDENT> <DEDENT> def reverseWords2(self, s: str) -> str: <NEW_LINE> <INDENT> s_split = s.split(' ') <NEW_LINE> if len(s_split) < 1: <NEW_LINE> <INDENT> return s <NEW_LINE> <DEDENT> return ' '.join([item[::-1] for item in s_split]) | Given a string, you need to reverse the character order of each word in the string, while still preserving the whitespace and the initial word order.
Approach: | 625990ae187af65679d2ac3e
class UseCase: <NEW_LINE> <INDENT> def execute(self, request): <NEW_LINE> <INDENT> if not request: <NEW_LINE> <INDENT> return ResponseFailure.build_from_invalid_request( request) <NEW_LINE> <DEDENT> try: <NEW_LINE> <INDENT> return self.process_request(request) <NEW_LINE> <DEDENT> except Exception as exc: <NEW_LINE> <INDENT> return ResponseFailure.build_system_error( '{}: {}'.format(exc.__class__.__name__, '{}'.format(exc))) <NEW_LINE> <DEDENT> <DEDENT> def process_request(self, request): <NEW_LINE> <INDENT> raise NotImplementedError( 'process_request() not implemented by UseCase class') | Abstract class for business logic of application.
Layer between domain and repo. | 625990af187af65679d2ac3f |
class ContainsArrow(Thing): <NEW_LINE> <INDENT> def __init__(self, lumpy, parent, child, **options): <NEW_LINE> <INDENT> self.lumpy = lumpy <NEW_LINE> self.parent = parent <NEW_LINE> self.child = child <NEW_LINE> underride(options, fill='orange', arrow=LAST) <NEW_LINE> self.options = options <NEW_LINE> <DEDENT> def draw(self, diag): <NEW_LINE> <INDENT> self.diag = diag <NEW_LINE> parent, child = self.parent, self.child <NEW_LINE> if not child.isdrawn(): <NEW_LINE> <INDENT> self.item = None <NEW_LINE> return <NEW_LINE> <DEDENT> canvas = diag.canvas <NEW_LINE> p = canvas.bbox(parent.boxitem).midleft() <NEW_LINE> q = canvas.bbox(child.boxitem).midright() <NEW_LINE> coords = [p, q] <NEW_LINE> self.item = canvas.line(coords, **self.options) <NEW_LINE> canvas.lower(self.item) | Represents a contains arrow.
Shows a has-a relationship between classes in a class diagram. | 625990afc4546d3d9def82f9 |
class Solution: <NEW_LINE> <INDENT> def insertNode(self, root, node): <NEW_LINE> <INDENT> if root is None: <NEW_LINE> <INDENT> return node <NEW_LINE> <DEDENT> if node.val < root.val: <NEW_LINE> <INDENT> root.left = self.insertNode(root.left, node) <NEW_LINE> <DEDENT> elif node.val > root.val: <NEW_LINE> <INDENT> root.right = self.insertNode(root.right, node) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> root.val = node.val <NEW_LINE> <DEDENT> return root | @param root: The root of the binary search tree.
@param node: insert this node into the binary search tree.
@return: The root of the new binary search tree. | 625990af091ae35668706ce8 |
class RetryConfig(_messages.Message): <NEW_LINE> <INDENT> maxAttempts = _messages.IntegerField(1, variant=_messages.Variant.INT32) <NEW_LINE> maxBackoff = _messages.StringField(2) <NEW_LINE> maxDoublings = _messages.IntegerField(3, variant=_messages.Variant.INT32) <NEW_LINE> maxRetryDuration = _messages.StringField(4) <NEW_LINE> minBackoff = _messages.StringField(5) <NEW_LINE> unlimitedAttempts = _messages.BooleanField(6) | Retry config. These settings determine how a failed task attempt is
retried.
Fields:
maxAttempts: The maximum number of attempts for a task. Cloud Tasks will
attempt the task `max_attempts` times (that is, if the first attempt
fails, then there will be `max_attempts - 1` retries). Must be > 0.
maxBackoff: A task will be [scheduled](Task.schedule_time) for retry
between min_backoff and max_backoff duration after it fails, if the
queue's RetryConfig specifies that the task should be retried. If
unspecified when the queue is created, Cloud Tasks will pick the
default. This field is output only for [pull
queues](google.cloud.tasks.v2beta2.PullTarget). `max_backoff` will be
truncated to the nearest second. This field has the same meaning as
[max_backoff_seconds in queue.yaml/xml](/appengine/docs/standard/python/
config/queueref#retry_parameters).
maxDoublings: The time between retries will double `max_doublings` times.
A task's retry interval starts at min_backoff, then doubles
`max_doublings` times, then increases linearly, and finally retries
retries at intervals of max_backoff up to max_attempts times. For
example, if min_backoff is 10s, max_backoff is 300s, and `max_doublings`
is 3, then the a task will first be retried in 10s. The retry interval
will double three times, and then increase linearly by 2^3 * 10s.
Finally, the task will retry at intervals of max_backoff until the task
has been attempted max_attempts times. Thus, the requests will retry at
10s, 20s, 40s, 80s, 160s, 240s, 300s, 300s, .... If unspecified when
the queue is created, Cloud Tasks will pick the default. This field is
output only for [pull queues](google.cloud.tasks.v2beta2.PullTarget).
This field has the same meaning as [max_doublings in queue.yaml/xml](/ap
pengine/docs/standard/python/config/queueref#retry_parameters).
maxRetryDuration: If positive, `max_retry_duration` specifies the time
limit for retrying a failed task, measured from when the task was first
attempted. Once `max_retry_duration` time has passed *and* the task has
been attempted max_attempts times, no further attempts will be made and
the task will be deleted. If zero, then the task age is unlimited. If
unspecified when the queue is created, Cloud Tasks will pick the
default. This field is output only for [pull
queues](google.cloud.tasks.v2beta2.PullTarget). `max_retry_duration`
will be truncated to the nearest second. This field has the same
meaning as [task_age_limit in queue.yaml/xml](/appengine/docs/standard/p
ython/config/queueref#retry_parameters).
minBackoff: A task will be [scheduled](Task.schedule_time) for retry
between min_backoff and max_backoff duration after it fails, if the
queue's RetryConfig specifies that the task should be retried. If
unspecified when the queue is created, Cloud Tasks will pick the
default. This field is output only for [pull
queues](google.cloud.tasks.v2beta2.PullTarget). `min_backoff` will be
truncated to the nearest second. This field has the same meaning as
[min_backoff_seconds in queue.yaml/xml](/appengine/docs/standard/python/
config/queueref#retry_parameters).
unlimitedAttempts: If true, then the number of attempts is unlimited. | 625990afc4546d3d9def82fc |
class ReadCSVTest(unittest.TestCase): <NEW_LINE> <INDENT> def setUp(self): <NEW_LINE> <INDENT> data = get_sample_image_csv_data() <NEW_LINE> self.header = data.pop(0) <NEW_LINE> self.sample_data = data <NEW_LINE> <DEDENT> def test_valid_csv_no_header_no_names_specified(self): <NEW_LINE> <INDENT> f = _make_csv_tempfile(self.sample_data) <NEW_LINE> actual = converter.read_csv(f.name, header=None) <NEW_LINE> self.assertEqual( list(actual.columns), list(input_schema.IMAGE_CSV_SCHEMA.get_input_keys())) <NEW_LINE> self.assertEqual(actual.values.tolist(), self.sample_data) <NEW_LINE> <DEDENT> def test_valid_csv_no_header_names_specified(self): <NEW_LINE> <INDENT> f = _make_csv_tempfile(self.sample_data) <NEW_LINE> actual = converter.read_csv(f.name, header=None, names=self.header) <NEW_LINE> self.assertEqual(list(actual.columns), self.header) <NEW_LINE> self.assertEqual(actual.values.tolist(), self.sample_data) <NEW_LINE> <DEDENT> def test_valid_csv_with_header_no_names_specified(self): <NEW_LINE> <INDENT> f = _make_csv_tempfile([self.header] + self.sample_data) <NEW_LINE> actual = converter.read_csv(f.name) <NEW_LINE> self.assertEqual(list(actual.columns), self.header) <NEW_LINE> self.assertEqual(actual.values.tolist(), self.sample_data) <NEW_LINE> <DEDENT> def test_valid_csv_with_header_names_specified(self): <NEW_LINE> <INDENT> f = _make_csv_tempfile([self.header] + self.sample_data) <NEW_LINE> actual = converter.read_csv(f.name, names=self.header, header=0) <NEW_LINE> self.assertEqual(list(actual.columns), self.header) <NEW_LINE> self.assertEqual(actual.values.tolist(), self.sample_data) | Tests `read_csv`. | 625990af091ae35668706cee |
class DashboardConfig(AppConfig): <NEW_LINE> <INDENT> name = 'dashboard' | Dashboard Django Application Meta Class | 625990af627d3e7fe0e08f31 |
class DataNotSent(Exception): <NEW_LINE> <INDENT> pass | Sonar is not sending information. | 625990af091ae35668706cf2 |
class ECLexer(CLexer): <NEW_LINE> <INDENT> name = 'eC' <NEW_LINE> aliases = ['ec'] <NEW_LINE> filenames = ['*.ec', '*.eh'] <NEW_LINE> mimetypes = ['text/x-echdr', 'text/x-ecsrc'] <NEW_LINE> tokens = { 'statements': [ (r'(virtual|class|private|public|property|import|delete|new|new0|' r'renew|renew0|define|get|set|remote|dllexport|dllimport|stdcall|' r'subclass|__on_register_module|namespace|using|typed_object|' r'any_object|incref|register|watch|stopwatching|firewatchers|' r'watchable|class_designer|class_fixed|class_no_expansion|isset|' r'class_default_property|property_category|class_data|' r'class_property|virtual|thisclass|' r'dbtable|dbindex|database_open|dbfield)\b', Keyword), (r'(uint|uint16|uint32|uint64|bool|byte|unichar|int64)\b', Keyword.Type), (r'(class)(\s+)', bygroups(Keyword, Text), 'classname'), (r'(null|value|this)\b', Name.Builtin), inherit, ], 'classname': [ (r'[a-zA-Z_]\w*', Name.Class, '#pop'), (r'\s*(?=>)', Text, '#pop'), ], } | For eC source code with preprocessor directives.
.. versionadded:: 1.5 | 625990afc4546d3d9def82ff |
class SearchEionet(BrowserView): <NEW_LINE> <INDENT> def __call__(self): <NEW_LINE> <INDENT> ldap_filter = self.request.form.get('filter') <NEW_LINE> agent = self.context._get_ldap_agent() <NEW_LINE> encres = json.dumps(agent.search_user(ldap_filter)) <NEW_LINE> self.request.response.setHeader("Content-Type", 'application/json') <NEW_LINE> self.request.response.setHeader("Content-Length", str(len(encres))) <NEW_LINE> return encres | ldap search for javascript frontend | 625990af091ae35668706cf4
class Clients(_ClientSelectCmd): <NEW_LINE> <INDENT> options = _ClientSelectCmd.options + [ Bcfg2.Options.BooleanOption( "-c", "--clean", help="Show only clean hosts"), Bcfg2.Options.BooleanOption( "-d", "--dirty", help="Show only dirty hosts"), Bcfg2.Options.BooleanOption( "--stale", help="Show hosts that haven't run in the last 24 hours")] <NEW_LINE> def run(self, setup): <NEW_LINE> <INDENT> result = [] <NEW_LINE> show_all = not setup.stale and not setup.clean and not setup.dirty <NEW_LINE> for client in self.get_clients(): <NEW_LINE> <INDENT> interaction = client.current_interaction <NEW_LINE> if (show_all or (setup.stale and interaction.isstale()) or (setup.clean and interaction.isclean()) or (setup.dirty and not interaction.isclean())): <NEW_LINE> <INDENT> result.append(client) <NEW_LINE> <DEDENT> <DEDENT> self.display(result, setup.fields) | Query hosts | 625990afc4546d3d9def8300 |
class BaseBlenderSettings(BaseSettingsWidget): <NEW_LINE> <INDENT> def __init__(self, parent=None, *args, **kwargs): <NEW_LINE> <INDENT> super(BaseBlenderSettings, self).__init__(parent=parent) <NEW_LINE> self.fileInput = Widgets.CueLabelLineEdit('Blender File:') <NEW_LINE> self.outputPath = Widgets.CueLabelLineEdit( 'Output Path (Optional):', tooltip='Optionally set the rendered output format. ' 'See the "-o" flag of {} for more info.'.format( Constants.BLENDER_OUTPUT_OPTIONS_URL)) <NEW_LINE> self.outputSelector = Widgets.CueSelectPulldown( 'Output Format', options=Constants.BLENDER_FORMATS, multiselect=False) <NEW_LINE> self.outputLayout = QtWidgets.QHBoxLayout() <NEW_LINE> self.setupUi() <NEW_LINE> self.setupConnections() <NEW_LINE> <DEDENT> def setupUi(self): <NEW_LINE> <INDENT> self.mainLayout.addWidget(self.fileInput) <NEW_LINE> self.mainLayout.addLayout(self.outputLayout) <NEW_LINE> self.outputLayout.addWidget(self.outputPath) <NEW_LINE> self.outputLayout.addWidget(self.outputSelector) <NEW_LINE> <DEDENT> def setupConnections(self): <NEW_LINE> <INDENT> self.fileInput.lineEdit.textChanged.connect(self.dataChanged.emit) <NEW_LINE> self.outputPath.lineEdit.textChanged.connect(self.dataChanged.emit) <NEW_LINE> <DEDENT> def setCommandData(self, commandData): <NEW_LINE> <INDENT> self.fileInput.setText(commandData.get('nukeFile', '')) <NEW_LINE> self.outputPath.setText(commandData.get('outputPath', '')) <NEW_LINE> self.outputSelector.setChecked(commandData.get('outputFormat', '')) <NEW_LINE> <DEDENT> def getCommandData(self): <NEW_LINE> <INDENT> return { 'blenderFile': self.fileInput.text(), 'outputPath': self.outputPath.text(), 'outputFormat': self.outputSelector.text() } | Standard Blender settings widget to be used from outside Blender. | 625990af627d3e7fe0e08f3f |
class Serializable(metaclass=ABCMeta): <NEW_LINE> <INDENT> @classmethod <NEW_LINE> def from_bytes(cls, data, index=0, **kwargs): <NEW_LINE> <INDENT> obj = cls(**kwargs) <NEW_LINE> index = obj.load_in_place(data, index) <NEW_LINE> return obj, index <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def to_bytes(self): <NEW_LINE> <INDENT> return bytes() <NEW_LINE> <DEDENT> @abstractmethod <NEW_LINE> def load_in_place(self, data, index=0): <NEW_LINE> <INDENT> return index <NEW_LINE> <DEDENT> def copy(self): <NEW_LINE> <INDENT> new = type(self)() <NEW_LINE> new.load_in_place(self.to_bytes()) <NEW_LINE> return new | A Serializable type is a type that can be converted to and from a bytes object.
Each type must define its own to_bytes and from_bytes methods. As each type clearly defines how it is stored,
in contrast to storing data with pickle, the data should be compact, easily loadable, and easily interpreted from
other programming languages. | 625990af091ae35668706cfe |
class Feed(MergeableDocumentElement, Source): <NEW_LINE> <INDENT> __tag__ = 'feed' <NEW_LINE> __xmlns__ = ATOM_XMLNS <NEW_LINE> entries = Child('entry', Entry, xmlns=ATOM_XMLNS, multiple=True) | Atom feed document, acting as a container for metadata and data
associated with the feed.
It corresponds to ``atom:feed`` element of :rfc:`4287#section-4.1.1`
(section 4.1.1). | 625990b0187af65679d2ac4f |
class Treater: <NEW_LINE> <INDENT> def __init__(self,path): <NEW_LINE> <INDENT> paths = [] <NEW_LINE> for path, _, files in os.walk(path): <NEW_LINE> <INDENT> for afile in files: <NEW_LINE> <INDENT> if afile.split('.')[1] == 'txt': <NEW_LINE> <INDENT> paths.append(f"{path}{afile}") <NEW_LINE> <DEDENT> <DEDENT> <DEDENT> self.paths = paths <NEW_LINE> <DEDENT> def treat_input(self, afile): <NEW_LINE> <INDENT> func_transition = [] <NEW_LINE> afile_treated = afile.split('\n') <NEW_LINE> alphabet = afile_treated[0].split(',') <NEW_LINE> alphabet = [alphabet[i].split('|')[0].strip() for i in range(0,len(alphabet))] <NEW_LINE> func_transition.append(alphabet) <NEW_LINE> for step in range(1,len(afile_treated)): <NEW_LINE> <INDENT> ftransition = afile_treated[step].split('-') <NEW_LINE> transitions = ftransition[1].split(',') <NEW_LINE> func_transition.append(transitions) <NEW_LINE> <DEDENT> return func_transition <NEW_LINE> <DEDENT> def get_transition_functions(self): <NEW_LINE> <INDENT> transition_funcs = [] <NEW_LINE> name_files = [] <NEW_LINE> for path in self.paths: <NEW_LINE> <INDENT> with open(path,'r') as outfile: <NEW_LINE> <INDENT> name_files.append(path.split('/')[1]) <NEW_LINE> transition_funcs.append(self.treat_input(outfile.read())) <NEW_LINE> <DEDENT> <DEDENT> return name_files, transition_funcs | This class is just for treating txt files.
class TestResponse: <NEW_LINE> <INDENT> @pytest.fixture <NEW_LINE> def mock_response(self) -> MagicMock: <NEW_LINE> <INDENT> mock_response = MagicMock(RequestsResponse) <NEW_LINE> mock_response.url = "https://fake.com" <NEW_LINE> mock_response.json.return_value = { "status_code": 1, "number_of_page_results": 1, "number_of_total_results": 1, "results": [{"id": 1, "description": "Great Game"}], } <NEW_LINE> return mock_response <NEW_LINE> <DEDENT> def test_response_factory(self, mock_response: MagicMock) -> None: <NEW_LINE> <INDENT> res = Response.from_response_data(mock_response) <NEW_LINE> assert res.uri == mock_response.url <NEW_LINE> mock_res_json = mock_response.json() <NEW_LINE> assert res.num_page_results == mock_res_json["number_of_page_results"] <NEW_LINE> assert res.results == mock_res_json["results"] <NEW_LINE> assert res.num_total_results == mock_res_json["number_of_total_results"] | Tests for the Response. | 625990b0627d3e7fe0e08f4b |
class MultiDict(object): <NEW_LINE> <INDENT> def __init__(self, initial, group_callback=None): <NEW_LINE> <INDENT> self.items_list = [] <NEW_LINE> self.items_dict = {} <NEW_LINE> for item in initial: <NEW_LINE> <INDENT> if group_callback is not None: <NEW_LINE> <INDENT> group = group_callback(item) <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> group = item.group <NEW_LINE> <DEDENT> self.add(item, group) <NEW_LINE> <DEDENT> <DEDENT> def __unicode__(self): <NEW_LINE> <INDENT> return unicode(self.items_list) + '\n' + unicode(self.items_dict) <NEW_LINE> <DEDENT> def __len__(self): <NEW_LINE> <INDENT> return len(self.items_list) <NEW_LINE> <DEDENT> def __getitem__(self, key): <NEW_LINE> <INDENT> if isinstance(key, int): <NEW_LINE> <INDENT> return self.items_list[key] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return self.items_dict[key] <NEW_LINE> <DEDENT> <DEDENT> def add(self, item, key): <NEW_LINE> <INDENT> self.items_list.append(item) <NEW_LINE> if not key in self.items_dict: <NEW_LINE> <INDENT> self.items_dict[key] = [item] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.items_dict[key].append(item) <NEW_LINE> <DEDENT> <DEDENT> def get(self, key, default): <NEW_LINE> <INDENT> if key in self.items_dict: <NEW_LINE> <INDENT> return self[key] <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> return default | Given a queryset or a list, group it by the group attribute of each item
but still preserve the original list navigation, giving a bidimensional
access. | 625990b0091ae35668706d0a |
class YetiBinarySensor(YetiEntity, BinarySensorEntity): <NEW_LINE> <INDENT> def __init__( self, api: Yeti, coordinator: DataUpdateCoordinator, name: str, description: BinarySensorEntityDescription, server_unique_id: str, ) -> None: <NEW_LINE> <INDENT> super().__init__(api, coordinator, name, server_unique_id) <NEW_LINE> self.entity_description = description <NEW_LINE> self._attr_name = f"{name} {description.name}" <NEW_LINE> self._attr_unique_id = f"{server_unique_id}/{description.key}" <NEW_LINE> <DEDENT> @property <NEW_LINE> def is_on(self) -> bool: <NEW_LINE> <INDENT> return cast(bool, self.api.data[self.entity_description.key] == 1) | Representation of a Goal Zero Yeti sensor. | 625990b0c4546d3d9def830c |
class Error(Schema): <NEW_LINE> <INDENT> codes: List[str] | **email_already_use**: an account is already registered with this email;
**password_too_short**: the entered password is too short;
**password_entirely_numeric**: the password consists only of digits;
**invalid_email**: an invalid email was entered
**invalid_full_name**: invalid full name format
**invalid_password**: incorrect password | 625990b0627d3e7fe0e08f51
class MissingRuntimeError(Error): <NEW_LINE> <INDENT> pass | Raised when the `runtime` field is omitted for a non-VM. | 625990b0187af65679d2ac57 |
class reify(): <NEW_LINE> <INDENT> def __init__(self, wrapped): <NEW_LINE> <INDENT> self.wrapped = wrapped <NEW_LINE> update_wrapper(self, wrapped) <NEW_LINE> <DEDENT> def __get__(self, inst, objtype=None): <NEW_LINE> <INDENT> if inst is None: <NEW_LINE> <INDENT> return self <NEW_LINE> <DEDENT> val = self.wrapped(inst) <NEW_LINE> setattr(inst, self.wrapped.__name__, val) <NEW_LINE> return val | From https://github.com/Pylons/pyramid and their BSD-style license | 625990b0091ae35668706d14 |
class CommandManager(cliff.commandmanager.CommandManager): <NEW_LINE> <INDENT> def __init__(self, namespace, convert_underscores=True): <NEW_LINE> <INDENT> self.group_list = [] <NEW_LINE> super(CommandManager, self).__init__(namespace, convert_underscores) <NEW_LINE> <DEDENT> def load_commands(self, namespace): <NEW_LINE> <INDENT> for ep in pkg_resources.iter_entry_points(namespace): <NEW_LINE> <INDENT> cmd_name = (ep.name.replace('_', ' ') if self.convert_underscores else ep.name) <NEW_LINE> self.commands[cmd_name] = ep <NEW_LINE> <DEDENT> self.group_list.append(namespace) <NEW_LINE> <DEDENT> def add_command(self, name, command_class): <NEW_LINE> <INDENT> if command_class is not None: <NEW_LINE> <INDENT> self.commands[name] = EntryPointWrapper(name, command_class) <NEW_LINE> return <NEW_LINE> <DEDENT> namespace = "ContrailCli" <NEW_LINE> for ep in pkg_resources.iter_entry_points(namespace): <NEW_LINE> <INDENT> if ep.name != name: <NEW_LINE> <INDENT> continue <NEW_LINE> <DEDENT> self.commands[name] = ep <NEW_LINE> <DEDENT> <DEDENT> def del_command(self, name): <NEW_LINE> <INDENT> if name in self.commands: <NEW_LINE> <INDENT> del self.commands[name] <NEW_LINE> <DEDENT> <DEDENT> def get_command_groups(self): <NEW_LINE> <INDENT> return self.group_list | Add additional functionality to cliff.CommandManager
Load additional command groups after initialization
Add _command_group() methods | 625990b0627d3e7fe0e08f59 |
class Card: <NEW_LINE> <INDENT> def __init__(self, rank, suit): <NEW_LINE> <INDENT> if rank not in all_ranks: <NEW_LINE> <INDENT> raise InvalidRank('Invalid rank input to constructor: {}' .format(rank)) <NEW_LINE> <DEDENT> if suit not in all_suits: <NEW_LINE> <INDENT> raise InvalidSuit('Invalid suit input to constructor: {}' .format(suit)) <NEW_LINE> <DEDENT> self.rank = rank <NEW_LINE> self.suit = suit <NEW_LINE> if suit in ['S', 'C']: <NEW_LINE> <INDENT> self.color = 'black' <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> self.color = 'red' <NEW_LINE> <DEDENT> <DEDENT> def __str__(self): <NEW_LINE> <INDENT> return f'{self.rank}{self.suit.lower()}' <NEW_LINE> <DEDENT> def goes_above(self, card): <NEW_LINE> <INDENT> assert isinstance(card, Card), 'Argument is not an instance of the ' 'card class' <NEW_LINE> if card.suit == self.suit: <NEW_LINE> <INDENT> if all_ranks.index(self.rank) - all_ranks.index(card.rank) == 1: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False <NEW_LINE> <DEDENT> def goes_below(self, card): <NEW_LINE> <INDENT> assert isinstance(card, Card), 'Argument is not an instance of the ' 'card class' <NEW_LINE> if card.color != self.color: <NEW_LINE> <INDENT> if all_ranks.index(card.rank) - all_ranks.index(self.rank) == 1: <NEW_LINE> <INDENT> return True <NEW_LINE> <DEDENT> <DEDENT> return False | Instances of this class represent a single card in a deck of 52. | 625990b0c4546d3d9def8313 |
class Player(Spectator): <NEW_LINE> <INDENT> @classproperty <NEW_LINE> def MODEL_RULES(cls): <NEW_LINE> <INDENT> rules = super(Player, cls).MODEL_RULES <NEW_LINE> rules.update({ 'hand': ('hand', DataModel, lambda x: x.model), 'team': ('team', str, None), 'abandoned': ('abandoned', bool, None) }) <NEW_LINE> return rules <NEW_LINE> <DEDENT> @classproperty <NEW_LINE> def INIT_DEFAULTS(cls): <NEW_LINE> <INDENT> defaults = super(Player, cls).INIT_DEFAULTS <NEW_LINE> defaults.update({ 'abandoned': False }) <NEW_LINE> return defaults <NEW_LINE> <DEDENT> @combomethod <NEW_LINE> def delete_cache(rec, data_store, uid=None): <NEW_LINE> <INDENT> if isinstance(rec, Player): <NEW_LINE> <INDENT> rec.hand.delete_cache(data_store) <NEW_LINE> <DEDENT> super(Player, rec).delete_cache(data_store, uid) <NEW_LINE> <DEDENT> @combomethod <NEW_LINE> def delete(rec, data_store, uid=None): <NEW_LINE> <INDENT> if isinstance(rec, Player): <NEW_LINE> <INDENT> rec.hand.delete(data_store) <NEW_LINE> <DEDENT> super(Player, rec).delete(data_store, uid) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def restore(cls, data_store, data_model, **kwargs): <NEW_LINE> <INDENT> kwargs.update({ 'team': data_model.team, 'hand': CardHolder.restore(data_store, data_model.hand), 'abandoned': data_model.abandoned }) <NEW_LINE> return super(Player, cls).restore(data_store, data_model, **kwargs) <NEW_LINE> <DEDENT> @classmethod <NEW_LINE> def new(cls, user, team, data_store=None, **kwargs): <NEW_LINE> <INDENT> kwargs.update({ 'team': team, 'hand': CardHolder.new(None, data_store, sort_method='suit') }) <NEW_LINE> return super(Player, cls).new(user, data_store, **kwargs) <NEW_LINE> <DEDENT> def __init__(self, *args, **kwargs): <NEW_LINE> <INDENT> super(Player, self).__init__(*args, **kwargs) <NEW_LINE> self.hand.on_change('*', ( lambda model, key, instruction: self._call_listener('hand', instruction, {'property': key}))) <NEW_LINE> <DEDENT> def new_user(self, user): <NEW_LINE> <INDENT> if not self.abandoned: <NEW_LINE> <INDENT> raise ValueError('Cannot change user of unabandoned player.') <NEW_LINE> <DEDENT> self.user = user <NEW_LINE> self.name = user.profile_name <NEW_LINE> self.abandoned = False | Game player.
Init Parameters:
user -- The user object.
team -- The player's team.
Properties:
:type team: str -- The player's team identifier. | 625990b0187af65679d2ac5b |
class DjContext(object): <NEW_LINE> <INDENT> def get_context(self): <NEW_LINE> <INDENT> raise NotImplementedError() | Represents an abstract class to be used to obtain a
Context object | 625990b0c4546d3d9def8315
class Interpolation: <NEW_LINE> <INDENT> POLY = 'poly' <NEW_LINE> LINEAR = 'linear' <NEW_LINE> SPLINE = 'spline' <NEW_LINE> def __init__(self, px: np.ndarray, py: np.ndarray, typ): <NEW_LINE> <INDENT> self.px = px <NEW_LINE> self.py = py <NEW_LINE> if typ == self.POLY: <NEW_LINE> <INDENT> self.ipfunc = _PolynomialInterpolation(px, py) <NEW_LINE> <DEDENT> elif typ == self.LINEAR: <NEW_LINE> <INDENT> self.ipfunc = _LinearInterpolation(px, py) <NEW_LINE> <DEDENT> elif typ == self.SPLINE: <NEW_LINE> <INDENT> self.ipfunc = _SplineInterpolation(px, py) <NEW_LINE> <DEDENT> <DEDENT> def __call__(self, x: float): <NEW_LINE> <INDENT> return self.ipfunc(x) | Interpolation algorithm | 625990b0c4546d3d9def8317
class TestParserRejectsEmptyCondition(_ATestParserRejects): <NEW_LINE> <INDENT> pass | Tests if the parser rejects an empty condition string. | 625990b1091ae35668706d25 |
class PipelineService(): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self._pipeline = PennPipeline() <NEW_LINE> <DEDENT> def parse_text(self, request): <NEW_LINE> <INDENT> text = request.in_ <NEW_LINE> rospy.loginfo("NLP pipeline request: %r" % text) <NEW_LINE> response = self._pipeline.parse_text(text) <NEW_LINE> rospy.loginfo("NLP pipeline response: %r" % response) <NEW_LINE> return response | Provides a connection to the pipeline via a ROS service. | 625990b1627d3e7fe0e08f67 |
class Timeservers(NodeConfigFileSection): <NEW_LINE> <INDENT> keys = ("OVIRT_NTP",) <NEW_LINE> @NodeConfigFileSection.map_and_update_defaults_decorator <NEW_LINE> def update(self, servers): <NEW_LINE> <INDENT> assert type(servers) is list <NEW_LINE> servers = [i.strip() for i in servers] <NEW_LINE> servers = [i for i in servers if i not in ["", None]] <NEW_LINE> validator = lambda v: valid.FQDNOrIPAddress() <NEW_LINE> map(validator, servers) <NEW_LINE> return {"OVIRT_NTP": ",".join(servers) or None } <NEW_LINE> <DEDENT> def retrieve(self): <NEW_LINE> <INDENT> cfg = dict(NodeConfigFileSection.retrieve(self)) <NEW_LINE> cfg.update({"servers": cfg["servers"].split(",") if cfg["servers"] else None }) <NEW_LINE> return cfg <NEW_LINE> <DEDENT> def transaction(self): <NEW_LINE> <INDENT> m = Timeservers().retrieve() <NEW_LINE> servers = m["servers"] <NEW_LINE> class WriteConfiguration(utils.Transaction.Element): <NEW_LINE> <INDENT> title = "Writing timeserver configuration" <NEW_LINE> def commit(self): <NEW_LINE> <INDENT> aug = AugeasWrapper() <NEW_LINE> p = "/files/etc/ntp.conf" <NEW_LINE> aug.remove(p, False) <NEW_LINE> aug.set(p + "/driftfile", "/var/lib/ntp/drift", False) <NEW_LINE> aug.set(p + "/includefile", "/etc/ntp/crypto/pw", False) <NEW_LINE> aug.set(p + "/keys", "/etc/ntp/keys", False) <NEW_LINE> aug.save() <NEW_LINE> config.network.timeservers(servers) <NEW_LINE> utils.fs.Config().persist("/etc/ntp.conf") <NEW_LINE> <DEDENT> <DEDENT> class ApplyConfiguration(utils.Transaction.Element): <NEW_LINE> <INDENT> title = "Restarting time services" <NEW_LINE> def commit(self): <NEW_LINE> <INDENT> system.service("ntpd", "stop", False) <NEW_LINE> system.service("ntpdate", "start", False) <NEW_LINE> system.service("ntpd", "start", False) <NEW_LINE> <DEDENT> <DEDENT> tx = utils.Transaction("Configuring timeservers") <NEW_LINE> tx.append(WriteConfiguration()) <NEW_LINE> tx.append(ApplyConfiguration()) <NEW_LINE> return tx | Configure timeservers
>>> from ovirt.node.utils import fs
>>> n = Timeservers(fs.FakeFs.File("dst"))
>>> servers = ["10.0.0.4", "10.0.0.5", "0.example.com"]
>>> n.update(servers)
>>> data = n.retrieve()
>>> all([servers[idx] == s for idx, s in enumerate(data["servers"])])
True
>>> n.update([])
>>> n.retrieve()
{'servers': None} | 625990b1c4546d3d9def8319 |
class StartOperation(VDOOperation): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> super(StartOperation, self).__init__(checkBinaries=True) <NEW_LINE> <DEDENT> @exclusivelock <NEW_LINE> def execute(self, args): <NEW_LINE> <INDENT> self.applyToVDOs(args, self._startVDO, readonly=False) <NEW_LINE> <DEDENT> @transactional <NEW_LINE> def _startVDO(self, args, vdo): <NEW_LINE> <INDENT> vdo.start(args.forceRebuild) <NEW_LINE> vdo.announceReady(False) | Implements the start command. | 625990b1627d3e7fe0e08f69 |
class CT_ChartLines(BaseOxmlElement): <NEW_LINE> <INDENT> spPr = ZeroOrOne("c:spPr", successors=()) | Used for `c:majorGridlines` and `c:minorGridlines`.
Specifies gridlines visual properties such as color and width. | 625990b1c4546d3d9def831a |
class CompleteCertificateResponse(AbstractModel): <NEW_LINE> <INDENT> def __init__(self): <NEW_LINE> <INDENT> self.CertificateId = None <NEW_LINE> self.RequestId = None <NEW_LINE> <DEDENT> def _deserialize(self, params): <NEW_LINE> <INDENT> self.CertificateId = params.get("CertificateId") <NEW_LINE> self.RequestId = params.get("RequestId") | CompleteCertificate response structure
| 625990b1187af65679d2ac63 |
class TestMpiInstitution(BaseTest): <NEW_LINE> <INDENT> def test_subprocess_called_correctly(self): <NEW_LINE> <INDENT> fix = MpiInstitution('var_components.nc', '/a') <NEW_LINE> fix.apply_fix() <NEW_LINE> self.mock_subprocess.assert_called_once_with( "ncatted -h -a institution,global,o,c," "'Max Planck Institute for Meteorology, Hamburg 20146, Germany' " "/a/var_components.nc", stderr=subprocess.STDOUT, shell=True ) | Test MpiInstitution | 625990b1627d3e7fe0e08f71 |
class Movie(Video): <NEW_LINE> <INDENT> MPAA_RATINGS = ['G', 'PG', 'PG-13', 'R', ''] <NEW_LINE> def __init__( self, title, synopsis, rating, poster_url, duration, starring, trailer_id, mpaa_rating, year ): <NEW_LINE> <INDENT> Video.__init__( self, title, synopsis, rating, poster_url, duration, starring ) <NEW_LINE> self.trailer_id = trailer_id <NEW_LINE> self.year = year <NEW_LINE> self._validate_rating(mpaa_rating) <NEW_LINE> <DEDENT> def _validate_rating(self, rating): <NEW_LINE> <INDENT> if rating.upper() in self.MPAA_RATINGS: <NEW_LINE> <INDENT> self.mpaa_rating = rating <NEW_LINE> <DEDENT> else: <NEW_LINE> <INDENT> raise ValueError('This is not a valid MPAA rating.') <NEW_LINE> <DEDENT> <DEDENT> def __repr__(self): <NEW_LINE> <INDENT> return '<Movie | {} | {}>' .format(self.title, self.mpaa_rating) | This class provides a way to store movie-related information.
Parameters:
----------
title (str) - Title of the movie
synopsis (str) - Short summary of the movie
rating (str) - Rating from viewers of the movie
poster_url (str) - URL pointing to the poster image of the movie
duration (int) - Length of the movie in minutes
starring (list[str]) - List of the first two actors in the cast
trailer_id (str) - ID for the IMDB trailer video
mpaa_rating (str) - MPAA rating for the movie
year (str) - The year in which the movie was released | 625990b1627d3e7fe0e08f73 |
class InvalidDurationError(Error): <NEW_LINE> <INDENT> pass | Raised when an invalid EXEMPTION_DURATION is provided. | 625990b1187af65679d2ac6a |