Dataset fields:
- repository_name: string, lengths 7–107
- function_path: string, lengths 4–190
- function_identifier: string, lengths 1–236
- language: string, 1 class
- function: string, lengths 9–647k
- docstring: string, lengths 5–488k
- function_url: string, lengths 71–285
- context: string, lengths 0–2.51M
- license: string, 5 classes
2ndwatch/cloudendure-python
cloudendure/cloudendure_api/models/cloud_endure_list_users_results.py
CloudEndureListUsersResults.items
python
def items(self, items): self._items = items
Sets the items of this CloudEndureListUsersResults. :param items: The items of this CloudEndureListUsersResults. # noqa: E501 :type: list[CloudEndureListUsersResult]
https://github.com/2ndwatch/cloudendure-python/blob/f81d1be1422b7c19adedb06c584803eaaa811919/cloudendure/cloudendure_api/models/cloud_endure_list_users_results.py#L58-L66
import pprint import re import six from cloudendure.cloudendure_api.models.cloud_endure_list_users_result import ( CloudEndureListUsersResult, ) class CloudEndureListUsersResults: """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = {"items": "list[CloudEndureListUsersResult]"} attribute_map = {"items": "items"} def __init__(self, items=None): self._items = None self.discriminator = None if items is not None: self.items = items @property def items(self): return self._items @items.setter
MIT License
oamg/leapp
leapp/snactor/utils.py
inspect
python
def inspect(tree_file, collected_types=None, type_infos=None):
    tree, filename = tree_file
    collected_types = collected_types or {}
    type_infos = type_infos or {}
    if not tree:
        return ['Unable to parse: {}'.format(filename)]
    errors = []
    for node in ast.walk(tree):
        if isinstance(node, ast.ClassDef):
            base_classes, err = get_base_classes(node.bases, filename)
            errors += err
            if base_classes & collected_types['models']:
                collected_types['models'].add(node.name)
                type_infos['models'].append({
                    'name': node.name,
                    'bases': list(base_classes),
                    'file': filename
                })
            if base_classes & collected_types['actors']:
                collected_types['actors'].add(node.name)
                type_infos['actors'].append({
                    'name': node.name,
                    'bases': list(base_classes),
                    'file': filename
                })
            if base_classes & collected_types['tags']:
                collected_types['tags'].add(node.name)
                type_infos['tags'].append({
                    'name': node.name,
                    'bases': list(base_classes),
                    'file': filename
                })
    return errors
Inspect and collect data from an AST tree.
https://github.com/oamg/leapp/blob/c8faeb1599edb3e7265b5451c55ce6c792519078/leapp/snactor/utils.py#L59-L93
from __future__ import print_function import ast import functools import itertools import os def print_section(data, section, pivot): type_data = data[section] print('{}'.format(section.capitalize())) for td in type_data: fp = format_file_path(pivot, td['file']) first_part = ' - {}({})'.format(td['name'], ', '.join(td['bases'])) pad = '.' * (60 - len(first_part)) print('{} {} {}'.format(first_part, pad, fp)) print('') def format_file_path(pivot, path): if not pivot or pivot == '.': pivot = os.getcwd() return os.path.relpath(path, pivot) def get_candidate_files(start='.'): for root, unused, files in os.walk(start): for f in files: if not f.endswith('py'): continue yield os.path.join(root, f) def ast_parse_file(filename): with open(filename, mode='r') as fp: try: return ast.parse(fp.read(), filename), filename except (SyntaxError, TypeError, ValueError): return None, filename def get_base_classes(bases, via): bases_set = set() errors = [] for base in bases: if isinstance(base, ast.Name): bases_set.add(base.id) else: errors.append('Unknown base: {} via {}'.format(base.__class__.__name__, via)) return bases_set, errors
Apache License 2.0
demisto/demisto-py
demisto_client/demisto_api/models/update_response.py
UpdateResponse.updated_ids
python
def updated_ids(self): return self._updated_ids
Gets the updated_ids of this UpdateResponse. # noqa: E501 :return: The updated_ids of this UpdateResponse. # noqa: E501 :rtype: list[str]
https://github.com/demisto/demisto-py/blob/95d29e07693d27c133f7fe6ef9da13e4b6dbf542/demisto_client/demisto_api/models/update_response.py#L77-L84
import pprint import re import six class UpdateResponse(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'not_updated': 'int', 'updated_ids': 'list[str]' } attribute_map = { 'not_updated': 'notUpdated', 'updated_ids': 'updatedIds' } def __init__(self, not_updated=None, updated_ids=None): self._not_updated = None self._updated_ids = None self.discriminator = None if not_updated is not None: self.not_updated = not_updated if updated_ids is not None: self.updated_ids = updated_ids @property def not_updated(self): return self._not_updated @not_updated.setter def not_updated(self, not_updated): self._not_updated = not_updated @property
Apache License 2.0
winhamwr/neckbeard
neckbeard/cloud_provisioners/aws/ec2.py
Ec2NodeDeployment.create_new_node
python
def create_new_node(self):
    logger.info("Launching new ec2 instance")
    ec2_instance = self.launch()
    if self.is_active:
        self.deployment.set_active_node('ec2', env.node_name, ec2_instance)
    else:
        self.deployment.set_pending_node('ec2', env.node_name, ec2_instance)
    ebs_debug = self.ebs_confs.get('debug', {})
    if not ebs_debug.get('attach_ebs_volumes', True):
        logger.warning("Debug ebs_conf found")
        logger.warning("Not attaching configured EBS volumes.")
        return self.get_node()
    vol_confs = self.ebs_confs.get('vols', {})
    self.attached_volumes = self._attach_ebs_vols(
        ec2_instance_id=ec2_instance.id,
        vol_confs=vol_confs,
        seed_ebs_snapshots=self.seed_ebs_snapshots)
    node = self.get_node()
    self.wait_until_created(node)
    node.refresh_boto_instance()
    env.host_string = node.boto_instance.public_dns_name
    self._ensure_ebs_vols_mounted(ec2_instance.id, vol_confs)
    return self.get_node()
Launch a new ec2 instance from the appropriate AMI and configure any EBS volumes and first-run-only configurations.
https://github.com/winhamwr/neckbeard/blob/2fbeda8f217c43936c941bb9ce3ba6d23be675f9/neckbeard/cloud_provisioners/aws/ec2.py#L169-L212
import ConfigParser import logging import os.path import time from collections import namedtuple, defaultdict from tempfile import NamedTemporaryFile from boto import ec2 from fabric.api import sudo, env, require, put, hide, run from fabric.contrib.files import upload_template from neckbeard.cloud_provisioners import BaseNodeDeployment from neckbeard.output import fab_out_opts LOG_DIR = '/var/log/pstat' AWS_METADATA_SERVICE = 'http://169.254.169.254/latest/meta-data/' LAUNCH_REFRESH = 15 logger = logging.getLogger('aws.ec2') fab_output_hides = fab_out_opts[logger.getEffectiveLevel()] fab_quiet = fab_output_hides + ['stderr'] EC2_FSTAB_TPL = ( """ # /etc/fstab: static file system information. # <device> <mount_point> <fs_type> <options> <dump_freq> <pass_num> {% for entry in fstab_entries -%} """ "{{ entry.device_name}} {{ entry.mount_point}} {{ entry.fs_type }} " "{{ entry.options }} {{ entry.dump_freq}} {{ entry.pass_num}}" """ """ "{%- endfor %}" ) fstabEntry = namedtuple( 'fstabEntry', [ 'device_name', 'mount_point', 'fs_type', 'options', 'dump_freq', 'pass_num', ] ) EC2_FSTAB_DEFAULTS = [ fstabEntry('proc', '/proc', 'proc', 'nodev,noexec,nosuid', '0', '0'), fstabEntry('LABEL=cloudimg-rootfs', '/', 'ext3', 'defaults', '0', '0'), fstabEntry( '/dev/sda2', '/mnt', 'auto', 'defaults,nobootwait,comment=cloudconfig', '0', '0', ), fstabEntry( '/dev/sda3', 'none', 'swap', 'sw,comment=cloudconfig', '0', '0', ), ] class Ec2NodeDeployment(BaseNodeDeployment): def __init__(self, *args, **kwargs): conf = kwargs['conf'] self.ebs_confs = conf.get('ebs', {'vols': {}}) super(Ec2NodeDeployment, self).__init__(*args, **kwargs) self.seed_ebs_snapshots = {} self.attached_volumes = {} def get_seed_data(self): if not self.seed_deployment: return require('aws_access_key_id') require('aws_secret_access_key') self.seed_ebs_snapshots = self._get_seed_ebs_snapshots() def wait_until_created(self, node): while not self.creation_complete(node): logging.info("Instance pending. Waiting %ss", LAUNCH_REFRESH) time.sleep(LAUNCH_REFRESH) def creation_complete(self, node): start_host_string = env.host_string start_host = env.host node.refresh_boto_instance() if node.boto_instance.state == 'running': timeout = 60 tries = 0 step = LAUNCH_REFRESH while tries * step <= timeout: node.refresh_boto_instance() env.host_string = node.boto_instance.public_dns_name env.host = node.boto_instance.public_dns_name tries += 1 try: with hide('everything'): sudo('uptime', pty=True) env.host_string = start_host_string env.host = start_host return True except: logger.info( "%s not ready for SSH. Waiting %ss", node.boto_instance.public_dns_name, LAUNCH_REFRESH) time.sleep(LAUNCH_REFRESH) env.host_string = start_host_string env.host = start_host return False
BSD 3-Clause New or Revised License
darchr/gem5art
artifact/gem5art/artifact/_artifactdb.py
getDBConnection
python
def getDBConnection(uri: str = '') -> ArtifactDB:
    global _db
    if _db is not None and not uri:
        return _db
    if not uri:
        uri = os.environ.get("GEM5ART_DB", _default_uri)
    typ = _getDBType(uri)
    _db = typ(uri)
    return _db
Returns the database connection. uri: a string representing the URI of the database; see _getDBType for details. If no URI is given, we use the value of the GEM5ART_DB environment variable, falling back to the default (mongodb://localhost:27017). If the connection has not been established, this will create a new connection. If the connection has been established, this will replace the connection if the uri input is non-empty.
https://github.com/darchr/gem5art/blob/4a0f47d1103ff68883bb0707cd888c6ac9ade775/artifact/gem5art/artifact/_artifactdb.py#L409-L434
from abc import ABC, abstractmethod import copy import json import os from pathlib import Path import shutil from typing import Any, Dict, Iterable, Union, Type, List, Tuple from urllib.parse import urlparse from uuid import UUID try: import gridfs from pymongo import MongoClient MONGO_SUPPORT = True except ModuleNotFoundError: MONGO_SUPPORT = False class ArtifactDB(ABC): @abstractmethod def __init__(self, uri: str) -> None: pass @abstractmethod def put(self, key: UUID, artifact: Dict[str,Union[str,UUID]]) -> None: pass @abstractmethod def upload(self, key: UUID, path: Path) -> None: pass @abstractmethod def __contains__(self, key: Union[UUID, str]) -> bool: pass @abstractmethod def get(self, key: Union[UUID,str]) -> Dict[str,str]: pass @abstractmethod def downloadFile(self, key: UUID, path: Path) -> None: pass def searchByName(self, name: str, limit: int) -> Iterable[Dict[str, Any]]: raise NotImplementedError() def searchByType(self, typ: str, limit: int) -> Iterable[Dict[str, Any]]: raise NotImplementedError() def searchByNameType(self, name: str, typ: str, limit: int) -> Iterable[Dict[str, Any]]: raise NotImplementedError() def searchByLikeNameType(self, name: str, typ: str, limit: int) -> Iterable[Dict[str, Any]]: raise NotImplementedError() class ArtifactMongoDB(ArtifactDB): def __init__(self, uri :str) -> None: self.db = MongoClient(host=uri, connect=False).artifact_database self.artifacts = self.db.artifacts self.fs = gridfs.GridFSBucket(self.db, disable_md5=True) def put(self, key: UUID, artifact: Dict[str,Union[str,UUID]]) -> None: assert artifact['_id'] == key self.artifacts.insert_one(artifact) def upload(self, key: UUID, path: Path) -> None: with open(path, 'rb') as f: self.fs.upload_from_stream_with_id(key, str(path), f) def __contains__(self, key: Union[UUID, str]) -> bool: if isinstance(key, UUID): count = self.artifacts.count_documents({'_id': key}, limit = 1) else: count = self.artifacts.count_documents({'hash': key}, limit = 1) return bool(count > 0) def get(self, key: Union[UUID,str]) -> Dict[str,str]: if isinstance(key, UUID): return self.artifacts.find_one({'_id': key}, limit = 1) else: return self.artifacts.find_one({'hash': key}, limit = 1) def downloadFile(self, key: UUID, path: Path) -> None: with open(path, 'wb') as f: self.fs.download_to_stream(key, f) def searchByName(self, name: str, limit: int) -> Iterable[Dict[str, Any]]: for d in self.artifacts.find({'name': name}, limit=limit): yield d def searchByType(self, typ: str, limit: int) -> Iterable[Dict[str, Any]]: for d in self.artifacts.find({'type':typ}, limit=limit): yield d def searchByNameType(self, name: str, typ: str, limit: int) -> Iterable[Dict[str, Any]]: for d in self.artifacts.find({'type':typ, 'name': name}, limit=limit): yield d def searchByLikeNameType(self, name: str, typ: str, limit: int) -> Iterable[Dict[str, Any]]: data = self.artifacts.find({'type': typ, 'name': {'$regex': '{}'.format(name)} }, limit=limit) for d in data: yield d class ArtifactFileDB(ArtifactDB): class ArtifactEncoder(json.JSONEncoder): def default(self, obj): if isinstance(obj, UUID): return str(obj) return ArtifactFileDB.ArtifactEncoder(self, obj) _json_file: Path _uuid_artifact_map: Dict[str, Dict[str,str]] _hash_uuid_map: Dict[str, List[str]] _storage_enabled: bool _storage_path: Path def __init__(self, uri: str) -> None: parsed_uri = urlparse(uri) self._json_file = Path(parsed_uri.netloc) / Path(parsed_uri.path) storage_path = os.environ.get("GEM5ART_STORAGE", "") self._storage_enabled = True if storage_path else False 
self._storage_path = Path(storage_path) if self._storage_enabled and self._storage_path.exists() and not self._storage_path.is_dir(): raise Exception(f"GEM5ART_STORAGE={storage_path} exists and is not" f" a directory") if self._storage_enabled: os.makedirs(self._storage_path, exist_ok = True) self._uuid_artifact_map, self._hash_uuid_map = self._load_from_file(self._json_file) def put(self, key: UUID, artifact: Dict[str,Union[str,UUID]]) -> None: assert artifact['_id'] == key assert isinstance(artifact['hash'], str) self.insert_artifact(key, artifact['hash'], artifact) def upload(self, key: UUID, path: Path) -> None: if not self._storage_enabled: return src_path = path dst_path = self._storage_path / str(key) if not dst_path.exists(): shutil.copy2(src_path, dst_path) def __contains__(self, key: Union[UUID, str]) -> bool: if isinstance(key, UUID): return self.has_uuid(key) return self.has_hash(key) def get(self, key: Union[UUID,str]) -> Dict[str,str]: artifact: List[Dict[str, str]] = [] if isinstance(key, UUID): artifact = list(self.get_artifact_by_uuid(key)) else: artifact = list(self.get_artifact_by_hash(key)) return artifact[0] def downloadFile(self, key: UUID, path: Path) -> None: assert(path.exists()) if not self._storage_enabled: return src_path = self._storage_path / str(key) dst_path = path shutil.copy2(src_path, dst_path) def _load_from_file(self, json_file: Path) -> Tuple[Dict[str, Dict[str,str]], Dict[str, List[str]]]: uuid_mapping: Dict[str, Dict[str,str]] = {} hash_mapping: Dict[str, List[str]] = {} if json_file.exists(): with open(json_file, 'r') as f: j = json.load(f) for an_artifact in j: the_uuid = an_artifact['_id'] the_hash = an_artifact['hash'] uuid_mapping[the_uuid] = an_artifact if not the_hash in hash_mapping: hash_mapping[the_hash] = [] hash_mapping[the_hash].append(the_uuid) return uuid_mapping, hash_mapping def _save_to_file(self, json_file: Path) -> None: content = list(self._uuid_artifact_map.values()) with open(json_file, 'w') as f: json.dump(content, f, indent=4, cls=ArtifactFileDB.ArtifactEncoder) def has_uuid(self, the_uuid: UUID) -> bool: return str(the_uuid) in self._uuid_artifact_map def has_hash(self, the_hash: str) -> bool: return the_hash in self._hash_uuid_map def get_artifact_by_uuid(self, the_uuid: UUID) -> Iterable[Dict[str,str]]: uuid_str = str(the_uuid) if not uuid_str in self._uuid_artifact_map: return yield self._uuid_artifact_map[uuid_str] def get_artifact_by_hash(self, the_hash: str) -> Iterable[Dict[str,str]]: if not the_hash in self._hash_uuid_map: return for the_uuid in self._hash_uuid_map[the_hash]: yield self._uuid_artifact_map[the_uuid] def insert_artifact(self, the_uuid: UUID, the_hash: str, the_artifact: Dict[str,Union[str,UUID]]) -> bool: uuid_str = str(the_uuid) if uuid_str in self._uuid_artifact_map: return False artifact_copy = copy.deepcopy(the_artifact) artifact_copy['_id'] = str(artifact_copy['_id']) self._uuid_artifact_map[uuid_str] = artifact_copy if not the_hash in self._hash_uuid_map: self._hash_uuid_map[the_hash] = [] self._hash_uuid_map[the_hash].append(uuid_str) self._save_to_file(self._json_file) return True def find_exact(self, attr: Dict[str, str], limit: int) -> Iterable[Dict[str, Any]]: count = 0 if count >= limit: return for artifact in self._uuid_artifact_map.values(): if attr.items() <= artifact.items(): yield artifact _db = None if MONGO_SUPPORT: _default_uri = "mongodb://localhost:27017" else: _default_uri = "file://db.json" _default_storage = "" _db_schemes : Dict[str, Type[ArtifactDB]] = { 'file': 
ArtifactFileDB } if MONGO_SUPPORT: _db_schemes['mongodb'] = ArtifactMongoDB def _getDBType(uri: str) -> Type[ArtifactDB]: result = urlparse(uri) if result.scheme in _db_schemes: return _db_schemes[result.scheme] else: raise Exception(f"Cannot find DB type for {uri}")
BSD 3-Clause New or Revised License
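A minimal usage sketch for the getDBConnection helper shown above; the file:// URI, the db.json path, and the import path are illustrative assumptions rather than part of the record:

import os
from gem5art.artifact import getDBConnection  # import path assumed

# Point gem5art at a local JSON-backed database (path is illustrative).
os.environ["GEM5ART_DB"] = "file://db.json"

db = getDBConnection()       # first call creates and caches the connection
same_db = getDBConnection()  # later calls with an empty URI reuse the cached object
# Passing a non-empty URI would replace the cached connection instead.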
kelvinguu/lang2program
strongsup/evaluation.py
FmtD
python
def FmtD(x):
    if abs(x - round(x)) < 1e-40:
        return str(int(x))
    if abs(x) < 1e-3:
        return "{:.2e}".format(x)
    return "{:.3f}".format(x)
Return a nicely formatted string for number x.
https://github.com/kelvinguu/lang2program/blob/dd4eb8439d29f0f72dd057946287551ed0f046a3/strongsup/evaluation.py#L228-L234
from collections import OrderedDict from codecs import open from math import sqrt import json import numpy as np import os from scipy.stats import norm class NumberSequenceStat(object): def __init__(self): self.s_count = 0 self.s_min = float('inf') self.s_max = float('-inf') self.s_min_key = None self.s_max_key = None self.s_sum = 0. self.s_sumsq = 0. def add(self, x, key=None): if isinstance(x, NumberSequenceStat): assert not key self.s_count += x.s_count self.s_sum += x.s_sum self.s_sumsq += x.s_sumsq if x.s_min < self.s_min: self.s_min = x.s_min self.s_min_key = x.s_min_key if x.s_max > self.s_max: self.s_max = x.s_max self.s_max_key = x.s_max_key elif isinstance(x, (list, tuple)): x = [float(u) for u in x] self.s_count += len(x) self.s_sum += sum(x) self.s_sumsq += sum(u*u for u in x) min_x = min(x) if min_x < self.s_min: self.s_min = min_x self.s_min_key = key max_x = max(x) if max_x > self.s_max: self.s_max = max_x self.s_max_key = key else: x = float(x) self.s_count += 1 self.s_sum += x self.s_sumsq += x * x if x < self.s_min: self.s_min = x self.s_min_key = key if x > self.s_max: self.s_max = x self.s_max_key = key @property def count(self): return self.s_count @property def mean(self): return self.s_sum / self.s_count @property def sum(self): return self.s_sum @property def variance(self): return self.s_sumsq / self.s_count - self.mean ** 2 @property def stddev(self): return self.variance ** .5 @property def min(self): return self.s_min @property def max(self): return self.s_max @property def min_key(self): return self.s_min_key @property def max_key(self): return self.s_max_key @property def range(self): return self.s_max - self.s_min def __str__(self): if not self.s_count: return "NaN (0)" return "{min}{min_key} << {mean} >> {max}{max_key} ({std} std {count} count)".format( min=FmtD(self.s_min), min_key=('@' + self.s_min_key if self.s_min_key else ''), mean=FmtD(self.mean), std=FmtD(self.stddev), max=FmtD(self.s_max), max_key=('@' + self.s_max_key if self.s_max_key else ''), count=self.s_count) def as_dict(self): if not self.s_count: return {'count': 0} return { 'count': self.s_count, 'min': self.s_min, 'mean': self.mean, 'stddev': self.stddev, 'max': self.s_max, 'sum': self.s_sum, } class BernoulliSequenceStat(NumberSequenceStat): def __init__(self, number_seq_stat=None): super(BernoulliSequenceStat, self).__init__() if number_seq_stat: self.add(number_seq_stat) def __str__(self): left, right = self.confidence_interval(0.05) ci_str = " 95% CI = [{} - {}]".format(left, right) s = super(BernoulliSequenceStat, self).__str__() return s + ci_str @classmethod def _confidence_interval_by_z_wald(cls, p_hat, n, z): increment = z * sqrt(p_hat * (1 - p_hat) / n) return p_hat - increment, p_hat + increment @classmethod def _confidence_interval_by_z_wilson(cls, p_hat, n, z): z2 = z**2 n2 = n**2 numerator = lambda sign: p_hat + z2 / (2 * n) + sign * z * sqrt(p_hat * (1 - p_hat) / n + z2 / (4 * n2)) denominator = 1 + z2 / n left = numerator(-1.) / denominator right = numerator(1.) 
/ denominator return left, right @classmethod def _confidence_interval_by_alpha(cls, p_hat, n, alpha, method='wald'): prob = 1 - 0.5 * alpha z = norm.ppf(prob) compute_ci = cls._confidence_interval_by_z_wald if method == 'wald' else cls._confidence_interval_by_z_wilson return compute_ci(p_hat, n, z) def confidence_interval(self, alpha): p_hat = self.mean n = self.count return self._confidence_interval_by_alpha(p_hat, n, alpha) def test_bernoulli_confidence_interval(method='wilson', trials=1000, ps=None): if ps is None: ps = np.arange(0.05, 0.95, 0.05) n = 200 alpha = 0.1 alpha_hats = [] for p in ps: misses = 0. for _ in range(int(trials)): samples = np.random.random(n) <= p p_hat = np.mean(samples) left, right = BernoulliSequenceStat._confidence_interval_by_alpha(p_hat, n, alpha, method=method) if p < left or p > right: misses += 1 alpha_hat = misses / trials alpha_hats.append(alpha_hat) import matplotlib.pyplot as plt plt.plot(ps, alpha_hats)
Apache License 2.0
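To make the thresholds in FmtD concrete, a few illustrative calls (the input values are arbitrary):

FmtD(3.0)     # '3'        -> near-integers are printed without a decimal part
FmtD(0.0004)  # '4.00e-04' -> magnitudes below 1e-3 use scientific notation
FmtD(0.5)     # '0.500'    -> everything else gets three decimal places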
brikwerk/nxbt
nxbt/tui.py
ControllerTUI.toggle_auto_keypress_deactivation
python
def toggle_auto_keypress_deactivation(self, toggle): self.auto_keypress_deactivation = toggle
Toggles whether or not the ControllerTUI should deactivate a control after a period of time. :param toggle: A True/False value that toggles auto keypress deactivation :type toggle: bool
https://github.com/brikwerk/nxbt/blob/5a7a8c0ff5ee1f075bef8924051de462179a9dd5/nxbt/tui.py#L81-L90
import os import time import psutil from collections import deque import multiprocessing from blessed import Terminal from .nxbt import Nxbt, PRO_CONTROLLER class LoadingSpinner(): SPINNER_CHARS = ['■ □ □ □', '□ ■ □ □', '□ □ ■ □', '□ □ □ ■', '□ □ □ ■', '□ □ ■ □', '□ ■ □ □', '■ □ □ □'] def __init__(self): self.creation_time = time.perf_counter() self.last_update_time = self.creation_time self.current_char_index = 0 def get_spinner_char(self): current_time = time.perf_counter() delta = current_time - self.last_update_time if delta > 0.07: self.last_update_time = current_time if self.current_char_index == 7: self.current_char_index = 0 else: self.current_char_index += 1 return self.SPINNER_CHARS[self.current_char_index] class ControllerTUI(): CONTROLS = { "ZL": "◿□□□□", "L": "◿□□□□", "ZR": "□□□□◺", "R": "□□□□◺", "LS_UP": ".─.", "LS_LEFT": "(", "LS_RIGHT": ")", "LS_DOWN": "`─'", "RS_UP": ".─.", "RS_LEFT": "(", "RS_RIGHT": ")", "RS_DOWN": "`─'", "DPAD_UP": "△", "DPAD_LEFT": "◁", "DPAD_RIGHT": "▷", "DPAD_DOWN": "▽", "MINUS": "◎", "PLUS": "◎", "HOME": "□", "CAPTURE": "□", "A": "○", "B": "○", "X": "○", "Y": "○", } def __init__(self, term): self.term = term self.DEFAULT_CONTROLS = self.CONTROLS.copy() self.CONTROL_RELEASE_TIMERS = self.CONTROLS.copy() for control in self.CONTROL_RELEASE_TIMERS.keys(): self.CONTROL_RELEASE_TIMERS[control] = False self.auto_keypress_deactivation = True self.remote_connection = False
MIT License
alterway/anonymization
anonymization/Anonymization.py
Anonymization.replace_all
python
def replace_all(self, text: str, matchs: Iterable[str], provider: str) -> str:
    for match in matchs:
        text = text.replace(match, self.getFake(provider, match))
    return text
Replace all occurrences of matchs in text using a Faker provider.
https://github.com/alterway/anonymization/blob/57e6c20f8c97e902f3513b5adfdbc211791aaef0/anonymization/Anonymization.py#L28-L35
from collections import defaultdict
from typing import Iterable, Pattern, Callable, List, Any
import re

from faker import Factory

from .lib.diff_match_patch import diff_match_patch


class Anonymization:
    def __init__(self, locale: str):
        self.locale = locale
        self.faker = Factory.create(locale)
        self.anonDicts = {}

    def getFake(self, provider: str, match: str) -> str:
        if not provider in self.anonDicts:
            self.anonDicts[provider] = defaultdict(getattr(self.faker, provider))
        return self.anonDicts[provider][match]
MIT License
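A short sketch of how Anonymization.replace_all might be driven, assuming Faker's standard 'email' provider; the sample text and addresses are invented:

anon = Anonymization("en_US")
text = "Contact alice@example.com or bob@example.com for access."
emails = ["alice@example.com", "bob@example.com"]

# Each distinct match is mapped to a stable fake value from the 'email' provider.
anonymized = anon.replace_all(text, emails, "email")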
ajenti/ajenti
ajenti-core/aj/util/strings.py
str_fsize
python
def str_fsize(sz):
    if sz < 1024:
        return '%.1f bytes' % sz
    sz /= 1024.0
    if sz < 1024:
        return '%.1f KB' % sz
    sz /= 1024.0
    if sz < 1024:
        return '%.1f MB' % sz
    sz /= 1024.0
    if sz < 1024:
        return '%.1f GB' % sz
    sz /= 1024.0
    return '%.1f TB' % sz
Formats a file size as a human-readable string (e.g., 1.2 MB).
https://github.com/ajenti/ajenti/blob/61ad358f03f0ae6ef29a5d8b0a00f7cc6e54c56a/ajenti-core/aj/util/strings.py#L1-L17
MIT License
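A quick illustration of the size thresholds handled by str_fsize (the byte counts are arbitrary):

str_fsize(512)            # '512.0 bytes'
str_fsize(2048)           # '2.0 KB'
str_fsize(5 * 1024 ** 2)  # '5.0 MB'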
google/python-spanner-orm
spanner_orm/model.py
Model._execute_write
python
def _execute_write(
    cls,
    db_api: Callable[..., Any],
    transaction: Optional[spanner_transaction.Transaction],
    dictionaries: Iterable[Dict[str, Any]],
) -> None:
    columns, values = None, []
    for dictionary in dictionaries:
        invalid_keys = set(dictionary.keys()) - set(cls.columns)
        if invalid_keys:
            raise error.SpannerError('Invalid keys set on {model}: {keys}'.format(
                model=cls.__name__, keys=invalid_keys))
        if columns is None:
            columns = dictionary.keys()
        if columns != dictionary.keys():
            raise error.SpannerError(
                'Attempted to update rows with different sets of keys')
        for key, value in dictionary.items():
            cls.validate_value(key, value, error.SpannerError)
        values.append([dictionary[column] for column in columns])
    args = [cls.table, columns, values]
    if transaction is not None:
        return db_api(transaction, *args)
    else:
        return cls.spanner_api().run_write(db_api, *args)
Validates all write value types and commits write to Spanner.
https://github.com/google/python-spanner-orm/blob/2d73ce62e71459fc3499e1932023704fb35ffc08/spanner_orm/model.py#L574-L602
import collections import copy from typing import Any, Callable, Dict, Iterable, List, Optional, Type, TypeVar, Union from spanner_orm import api from spanner_orm import condition from spanner_orm import error from spanner_orm import foreign_key_relationship from spanner_orm import field from spanner_orm import index from spanner_orm import metadata from spanner_orm import query from spanner_orm import registry from spanner_orm import relationship from spanner_orm import table_apis from google.api_core import exceptions from google.cloud import spanner from google.cloud.spanner_v1 import transaction as spanner_transaction T = TypeVar('T') class ModelMetaclass(type): meta: metadata.ModelMetadata def __new__(mcs, name: str, bases: Any, attrs: Dict[str, Any], **kwargs: Any): parents = [base for base in bases if isinstance(base, ModelMetaclass)] model_metadata = metadata.ModelMetadata() for parent in parents: if 'meta' in vars(parent): model_metadata.add_metadata(parent.meta) non_model_attrs = {} for key, value in attrs.items(): if key == '__table__': model_metadata.table = value elif key == '__interleaved__': model_metadata.interleaved = value if isinstance(value, field.Field): model_metadata.add_field(key, value) elif isinstance(value, index.Index): model_metadata.add_index(key, value) elif isinstance(value, relationship.Relationship): model_metadata.add_relation(key, value) elif isinstance( value, foreign_key_relationship.ForeignKeyRelationship, ): model_metadata.add_foreign_key_relation(key, value) else: non_model_attrs[key] = value cls = super().__new__(mcs, name, bases, non_model_attrs, **kwargs) if model_metadata.table: model_metadata.model_class = cls model_metadata.finalize() cls.meta = model_metadata return cls def __getattr__( cls, name: str ) -> Union[field.Field, relationship.Relationship, foreign_key_relationship.ForeignKeyRelationship, index.Index]: if name in cls.fields: return cls.fields[name] elif name in cls.relations: return cls.relations[name] elif name in cls.foreign_key_relations: return cls.foreign_key_relations[name] elif name in cls.indexes: return cls.indexes[name] raise AttributeError(name) @property def column_prefix(cls) -> str: return cls.table.split('.')[-1] @property def columns(cls) -> List[str]: return cls.meta.columns @property def indexes(cls) -> Dict[str, index.Index]: return cls.meta.indexes @property def interleaved(cls) -> Optional[Type['Model']]: if cls.meta.interleaved: return registry.model_registry().get(cls.meta.interleaved) return None @property def primary_keys(cls) -> List[str]: return cls.meta.primary_keys @property def relations(cls) -> Dict[str, relationship.Relationship]: return cls.meta.relations @property def foreign_key_relations( cls) -> Dict[str, foreign_key_relationship.ForeignKeyRelationship]: return cls.meta.foreign_key_relations @property def fields(cls) -> Dict[str, field.Field]: return cls.meta.fields @property def table(cls): return cls.meta.table def validate_value(cls, field_name, value, error_type=error.SpannerError): try: cls.fields[field_name].validate(value) except error.ValidationError as ex: context = f'Validation error for field {field_name!r}' raise error_type((f'{context}: {ex.args[0]}' if ex.args else context), *ex.args[1:]) CallableReturn = TypeVar('CallableReturn') class Model(metaclass=ModelMetaclass): def __init__(self, values: Dict[str, Any], persisted: bool = False, skip_validation: bool = False): start_values = {} self.__dict__['start_values'] = start_values self.__dict__['_persisted'] = persisted if not 
persisted and not skip_validation: missing_keys = set(self._primary_keys) - set(values.keys()) if missing_keys: raise error.SpannerError( 'All primary keys must be specified. Missing: {keys}'.format( keys=missing_keys)) for column in self._columns: self._metaclass.validate_value(column, values.get(column), ValueError) for column in self._columns: value = values.get(column) start_values[column] = copy.copy(value) self.__dict__[column] = value for relation in self._relations: if relation in values: self.__dict__[relation] = values[relation] for foreign_key_relation in self._foreign_key_relations: if foreign_key_relation in values: self.__dict__[foreign_key_relation] = values[foreign_key_relation] def __eq__(self, other: Any) -> Union[bool, type(NotImplemented)]: if type(self) != type(other): return NotImplemented return self.values == other.values @classmethod def spanner_api(cls) -> api.SpannerApi: if not cls.table: raise error.SpannerError('Class must define a table for API calls') return api.spanner_api() @classmethod def all( cls: Type[T], *, transaction: Optional[spanner_transaction.Transaction] = None, ) -> List[T]: args = [cls.table, cls.columns, spanner.KeySet(all_=True)] results = cls._execute_read(table_apis.find, transaction, args) return cls._results_to_models(results) @classmethod def count( cls, *conditions: condition.Condition, transaction: Optional[spanner_transaction.Transaction] = None, ) -> int: builder = query.CountQuery(cls, conditions) args = [builder.sql(), builder.parameters(), builder.types()] results = cls._execute_read(table_apis.sql_query, transaction, args) return builder.process_results(results) @classmethod def count_equal( cls, *, transaction: Optional[spanner_transaction.Transaction] = None, **constraints: Any, ) -> int: conditions = [] for column, value in constraints.items(): if isinstance(value, list): conditions.append(condition.in_list(column, value)) else: conditions.append(condition.equal_to(column, value)) return cls.count(*conditions, transaction=transaction) @classmethod def find( cls: Type[T], *, transaction: Optional[spanner_transaction.Transaction] = None, **keys: Any, ) -> Optional[T]: resources = cls.find_multi([keys], transaction=transaction) return resources[0] if resources else None @classmethod def find_required( cls: Type[T], *, transaction: Optional[spanner_transaction.Transaction] = None, **keys: Any, ) -> T: result = cls.find(**keys, transaction=transaction) if result is None: raise exceptions.NotFound( f'{cls.__qualname__} has no object with primary key {keys}') return result @classmethod def find_multi( cls: Type[T], keys: Iterable[Dict[str, Any]], *, transaction: Optional[spanner_transaction.Transaction] = None, ) -> List[T]: key_values = [] for key in keys: key_values.append([key[column] for column in cls.primary_keys]) keyset = spanner.KeySet(keys=key_values) args = [cls.table, cls.columns, keyset] results = cls._execute_read(table_apis.find, transaction, args) return cls._results_to_models(results) @classmethod def where( cls: Type[T], *conditions: condition.Condition, transaction: Optional[spanner_transaction.Transaction] = None, ) -> List[T]: builder = query.SelectQuery(cls, conditions) args = [builder.sql(), builder.parameters(), builder.types()] results = cls._execute_read(table_apis.sql_query, transaction, args) return builder.process_results(results) @classmethod def where_equal( cls: Type[T], *, transaction: Optional[spanner_transaction.Transaction] = None, **constraints: Any, ) -> List[T]: conditions = [] for column, value 
in constraints.items(): if isinstance(value, list): conditions.append(condition.in_list(column, value)) else: conditions.append(condition.equal_to(column, value)) return cls.where(*conditions, transaction=transaction) @classmethod def _results_to_models( cls: Type[T], results: Iterable[Iterable[Any]], ) -> List[T]: items = [dict(zip(cls.columns, result)) for result in results] return [cls(item, persisted=True) for item in items] @classmethod def _execute_read( cls, db_api: Callable[..., CallableReturn], transaction: Optional[spanner_transaction.Transaction], args: List[Any], ) -> CallableReturn: if transaction is not None: return db_api(transaction, *args) else: return cls.spanner_api().run_read_only(db_api, *args) @classmethod def create( cls, *, transaction: Optional[spanner_transaction.Transaction] = None, **kwargs: Any, ) -> None: cls._execute_write(table_apis.insert, transaction, [kwargs]) @classmethod def create_or_update( cls, *, transaction: Optional[spanner_transaction.Transaction] = None, **kwargs: Any, ) -> None: cls._execute_write(table_apis.upsert, transaction, [kwargs]) @classmethod def _delete_by_keyset( cls, transaction: Optional[spanner_transaction.Transaction], keyset: spanner.KeySet, ) -> None: db_api = table_apis.delete args = [cls.table, keyset] if transaction is not None: db_api(transaction, *args) else: cls.spanner_api().run_write(db_api, *args) @classmethod def delete_batch( cls: Type[T], models: List[T], *, transaction: Optional[spanner_transaction.Transaction] = None, ) -> None: key_list = [] for model in models: key_list.append([getattr(model, column) for column in cls.primary_keys]) cls._delete_by_keyset( transaction=transaction, keyset=spanner.KeySet(keys=key_list), ) @classmethod def delete_by_key( cls, *, transaction: Optional[spanner_transaction.Transaction] = None, **keys: Any, ) -> None: cls._delete_by_keyset( transaction=transaction, keyset=spanner.KeySet( keys=[[keys[column] for column in cls.primary_keys]]), ) @classmethod def save_batch( cls: Type[T], models: List[T], *, transaction: Optional[spanner_transaction.Transaction] = None, force_write: bool = False, ) -> None: work = collections.defaultdict(list) for model in models: value = {column: getattr(model, column) for column in cls.columns} if force_write: api_method = table_apis.upsert elif model._persisted: api_method = table_apis.update else: api_method = table_apis.insert work[api_method].append(value) model._persisted = True for api_method, values in work.items(): cls._execute_write(api_method, transaction, values) @classmethod def update( cls, *, transaction: Optional[spanner_transaction.Transaction] = None, **kwargs: Any, ) -> None: cls._execute_write(table_apis.update, transaction, [kwargs]) @classmethod
Apache License 2.0
openstack/osc-lib
osc_lib/tests/utils/__init__.py
TestCommand.assertListItemEqual
python
def assertListItemEqual(self, expected, actual):
    self.assertEqual(len(expected), len(actual))
    for item_expected, item_actual in zip(expected, actual):
        self.assertItemEqual(item_expected, item_actual)
Compare a list of items, considering formattable columns. Each pair of expected and actual items is compared using the assertItemEqual() method.
https://github.com/openstack/osc-lib/blob/415a6c7b191ac665fb7763d12d1b427b7b630586/osc_lib/tests/utils/__init__.py#L172-L180
import contextlib import copy import json as jsonutils import os from unittest import mock from cliff import columns as cliff_columns import fixtures from keystoneauth1 import loading from openstack.config import cloud_region from openstack.config import defaults from oslo_utils import importutils from requests_mock.contrib import fixture import testtools from osc_lib import clientmanager from osc_lib import shell from osc_lib.tests import fakes def fake_execute(shell, cmd): return shell.run(cmd.split()) def make_shell(shell_class=None): if shell_class is None: shell_class = shell.OpenStackShell _shell = shell_class() _shell.command_manager = mock.Mock() return _shell def opt2attr(opt): if opt.startswith('--os-'): attr = opt[5:] elif opt.startswith('--'): attr = opt[2:] else: attr = opt return attr.lower().replace('-', '_') def opt2env(opt): return opt[2:].upper().replace('-', '_') class EnvFixture(fixtures.Fixture): def __init__(self, env=None): self.new_env = env or {} def _setUp(self): self.orig_env, os.environ = os.environ, self.new_env self.addCleanup(self.revert) def revert(self): os.environ = self.orig_env class ParserException(Exception): pass class TestCase(testtools.TestCase): def setUp(self): testtools.TestCase.setUp(self) if (os.environ.get("OS_STDOUT_CAPTURE") == "True" or os.environ.get("OS_STDOUT_CAPTURE") == "1"): stdout = self.useFixture(fixtures.StringStream("stdout")).stream self.useFixture(fixtures.MonkeyPatch("sys.stdout", stdout)) if (os.environ.get("OS_STDERR_CAPTURE") == "True" or os.environ.get("OS_STDERR_CAPTURE") == "1"): stderr = self.useFixture(fixtures.StringStream("stderr")).stream self.useFixture(fixtures.MonkeyPatch("sys.stderr", stderr)) def assertNotCalled(self, m, msg=None): if m.called: if not msg: msg = 'method %s should not have been called' % m self.fail(msg) @contextlib.contextmanager def subTest(self, *args, **kwargs): try: with super(TestCase, self).subTest(*args, **kwargs): yield except TypeError: raise except AttributeError: yield class TestCommand(TestCase): def setUp(self): super(TestCommand, self).setUp() self.fake_stdout = fakes.FakeStdout() self.fake_log = fakes.FakeLog() self.app = fakes.FakeApp(self.fake_stdout, self.fake_log) self.app.client_manager = fakes.FakeClientManager() def check_parser(self, cmd, args, verify_args): cmd_parser = cmd.get_parser('check_parser') try: parsed_args = cmd_parser.parse_args(args) except SystemExit: raise ParserException("Argument parse failed") for av in verify_args: attr, value = av if attr: self.assertIn(attr, parsed_args) self.assertEqual(value, getattr(parsed_args, attr)) return parsed_args def assertItemEqual(self, expected, actual): self.assertEqual(len(expected), len(actual)) for col_expected, col_actual in zip(expected, actual): if isinstance(col_expected, cliff_columns.FormattableColumn): self.assertIsInstance(col_actual, col_expected.__class__) self.assertEqual(col_expected.human_readable(), col_actual.human_readable()) self.assertEqual(col_expected.machine_readable(), col_actual.machine_readable()) else: self.assertEqual(col_expected, col_actual)
Apache License 2.0
scidash/sciunit
sciunit/base.py
Versioned.get_repo
python
def get_repo(self, cached: bool = True) -> Repo:
    module = sys.modules[self.__module__]
    if hasattr(self.__class__, "_repo") and cached:
        repo = self.__class__._repo
    elif hasattr(module, "__file__"):
        path = Path(module.__file__).resolve()
        try:
            repo = git.Repo(path, search_parent_directories=True)
        except InvalidGitRepositoryError:
            repo = None
    else:
        repo = None
    self.__class__._repo = repo
    return repo
Get a git repository object for this instance. Args: cached (bool, optional): Whether to use cached data. Defaults to True. Returns: Repo: The git repo for this instance.
https://github.com/scidash/sciunit/blob/68401d88b8e47d29807f8b4f9d265a23174143d9/sciunit/base.py#L164-L187
import sys PLATFORM = sys.platform PYTHON_MAJOR_VERSION = sys.version_info.major if PYTHON_MAJOR_VERSION < 3: raise Exception("Only Python 3 is supported") import hashlib import inspect import json import logging from pathlib import Path from typing import Any, List try: import tkinter except ImportError: tkinter = None try: from importlib.metadata import version __version__ = version("sciunit") except: __version__ = None import bs4 import git from deepdiff import DeepDiff from git.cmd import Git from git.exc import GitCommandError, InvalidGitRepositoryError from git.remote import Remote from git.repo.base import Repo import jsonpickle import jsonpickle.ext.numpy as jsonpickle_numpy from jsonpickle.handlers import BaseHandler import numpy as np import quantities as pq ipy = "ipykernel" in sys.modules here = Path(__file__).resolve().parent.name logger = logging.getLogger("sciunit") logger.setLevel(logging.WARNING) class Config(dict): def __init__(self, *args, **kwargs): self.load() super().__init__(*args, **kwargs) default = { "cmap_high": 218, "cmap_low": 38, "score_log_level": 1, "log_level": logging.INFO, "prevalidate": False, "cwd": here, } _path = Path.home() / ".sciunit" / "config.json" @property def path(self): return Path(self._path) @path.setter def path(self, val): self._path = Path(val) def __getitem__(self, key): return self.get(key) def get(self, key, default=None, update_from_disk=True): key = key.lower() try: val = super().__getitem__(key) except KeyError: c = self.get_from_disk() if default is None: val = c[key] else: val = c.get(key, default) if update_from_disk: self[key.lower()] = val return val def set(self, key, val): self.__setitem__(key, val) def __setitem__(self, key, val): key = key.lower() super().__setitem__(key, val) def get_from_disk(self): try: with open(self.path, "r") as f: c = json.load(f) except FileNotFoundError: logger.warning( "Config file not found at '%s'; creating new one" % self.path ) self.create() return self.get_from_disk() except json.JSONDecodeError: logger.warning( "Config file JSON at '%s' was invalid; creating new one" % self.path ) self.create() return self.get_from_disk() return c def create(self, data: dict = None) -> bool: if not data: data = self.default success = False try: config_dir = self.path.parent config_dir.mkdir(exist_ok=True, parents=True) data["sciunit_version"] = __version__ with open(self.path, "w") as f: f.seek(0) f.truncate() json.dump(data, f) success = True except Exception as e: logger.warning("Could not create config file: %s" % e) return success def load(self): c = self.get_from_disk() for key, val in c.items(): key = key.lower() self[key] = val def save(self): self.create(data=self) config = Config() class Versioned(object):
MIT License
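An illustrative call to Versioned.get_repo; MyModel is a hypothetical subclass, and whether a Repo instance comes back depends on the defining module actually living inside a git checkout:

class MyModel(Versioned):
    pass

repo = MyModel().get_repo()    # git.Repo for the module's checkout, or None
cached = MyModel().get_repo()  # reuses the class-level _repo cache by default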
takelley1/ocvbot
ocvbot/behavior.py
human_behavior_rand
python
def human_behavior_rand(chance) -> None:
    roll = rand.randint(1, chance)
    log.debug("Human behavior rolled %s", roll)
    if roll == chance:
        log.info("Attempting to act human.")
        roll = rand.randint(1, 2)
        if roll == 1:
            check_skills()
        elif roll == 2:
            roll = rand.randint(1, 8)
            if roll == 1:
                open_side_stone("attacks")
            elif roll == 2:
                open_side_stone("quests")
            elif roll == 3:
                open_side_stone("equipment")
            elif roll == 4:
                open_side_stone("prayers")
            elif roll == 5:
                open_side_stone("spellbook")
            elif roll == 6:
                open_side_stone("music")
            elif roll == 7:
                open_side_stone("friends")
            elif roll == 8:
                open_side_stone("settings")
        return
    return
Randomly chooses from a list of human behaviors if the roll passes. This is done to make the bot appear more human. Args: chance (int): The number that must be rolled for a random behavior to be triggered. For example, if this parameter is 25, then there is a 1 in 25 chance for the roll to pass.
https://github.com/takelley1/ocvbot/blob/1c196a65a92c29a4630e47f62ee7b3af50516ff6/ocvbot/behavior.py#L523-L561
import logging as log import pathlib import random as rand import sys import time import cv2 import numpy as np import pyautogui as pag from ocvbot import inputs from ocvbot import misc from ocvbot import startup as start from ocvbot import vision as vis def login_basic( username_file=start.config["main"]["username_file"], password_file=start.config["main"]["password_file"], cred_sleep_range: tuple[int, int] = (800, 5000), ) -> bool: username = open(username_file, "r", encoding="utf-8").read() username = str(username.replace("\n", "")) password = open(password_file, "r", encoding="utf-8").read() password = str(password.replace("\n", "")) for _ in range(1, 3): log.info("Logging in.") ok_button = vis.Vision( region=vis.client, needle="./needles/login-menu/ok-button.png", loop_num=1 ).click_needle() existing_user_button = vis.Vision( region=vis.client, needle="./needles/login-menu/existing-user-button.png", loop_num=1, ).click_needle() if existing_user_button is True or ok_button is True: credential_screen = vis.Vision( region=vis.client, needle="./needles/login-menu/login-cancel-buttons.png", loop_num=5, ).wait_for_needle() if credential_screen is True: inputs.Mouse( region=( vis.login_field_left, vis.login_field_top, start.LOGIN_FIELD_WIDTH, start.LOGIN_FIELD_HEIGHT, ) ).click_coord() misc.sleep_rand(cred_sleep_range[0], cred_sleep_range[1]) inputs.Keyboard(log_keys=False).typewriter(username) misc.sleep_rand(cred_sleep_range[0], cred_sleep_range[1]) inputs.Mouse( region=( vis.pass_field_left, vis.pass_field_top, start.LOGIN_FIELD_WIDTH, start.LOGIN_FIELD_HEIGHT, ) ).click_coord() inputs.Keyboard(log_keys=False).typewriter(password) misc.sleep_rand(cred_sleep_range[0], cred_sleep_range[1]) inputs.Keyboard().keypress(key="enter") return True log.critical("Could perform login!") return False def login_full( login_sleep_range: tuple[int, int] = (500, 5000), postlogin_sleep_range: tuple[int, int] = (500, 5000), username_file=start.config["main"]["username_file"], password_file=start.config["main"]["password_file"], ) -> bool: log.info("Attempting to login.") for _ in range(1, 3): login = login_basic(username_file, password_file) if login is False: raise Exception("Could not perform initial login!") misc.sleep_rand(login_sleep_range[0], login_sleep_range[1]) postlogin_screen_button = vis.Vision( region=vis.display, needle="./needles/login-menu/orient-postlogin.png", conf=0.8, loop_num=10, loop_sleep_range=(1000, 2000), ).click_needle() if postlogin_screen_button is True: misc.sleep_rand(postlogin_sleep_range[0], postlogin_sleep_range[1]) logged_in = vis.Vision( region=vis.display, needle="./needles/minimap/orient.png", loop_num=50, loop_sleep_range=(1000, 2000), ).wait_for_needle() if logged_in is True: start.start_time = time.time() pag.keyDown("Up") misc.sleep_rand(3000, 7000) pag.keyUp("Up") return True raise Exception("Could not detect login after postlogin screen!") log.warning("Cannot find postlogin screen!") invalid_credentials = vis.Vision( region=vis.display, needle="./needles/login-menu/invalid-credentials.png", loop_num=1, ).wait_for_needle() if invalid_credentials is True: raise Exception("Invalid user credentials!") log.critical("Cannot find postlogin screen!") raise Exception("Unable to login!") def logout() -> bool: if vis.orient()[0] == "logged_out": log.warning("Client already logged out!") return True log.info("Attempting to logout.") open_side_stone("logout") logout_button_world_switcher = False logout_button_highlighted = False logout_button = False for _ in range(1, 5): 
logout_button = vis.Vision( region=vis.inv, needle="./needles/side-stones/logout/logout.png", conf=0.9, loop_num=1, ).wait_for_needle(get_tuple=True) if isinstance(logout_button, tuple) is True: break logout_button_highlighted = vis.Vision( region=vis.inv, needle="./needles/side-stones/logout/logout-highlighted.png", conf=0.9, loop_num=1, ).wait_for_needle(get_tuple=True) if isinstance(logout_button_highlighted, tuple) is True: logout_button = logout_button_highlighted break logout_button_world_switcher = vis.Vision( region=vis.side_stones, needle="./needles/side-stones/logout/logout-world-switcher.png", conf=0.9, loop_num=1, ).wait_for_needle(get_tuple=True) if isinstance(logout_button_world_switcher, tuple) is True: logout_button = logout_button_world_switcher break if ( logout_button is False and logout_button_highlighted is False and logout_button_world_switcher is False ): raise Exception("Failed to find logout button!") inputs.Mouse(region=logout_button).click_coord(move_away=True) for tries in range(5): logged_out = vis.Vision( region=vis.client, needle="./needles/login-menu/orient-logged-out.png", loop_num=5, loop_sleep_range=(1000, 1200), ).wait_for_needle() if logged_out is True: log.info("Logged out after trying %s times(s)", tries) return True else: log.info("Unable to log out, trying again.") inputs.Mouse(region=logout_button).click_coord(move_away=True) raise Exception("Could not logout!") def logout_break_range() -> bool: current_time = round(time.time()) if current_time >= start.checkpoint_1 and start.checkpoint_1_checked is False: log.info("Rolling for checkpoint 1...") start.checkpoint_1_checked = True logout_break_roll(5) elif current_time >= start.checkpoint_2 and start.checkpoint_2_checked is False: log.info("Rolling for checkpoint 2...") start.checkpoint_2_checked = True logout_break_roll(5) elif current_time >= start.checkpoint_3 and start.checkpoint_3_checked is False: log.info("Rolling for checkpoint 3...") start.checkpoint_3_checked = True logout_break_roll(5) elif current_time >= start.checkpoint_4 and start.checkpoint_4_checked is False: log.info("Rolling checkpoint 4...") start.checkpoint_4_checked = True logout_break_roll(5) elif current_time >= start.checkpoint_5: start.checkpoint_1_checked = False start.checkpoint_2_checked = False start.checkpoint_3_checked = False start.checkpoint_4_checked = False logout_break_roll(1) else: if start.checkpoint_1_checked is False: log.info("Checkpoint 1 is at %s", time.ctime(start.checkpoint_1)) elif start.checkpoint_1_checked is True and start.checkpoint_2_checked is False: log.info("Checkpoint 2 is at %s", time.ctime(start.checkpoint_2)) elif start.checkpoint_2_checked is True and start.checkpoint_3_checked is False: log.info("Checkpoint 3 is at %s", time.ctime(start.checkpoint_3)) elif start.checkpoint_3_checked is True and start.checkpoint_4_checked is False: log.info("Checkpoint 4 is at %s", time.ctime(start.checkpoint_4)) elif start.checkpoint_4_checked is True: log.info("Checkpoint 5 is at %s", time.ctime(start.checkpoint_5)) return True def logout_break_roll( chance, min_break_duration=int(start.config["main"]["min_break_duration"]), max_break_duration=int(start.config["main"]["max_break_duration"]), ) -> None: logout_roll = rand.randint(1, chance) log.info("Logout roll was %s", logout_roll) if logout_roll == chance: log.info("Random logout called.") logout() start.session_num += 1 log.info("Completed session %s/%s", start.session_num, start.session_total) if start.session_num >= start.session_total: 
log.info("Final session completed! Script done.") sys.exit(0) else: min_break_duration *= 60000 max_break_duration *= 60000 wait_time_seconds = misc.rand_seconds( min_break_duration, max_break_duration ) wait_time_minutes = wait_time_seconds / 60 current_time = time.time() stop_time = current_time + (current_time + wait_time_seconds) stop_time_human = time.localtime(stop_time) log.info( "Sleeping for %s minutes. Break will be over at %s:%s:%s", round(wait_time_minutes), stop_time_human[3], stop_time_human[4], stop_time_human[5], ) time.sleep(wait_time_seconds) else: return def open_side_stone(side_stone) -> bool: side_stone_open = "./needles/side-stones/open/" + side_stone + ".png" side_stone_closed = "./needles/side-stones/closed/" + side_stone + ".png" stone_open = vis.Vision( region=vis.side_stones, needle=side_stone_open, loop_num=1, conf=0.98 ).wait_for_needle() if stone_open is True: log.debug("Side stone already open.") return True log.debug("Opening side stone...") for tries in range(1, 5): vis.Vision( region=vis.side_stones, needle=side_stone_closed, loop_num=3, loop_sleep_range=(100, 300), ).click_needle(sleep_range=(0, 200, 0, 200), move_away=True) stone_open = vis.Vision( region=vis.side_stones, needle=side_stone_open, loop_num=3, conf=0.98, loop_sleep_range=(100, 200), ).wait_for_needle() if stone_open is True: log.info("Opened side stone after %s tries.", tries) return True vis.Vision( region=vis.game_screen, needle="./needles/buttons/close.png", loop_num=1 ).click_needle() raise Exception("Could not open side stone!") def check_skills() -> bool: open_side_stone("skills") inputs.Mouse(region=vis.inv).move_to() misc.sleep_rand(1000, 7000) return True
MIT License
capitalone/rubicon
rubicon_ml/ui/model.py
RubiconModel.selected_project
python
def selected_project(self): return self._selected_project
The currently selected Rubicon project.

Returns
-------
rubicon.Project or rubicon.client.asynchronous.Project
https://github.com/capitalone/rubicon/blob/86278a98cf5fd0b7e179a2949fce5a12e42fd7be/rubicon_ml/ui/model.py#L231-L238
import asyncio import numpy as np import pandas as pd from rubicon_ml.client.asynchronous import Rubicon as AsynRubicon class RubiconModel: def __init__(self, rubicon): self._rubicon = rubicon self._projects = [] self._selected_project = None self._experiment_table_dfs = {} self._experiment_comparison_root_dfs = {} def _maybe_run_async(self, rubicon_func, *args, **kwargs): if isinstance(self._rubicon, AsynRubicon): return asyncio.run_coroutine_threadsafe( rubicon_func(*args, **kwargs), loop=self._rubicon.repository.filesystem.loop ).result() else: return rubicon_func(*args, **kwargs) def get_anchor_options(self, commit_hash): anchors = list(self._experiment_comparison_root_dfs[commit_hash].columns) return [{"label": a, "value": a} for a in anchors] def get_dimensions(self, commit_hash, selected_experiment_ids, hidden_columns, anchor): dimensions = [] root_df = self.get_experiment_comparison_root_df(commit_hash) if hidden_columns is not None: root_df = root_df.drop(columns=hidden_columns, errors="ignore") experiment_comparison_df = root_df.loc[selected_experiment_ids] experiment_comparison_df = experiment_comparison_df.compute().convert_dtypes() for column in experiment_comparison_df.columns: clean_column = experiment_comparison_df[column].dropna() if isinstance(experiment_comparison_df[column].dtype, pd.StringDtype): unique_values = clean_column.unique() values = clean_column.map(lambda value: np.where(unique_values == value)[0][0]) dimension = dict( label=column, ticktext=unique_values, tickvals=list(range(0, len(unique_values))), values=values, ) elif isinstance(experiment_comparison_df[column].dtype, pd.BooleanDtype): values = clean_column.astype(int) dimension = dict( label=column, ticktext=["False", "True"], tickvals=[0, 1], values=values, ) else: values = clean_column dimension = dict(label=column, values=values) if column == anchor: anchor_dimension = dimension anchor_data = values else: dimensions.append(dimension) dimensions.append(anchor_dimension) return anchor_data, dimensions def get_experiment_table_df(self, commit_hash): return self._experiment_table_dfs[commit_hash] def get_experiment_comparison_root_df(self, commit_hash): return self._experiment_comparison_root_dfs[commit_hash] def get_model_names(self, commit_hash): return list(self._experiment_table_dfs[commit_hash]["model_name"].unique()) def update_projects(self): self._projects = self._maybe_run_async(self._rubicon.projects) def update_selected_project(self, selected_project_name): self._selected_project, *_ = [p for p in self._projects if p.name == selected_project_name] self._experiment_table_dfs = {} grouped_experiment_dfs = self._maybe_run_async( self._selected_project.to_dask_df, group_by="commit_hash" ) et_commit_hash_display_length = 7 et_drop_columns = ["description", "name", "created_at"] et_type_castings = {"tags": "str"} ec_drop_columns = ["commit_hash", "model_name", "tags"] ec_index = "id" if len(grouped_experiment_dfs.items()) == 0: self._experiment_table_dfs = {} for commit_hash, df in grouped_experiment_dfs.items(): experiment_table_df = df.drop(columns=et_drop_columns) experiment_table_df = experiment_table_df.astype(et_type_castings) experiment_table_df["commit_hash"] = experiment_table_df["commit_hash"].map( lambda ch: ch[:et_commit_hash_display_length] if ch is not None else None ) self._experiment_table_dfs[commit_hash] = experiment_table_df experiment_comparison_df = experiment_table_df.set_index(ec_index) experiment_comparison_df = experiment_comparison_df.drop(columns=ec_drop_columns) 
self._experiment_comparison_root_dfs[commit_hash] = experiment_comparison_df @property def project_options(self): return [{"label": p.name, "value": p.name} for p in self._projects] @property
Apache License 2.0
kzhai/infvoclda
src/infvoc/nchar.py
NcharModel.logprob
python
def logprob(self, word, context):
    return -log(self.prob(word, context), 2)
Evaluate the (negative) log probability of this word in this context.
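A minimal standalone sketch (not using the NcharModel class itself) of the quantity this method computes: the negative base-2 log of a probability, i.e. the cost of the event in bits. The probability value below is a made-up placeholder.

from math import log

def neg_logprob(p):
    # hypothetical helper mirroring logprob(): -log2(p)
    return -log(p, 2)

# an n-char the model assigns probability 1/8 costs 3 bits
assert abs(neg_logprob(0.125) - 3.0) < 1e-9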
https://github.com/kzhai/infvoclda/blob/05a87890d613b07f7b0c2d2bb6c79aad39e2f75d/src/infvoc/nchar.py#L271-L275
import random import string import nltk from itertools import chain from math import log, pow import math; import numpy; import re; import sys import time; from nltk.probability import ConditionalProbDist, ConditionalFreqDist, MLEProbDist, LaplaceProbDist, SimpleGoodTuringProbDist from nltk.util import ngrams def _estimator(fdist, bins): return SimpleGoodTuringProbDist(fdist); class NcharModel(ModelI): def __init__(self, n, train, smoothing=1e9, maximum_length=20, minimum_length=3, char_set=string.lowercase + string.punctuation + string.digits, patch_char='#'): self._smoothing = smoothing; self._n = n self._maximum_length = maximum_length; self._minimum_length = minimum_length; self._char_set = char_set; estimator = lambda fdist, bins: nltk.probability.LidstoneProbDist(fdist, self._smoothing, len(self._char_set)+1); cfd = ConditionalFreqDist() self._ngrams = set() self._patch_char = patch_char; self._prefix = (self._patch_char,) * (n - 1) length = nltk.probability.FreqDist(); word_freq_dist = nltk.probability.FreqDist(); char_list = []; for word in train: word = word.strip().lower(); if len(word)<self._minimum_length or len(word)>self._maximum_length: continue; length.inc(len(word)); word_freq_dist.inc(word, 1); char_list.extend(self._prefix); char_list.extend([char for char in word if char in self._char_set]); self._length = nltk.probability.WittenBellProbDist(length, length.B()+1); for nchar in ngrams(char_list, n): self._ngrams.add(nchar) context = tuple(nchar[:-1]) token = nchar[-1] cfd[context].inc(token) ''' if n==3: cond = 0; for x in self._char_set: for y in self._char_set: print (x, y), context_freq_dist[(x, y)], self._context.prob((x, y)); cond += self._context.prob((x, y)); print 'cond is', cond ''' self._model = ConditionalProbDist(cfd, estimator, len(self._char_set) ** (n - 1)); ''' consonant_freq_dist = nltk.probability.FreqDist(); consonant_condition_freq_dist = nltk.probability.ConditionalFreqDist(); for word in train: #word = re.sub(r'aeiou', ' ', word); word = word[0] + re.sub('aeiouy', ' ', word[1:]); consonant_list = word.split(); #consonant_list = ['#', '#'] + consonant_list; for temp in consonant_list: consonant_freq_dist.inc(temp, 1); consonant_freq_dist.plot() ''' word_prob_dist = nltk.probability.MLEProbDist(word_freq_dist); word_model_empirical_frequency = numpy.zeros((1, self._maximum_length - self._minimum_length + 1)) + 1e-300; word_model_square = numpy.zeros((1, self._maximum_length - self._minimum_length + 1)) + 1e-300; total_outcomes = 0; for x in xrange(self._minimum_length, self._maximum_length+1): total_outcomes += len(self._char_set) ** x; for word in word_freq_dist.keys(): word_model_empirical_frequency[0, len(word)-self._minimum_length] += word_prob_dist.prob(word) * self.probability_without_length(word); word_model_square[0, len(word)-self._minimum_length] += self.probability_without_length(word) ** 2; if True: lagrangian_parameter = 2 * (1-numpy.sum(word_model_empirical_frequency / word_model_square))/numpy.sum(1.0/word_model_square) else: lagrangian_parameter = 1.; self._multinomial_length = (word_model_empirical_frequency - lagrangian_parameter / 2) / word_model_square; self._multinomial_length /= numpy.sum(self._multinomial_length); assert numpy.all(self._multinomial_length>=0), self._multinomial_length; if n > 1: self._backoff = NcharModel(n-1, train, self._smoothing, maximum_length, minimum_length, self._char_set, self._patch_char); def probability(self, word): geometric_mean=False; if len(word)<self._minimum_length or 
len(word)>self._maximum_length: return 0; if geometric_mean: prob = 1.0; else: prob = self._multinomial_length[0, len(word)-self._minimum_length]; word = [char for char in word]; word = tuple(word); word = self._prefix + word; for i in xrange(len(word) - 1, 1, -1): prob *= self._model[word[i - (self._n - 1):i]].prob(word[i]); if geometric_mean: prob = pow(prob, 1.0/(len(word) - len(self._prefix))); return prob; ''' # This is the old version of the code, using backoff models prob = 1; for i in xrange(len(word)-1, -1, -1): if i-(self._n-1)<0: #print word[i], "|", word[:i] prob *= self.prob(word[i], word[:i]); else: #print word[i], '|', word[i-(self._n-1):i]; prob *= self.prob(word[i], word[i-(self._n-1):i]); return prob; ''' def probability_without_length(self, word): prob = 1.0; word = [char for char in word]; word = tuple(word); word = self._prefix + word; for i in xrange(len(word) - 1, 1, -1): prob *= self._model[word[i - (self._n - 1):i]].prob(word[i]); return prob; ''' Katz Backoff probability @deprecated ''' def prob(self, charactor, context): context = [char for char in context]; context = tuple(context); context = self._prefix + context; context = context[-(self._n - 1):]; if (context + (charactor,) in self._ngrams) or (self._n == 1): return self._model[context].prob(charactor) else: return self._alpha(context) * self._backoff.prob(charactor, context[1:]) ''' # This is the original code. context = tuple(context) if context + (charactor,) in self._ngrams: return self[context].prob(charactor) elif self._n > 1: return self._alpha(context) * self._backoff.prob(charactor, context[1:]) else: raise RuntimeError("No probability mass assigned to charactor %s in " "context %s" % (charactor, ' '.join(context))) ''' def _alpha(self, tokens): return self._beta(tokens) / self._backoff._beta(tokens[1:]) def _beta(self, tokens): if len(tokens) > 0 and tokens in self: return self[tokens].discount() else: return 1
Apache License 2.0
luckydonald/pytgbot
code_generation/output/telegram_bot_api_server/generated/funcs.py
get_webhook_info
python
async def get_webhook_info(
    token: str = TOKEN_VALIDATION,
) -> JSONableResponse:
    from .....main import _get_bot
    bot = await _get_bot(token)
    result = await bot.get_webhook_info()
    data = await to_web_api(result, bot)
    return r_success(data.to_array())
Use this method to get current webhook status. Requires no parameters. On success, returns a WebhookInfo object. If the bot is using getUpdates, will return an object with the url field empty. https://core.telegram.org/bots/api#getwebhookinfo
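A hedged usage sketch (not part of the repository): once this router is mounted in a FastAPI app, the route can be exercised like the corresponding Bot API call. The host, port, path prefix and token below are all placeholders for whatever your deployment uses.

import requests  # assumed to be available in the client environment

resp = requests.get("http://localhost:8080/<token>/getWebhookInfo")  # hypothetical local server
print(resp.json())  # expected to wrap the WebhookInfo fields in a success envelope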
https://github.com/luckydonald/pytgbot/blob/e29a0b5f8f8331bd347c8e2b8e75af19b12d1bc5/code_generation/output/telegram_bot_api_server/generated/funcs.py#L179-L196
from pytgbot.api_types.sendable.reply_markup import InlineKeyboardMarkup from pytgbot.api_types.sendable.reply_markup import ReplyKeyboardMarkup from pytgbot.api_types.sendable.reply_markup import ReplyKeyboardRemove from pytgbot.api_types.sendable.reply_markup import ForceReply from pytgbot.api_types.sendable.input_media import InputMediaDocument from pytgbot.api_types.sendable.input_media import InputMediaAudio from pytgbot.api_types.sendable.input_media import InputMediaPhoto from pytgbot.api_types.sendable.input_media import InputMediaVideo from pytgbot.api_types.sendable.input_media import InputMedia from pytgbot.api_types.receivable.stickers import MaskPosition from pytgbot.api_types.sendable.passport import PassportElementError from pytgbot.api_types.sendable.payments import ShippingOption from pytgbot.api_types.sendable.payments import LabeledPrice from pytgbot.api_types.receivable.media import MessageEntity from pytgbot.api_types.sendable.command import BotCommandScope from pytgbot.api_types.sendable.command import BotCommand from pytgbot.api_types.receivable.peer import ChatPermissions from pytgbot.api_types.sendable.inline import InlineQueryResult from pytgbot.api_types.sendable.files import InputFile from telethon.tl.functions.messages import SetTypingRequest from luckydonaldUtils.logger import logging from telethon.client.chats import _ChatAction from telethon.tl.types import TypeSendMessageAction from telethon.errors import BotMethodInvalidError from fastapi.params import Query from serializer import to_web_api, get_entity from telethon import TelegramClient from fastapi import APIRouter, HTTPException from typing import Union, List, Optional from enum import Enum from .....tools.responses import r_success, JSONableResponse from .....constants import TOKEN_VALIDATION from ..generated.models import * __author__ = 'luckydonald' logger = logging.getLogger(__name__) if __name__ == '__main__': logging.add_colored_handler(level=logging.DEBUG) routes = APIRouter() FAST_API_ISSUE_884_IS_FIXED = False if FAST_API_ISSUE_884_IS_FIXED: from pydantic import Json def parse_obj_as(_, obj, *__, **___): return obj else: class __JsonWrapper: from pydantic import Json def __getitem__(self, item): return self.Json Json = __JsonWrapper() from pydantic import parse_obj_as @routes.api_route('/{token}/getUpdates', methods=['GET', 'POST'], tags=['official']) async def get_updates( token: str = TOKEN_VALIDATION, offset: Optional[int] = Query(None, description='Identifier of the first update to be returned. Must be greater by one than the highest among the identifiers of previously received updates. By default, updates starting with the earliest unconfirmed update are returned. An update is considered confirmed as soon as getUpdates is called with an offset higher than its update_id. The negative offset can be specified to retrieve updates starting from -offset update from the end of the updates queue. All previous updates will forgotten.'), limit: Optional[int] = Query(None, description='Limits the number of updates to be retrieved. Values between 1-100 are accepted. Defaults to 100.'), timeout: Optional[int] = Query(None, description='Timeout in seconds for long polling. Defaults to 0, i.e. usual short polling. Should be positive, short polling should be used for testing purposes only.'), allowed_updates: Optional[List[str]] = Query(None, description='A JSON-serialized list of the update types you want your bot to receive. 
For example, specify ["message", "edited_channel_post", "callback_query"] to only receive updates of these types. See Update for a complete list of available update types. Specify an empty list to receive all update types except chat_member (default). If not specified, the previous setting will be used.Please note that this parameter doesn\'t affect updates created before the call to the getUpdates, so unwanted updates may be received for a short period of time.'), ) -> JSONableResponse: from .....main import _get_bot bot = await _get_bot(token) result = await bot.get_updates( offset=offset, limit=limit, timeout=timeout, allowed_updates=allowed_updates, ) data = await to_web_api(result, bot) return r_success(data.to_array()) @routes.api_route('/{token}/setWebhook', methods=['GET', 'POST'], tags=['official']) async def set_webhook( token: str = TOKEN_VALIDATION, url: str = Query(..., description='HTTPS url to send updates to. Use an empty string to remove webhook integration'), certificate: Optional[Json['InputFileModel']] = Query(None, description='Upload your public key certificate so that the root certificate in use can be checked. See our self-signed guide for details.'), ip_address: Optional[str] = Query(None, description='The fixed IP address which will be used to send webhook requests instead of the IP address resolved through DNS'), max_connections: Optional[int] = Query(None, description="Maximum allowed number of simultaneous HTTPS connections to the webhook for update delivery, 1-100. Defaults to 40. Use lower values to limit the load on your bot's server, and higher values to increase your bot's throughput."), allowed_updates: Optional[List[str]] = Query(None, description='A JSON-serialized list of the update types you want your bot to receive. For example, specify ["message", "edited_channel_post", "callback_query"] to only receive updates of these types. See Update for a complete list of available update types. Specify an empty list to receive all update types except chat_member (default). If not specified, the previous setting will be used.Please note that this parameter doesn\'t affect updates created before the call to the setWebhook, so unwanted updates may be received for a short period of time.'), drop_pending_updates: Optional[bool] = Query(None, description='Pass True to drop all pending updates'), ) -> JSONableResponse: certificate: Optional[InputFileModel] = parse_obj_as( Optional[InputFileModel], obj=certificate, ) from .....main import _get_bot bot = await _get_bot(token) result = await bot.set_webhook( url=url, certificate=certificate, ip_address=ip_address, max_connections=max_connections, allowed_updates=allowed_updates, drop_pending_updates=drop_pending_updates, ) data = await to_web_api(result, bot) return r_success(data.to_array()) @routes.api_route('/{token}/deleteWebhook', methods=['GET', 'POST'], tags=['official']) async def delete_webhook( token: str = TOKEN_VALIDATION, drop_pending_updates: Optional[bool] = Query(None, description='Pass True to drop all pending updates'), ) -> JSONableResponse: from .....main import _get_bot bot = await _get_bot(token) result = await bot.delete_webhook( drop_pending_updates=drop_pending_updates, ) data = await to_web_api(result, bot) return r_success(data.to_array()) @routes.api_route('/{token}/getWebhookInfo', methods=['GET', 'POST'], tags=['official'])
MIT License
oauthlib/oauthlib
oauthlib/oauth2/rfc6749/clients/service_application.py
ServiceApplicationClient.prepare_request_body
python
def prepare_request_body(self,
                         private_key=None,
                         subject=None,
                         issuer=None,
                         audience=None,
                         expires_at=None,
                         issued_at=None,
                         extra_claims=None,
                         body='',
                         scope=None,
                         include_client_id=False,
                         **kwargs):
    import jwt

    key = private_key or self.private_key
    if not key:
        raise ValueError('An encryption key must be supplied to make JWT'
                         ' token requests.')
    claim = {
        'iss': issuer or self.issuer,
        'aud': audience or self.audience,
        'sub': subject or self.subject,
        'exp': int(expires_at or time.time() + 3600),
        'iat': int(issued_at or time.time()),
    }

    for attr in ('iss', 'aud', 'sub'):
        if claim[attr] is None:
            raise ValueError(
                'Claim must include %s but none was given.' % attr)

    if 'not_before' in kwargs:
        claim['nbf'] = kwargs.pop('not_before')

    if 'jwt_id' in kwargs:
        claim['jti'] = kwargs.pop('jwt_id')

    claim.update(extra_claims or {})

    assertion = jwt.encode(claim, key, 'RS256')
    assertion = to_unicode(assertion)

    kwargs['client_id'] = self.client_id
    kwargs['include_client_id'] = include_client_id
    scope = self.scope if scope is None else scope
    return prepare_token_request(self.grant_type,
                                 body=body,
                                 assertion=assertion,
                                 scope=scope,
                                 **kwargs)
Create and add a JWT assertion to the request body.

:param private_key: Private key used for signing and encrypting.
    Must be given as a string.
:param subject: (sub) The principal that is the subject of the JWT,
    i.e. which user is the token requested on behalf of.
    For example, ``[email protected]``.
:param issuer: (iss) The JWT MUST contain an "iss" (issuer) claim that
    contains a unique identifier for the entity that issued the JWT.
    For example, ``[email protected]``.
:param audience: (aud) A value identifying the authorization server as an
    intended audience, e.g. ``https://provider.com/oauth2/token``.
:param expires_at: A unix expiration timestamp for the JWT. Defaults to
    an hour from now, i.e. ``time.time() + 3600``.
:param issued_at: A unix timestamp of when the JWT was created.
    Defaults to now, i.e. ``time.time()``.
:param extra_claims: A dict of additional claims to include in the JWT.
:param body: Existing request body (URL encoded string) to embed parameters
    into. This may contain extra parameters. Default ''.
:param scope: The scope of the access request.
:param include_client_id: `True` to send the `client_id` in the body of the
    upstream request. This is required if the client is not authenticating
    with the authorization server as described in `Section 3.2.1`_.
    False otherwise (default).
:type include_client_id: Boolean
:param not_before: A unix timestamp after which the JWT may be used.
    Not included unless provided. *
:param jwt_id: A unique JWT token identifier. Not included unless provided. *
:param kwargs: Extra credentials to include in the token request.

Parameters marked with a `*` above are not explicit arguments in the
function signature, but are specially documented arguments for items
appearing in the generic `**kwargs` keyworded input.

The "scope" parameter may be used, as defined in the Assertion Framework
for OAuth 2.0 Client Authentication and Authorization Grants
[I-D.ietf-oauth-assertions] specification, to indicate the requested scope.

Authentication of the client is optional, as described in `Section 3.2.1`_
of OAuth 2.0 [RFC6749] and consequently, the "client_id" is only needed
when a form of client authentication that relies on the parameter is used.

The following non-normative example demonstrates an Access Token Request
with a JWT as an authorization grant (with extra line breaks for display
purposes only):

.. code-block: http

    POST /token.oauth2 HTTP/1.1
    Host: as.example.com
    Content-Type: application/x-www-form-urlencoded

    grant_type=urn%3Aietf%3Aparams%3Aoauth%3Agrant-type%3Ajwt-bearer
    &assertion=eyJhbGciOiJFUzI1NiJ9.
    eyJpc3Mi[...omitted for brevity...].
    J9l-ZhwP[...omitted for brevity...]

.. _`Section 3.2.1`: https://tools.ietf.org/html/rfc6749#section-3.2.1
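A hedged usage sketch (not taken from the library's documentation): building such a body with a service-account style client. The key path, client id, subject, issuer and audience are placeholders, and PyJWT must be installed for the assertion to be signed.

from oauthlib.oauth2 import ServiceApplicationClient

with open("service_key.pem") as f:                    # placeholder key path
    private_key = f.read()

client = ServiceApplicationClient(
    client_id="my-client",                            # placeholder
    private_key=private_key,
    subject="user@example.com",                       # placeholder (sub)
    issuer="service@example.com",                     # placeholder (iss)
    audience="https://provider.com/oauth2/token",     # placeholder (aud)
)
body = client.prepare_request_body(scope="profile")
# body is a URL-encoded string carrying the jwt-bearer grant_type and the signed assertion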
https://github.com/oauthlib/oauthlib/blob/f655d73f9dcbc1f7a1475038d6703870ef99c1fb/oauthlib/oauth2/rfc6749/clients/service_application.py#L64-L189
import time

from oauthlib.common import to_unicode

from ..parameters import prepare_token_request
from .base import Client


class ServiceApplicationClient(Client):
    grant_type = 'urn:ietf:params:oauth:grant-type:jwt-bearer'

    def __init__(self, client_id, private_key=None, subject=None, issuer=None,
                 audience=None, **kwargs):
        super().__init__(client_id, **kwargs)
        self.private_key = private_key
        self.subject = subject
        self.issuer = issuer
        self.audience = audience
BSD 3-Clause New or Revised License
zulip/zulip-terminal
tests/conftest.py
no_asynch
python
def no_asynch(mocker: MockerFixture) -> None:
    mocker.patch("zulipterminal.helper.asynch")
Make all function calls synchronous.
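A hedged illustration (not from the test suite itself): since the conftest marks its fixtures autouse, zulipterminal.helper.asynch is already mocked out in every test, so @asynch-decorated code runs inline and its effects can be asserted immediately.

from unittest import mock

def test_example_runs_inline():
    from zulipterminal import helper
    # the autouse fixture has replaced asynch with a MagicMock, so no worker thread is spawned
    assert isinstance(helper.asynch, mock.MagicMock)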
https://github.com/zulip/zulip-terminal/blob/fc80aa7b038176d0a0297f7674c517d6de38388a/tests/conftest.py#L35-L39
from collections import OrderedDict, defaultdict
from copy import deepcopy
from typing import Any, Callable, Dict, List, Optional, Set, Tuple, Union

import pytest
from pytest_mock import MockerFixture
from urwid import Widget

from zulipterminal.api_types import Message
from zulipterminal.config.keys import (
    ZT_TO_URWID_CMD_MAPPING,
    keys_for_command,
    primary_key_for_command,
)
from zulipterminal.helper import Index, TidiedUserInfo
from zulipterminal.helper import initial_index as helper_initial_index
from zulipterminal.ui_tools.boxes import MessageBox
from zulipterminal.ui_tools.buttons import StreamButton, TopicButton, UserButton
from zulipterminal.urwid_types import urwid_Size
from zulipterminal.version import (
    MINIMUM_SUPPORTED_SERVER_VERSION,
    SUPPORTED_SERVER_VERSIONS,
)


@pytest.fixture(autouse=True)
def no_requests(monkeypatch: pytest.MonkeyPatch) -> None:
    monkeypatch.delattr("requests.sessions.Session.request")


@pytest.fixture(autouse=True)
Apache License 2.0
amarvin/fantasy-football-bot
ffbot/utils.py
load
python
def load():
    folder = join(".", "data")
    files = [f for f in listdir(folder) if isfile(join(folder, f))]
    latest_file = max([join(folder, f) for f in files], key=getctime)
    latest_filename = split(latest_file)[1]
    week = re.findall(r"\d+", latest_filename)[-1]
    week = int(week)
    df = pd.read_csv(latest_file)
    return df, week
Load latest scraped data
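A hedged usage sketch (package layout and paths assumed from the snippet shown here): load() is the counterpart of save(), which writes './data/<timestamp> week <N>.csv'; load() re-reads the newest such file and recovers the week number from its filename.

from ffbot.utils import load  # module path as shown above

df, week = load()             # newest CSV under ./data, week parsed from its filename
print(week, df.shape)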
https://github.com/amarvin/fantasy-football-bot/blob/9600a7c973ed634830c399568b8e6b10b9a566b3/ffbot/utils.py#L25-L40
from datetime import datetime
from os import listdir, makedirs
from os.path import exists, getctime, isfile, join, split
import re

import pandas as pd


def save(df, week):
    folder = "data"
    if not exists(folder):
        makedirs(folder)
    startTime = datetime.now()
    filename = join(folder, "{:%Y-%m-%d %H%M} week {}.csv".format(startTime, week))
    df.to_csv(filename, index=False)
MIT License
beer-garden/beer-garden
src/app/beer_garden/scheduler.py
run_job
python
def run_job(job_id, request_template, **kwargs):
    import beer_garden.router

    request_template.metadata["_bg_job_id"] = job_id

    if "event" in kwargs and kwargs["event"] is not None:
        try:
            injection_dict = InjectionDict()
            build_injection_dict(injection_dict, kwargs["event"], prefix="event")

            try:
                db_job = db.query_unique(Job, id=job_id)
                if db_job:
                    build_injection_dict(
                        injection_dict, db_job.trigger, prefix="trigger"
                    )
            except Exception as ex:
                logger.exception(f"Could not fetch job for parameter injection: {ex}")

            inject_values(request_template.parameters, injection_dict)
        except Exception as ex:
            logger.exception(f"Could not inject parameters: {ex}")

    db_job = db.query_unique(Job, id=job_id)
    wait_event = threading.Event()

    if not db_job:
        logger.error(f"Could not find job {job_id} in database, job will not be run")
        return

    try:
        logger.debug(f"About to execute {db_job!r}")

        request = beer_garden.router.route(
            Operation(
                operation_type="REQUEST_CREATE",
                model=request_template,
                model_type="RequestTemplate",
                kwargs={"wait_event": wait_event},
            )
        )

        timeout = db_job.timeout or None
        if not wait_event.wait(timeout=timeout):
            logger.warning(f"Execution of job {db_job} timed out.")
            return

        request = get_request(request.id)

        updates = {}
        if request.status == "ERROR":
            updates["inc__error_count"] = 1
            logger.debug(f"{db_job!r} request completed with ERROR status")
        elif request.status == "SUCCESS":
            logger.debug(f"{db_job!r} request completed with SUCCESS status")
            updates["inc__success_count"] = 1
        db.modify(db_job, **updates)
    except Exception as ex:
        logger.exception(f"Error executing {db_job}: {ex}")

    job = beer_garden.application.scheduler.get_job(job_id)
    if (
        job
        and job.next_run_time is not None
        and getattr(job.trigger, "reschedule_on_finish", False)
    ):
        beer_garden.application.scheduler.reschedule_job(job_id, trigger=job.trigger)
Spawned by the scheduler, this will kick off a new request.

This method is meant to be run in a separate process.

Args:
    job_id: The Beer-Garden job ID that triggered this event.
    request_template: Request template specified by the job.
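A hedged sketch (not the project's own wiring) of how an APScheduler-style scheduler could be pointed at run_job; the job id and template values are placeholders, and a fully configured beer-garden application is assumed to be running.

from apscheduler.schedulers.background import BackgroundScheduler
from brewtils.models import RequestTemplate

template = RequestTemplate(system="echo", command="say",
                           parameters={"message": "hi"})   # placeholder request
scheduler = BackgroundScheduler()
scheduler.add_job(run_job, "interval", minutes=30,
                  kwargs={"job_id": "<job id>", "request_template": template})
scheduler.start()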
https://github.com/beer-garden/beer-garden/blob/2ea0944d7528a8127bc1b79d16d8fdc668f1c8e4/src/app/beer_garden/scheduler.py#L432-L515
import json import logging import threading from datetime import datetime, timedelta from operator import attrgetter, methodcaller from os.path import isdir from typing import Dict, List, Optional import mongoengine import pymongo import pymongo.database from apscheduler.schedulers.background import BackgroundScheduler from apscheduler.triggers.interval import IntervalTrigger as APInterval from brewtils.models import Event, Events, FileTrigger, Job, Operation from brewtils.schema_parser import SchemaParser from bson import ObjectId, json_util from pathtools.patterns import match_any_paths from pymongo.client_session import ClientSession from pymongo.collection import Collection from pymongo.cursor import Cursor from pymongo.results import InsertManyResult from watchdog.events import ( EVENT_TYPE_CREATED, EVENT_TYPE_DELETED, EVENT_TYPE_MODIFIED, EVENT_TYPE_MOVED, PatternMatchingEventHandler, ) from watchdog.observers.polling import PollingObserver as Observer from watchdog.utils import has_attribute, unicode_paths import beer_garden import beer_garden.config as config import beer_garden.db.api as db from beer_garden.db.mongo.api import from_brewtils from beer_garden.db.mongo.jobstore import construct_trigger from beer_garden.events import publish_event from beer_garden.requests import get_request logger = logging.getLogger(__name__) class InjectionDict(dict): def __missing__(self, key): return "{" + key + "}" def build_injection_dict(dictionary, obj, prefix="", separator="/"): for item in dir(obj): if not callable(getattr(obj, item)): if prefix != "": dictionary[prefix + separator + item] = getattr(obj, item) else: dictionary[item] = getattr(obj, item) def inject_values(request, dictionary): if isinstance(request, dict): for k, v in request.items(): try: request[k] = inject_values(v, dictionary) except (ReferenceError, IndexError): pass return request elif isinstance(request, str): try: return request.format_map(dictionary) except (AttributeError, KeyError, ValueError): return request elif isinstance(request, list): for i, item in enumerate(request): try: request[i] = inject_values(item, dictionary) except IndexError: pass return request else: return request class PatternMatchingEventHandlerWithArgs(PatternMatchingEventHandler): _args = [] _kwargs = {} _coalesce = False _src_path_timing = {} _min_delta_time = timedelta(microseconds=500_000) def __init__(self, args=None, kwargs=None, coalesce=False, **thru): self._args = args if args is not None else [] self._kwargs = kwargs if kwargs is not None else {} self._coalesce = coalesce super().__init__(**thru) def dispatch(self, event): current_time = datetime.now() if self.ignore_directories and event.is_directory: return paths = [] if has_attribute(event, "dest_path"): paths.append(unicode_paths.decode(event.dest_path)) if event.src_path: paths.append(unicode_paths.decode(event.src_path)) if match_any_paths( paths, included_patterns=self.patterns, excluded_patterns=self.ignore_patterns, case_sensitive=self.case_sensitive, ): _method_map = { EVENT_TYPE_MODIFIED: self.on_modified, EVENT_TYPE_MOVED: self.on_moved, EVENT_TYPE_CREATED: self.on_created, EVENT_TYPE_DELETED: self.on_deleted, } event_type = event.event_type event_tuple = (event.src_path, event_type) if not self._coalesce: self.on_any_event(*self._args, event=event, **self._kwargs) _method_map[event_type](*self._args, event=event, **self._kwargs) elif event_tuple in self._src_path_timing: if ( current_time - self._src_path_timing[event_tuple] > self._min_delta_time ): 
self._src_path_timing[event_tuple] = datetime.now() self.on_any_event(*self._args, event=event, **self._kwargs) _method_map[event_type](*self._args, event=event, **self._kwargs) else: self._src_path_timing[event_tuple] = datetime.now() self.on_any_event(*self._args, event=event, **self._kwargs) _method_map[event_type](*self._args, event=event, **self._kwargs) def on_created(self, *args, event=None, **kwargs): super().on_created(event) def on_any_event(self, *args, event=None, **kwargs): super().on_any_event(event) def on_deleted(self, *args, event=None, **kwargs): super().on_deleted(event) def on_modified(self, *args, event=None, **kwargs): super().on_modified(event) def on_moved(self, *args, event=None, **kwargs): super().on_moved(event) def pass_through(class_objects=None): def wrapper(my_class): for obj in class_objects: scheduler = getattr(my_class, obj, None) if scheduler is not None: method_list = [ func for func in dir(scheduler) if callable(getattr(scheduler, func)) ] for name in method_list: if name[0] != "_" and not hasattr(my_class, name): method = getattr(scheduler, name) setattr(my_class, name, method) return my_class return wrapper @pass_through(class_objects=["_sync_scheduler", "_async_scheduler"]) class MixedScheduler(object): _sync_scheduler = BackgroundScheduler() _async_scheduler = Observer() _async_jobs = {} _async_paused_jobs = set() running = False def _process_watches(self, jobs): for job in jobs: if isinstance(job.trigger, FileTrigger): self.add_job( run_job, trigger=job.trigger, coalesce=job.coalesce, kwargs={"job_id": job.id, "request_template": job.request_template}, ) def __init__(self, interval_config=None): self._sync_scheduler.configure(**interval_config) def initialize_from_db(self): all_jobs = db.query(Job, filter_params={"trigger_type": "file"}) self._process_watches(all_jobs) def start(self): self._sync_scheduler.start() self._async_scheduler.start() self.running = True def shutdown(self, **kwargs): self.stop(**kwargs) def stop(self, **kwargs): self._sync_scheduler.shutdown(**kwargs) self._async_scheduler.stop() self.running = False def reschedule_job(self, job_id, **kwargs): if job_id not in self._async_jobs: self._sync_scheduler.reschedule_job(job_id, **kwargs) def get_job(self, job_id): if job_id in self._async_jobs: return db.query_unique(Job, id=job_id) else: return self._sync_scheduler.get_job(job_id) def pause_job(self, job_id, **kwargs): if job_id in self._async_jobs: if job_id not in self._async_paused_jobs: (event_handler, watch) = self._async_jobs.get(job_id) self._async_scheduler.remove_handler_for_watch(event_handler, watch) self._async_paused_jobs.add(job_id) else: self._sync_scheduler.pause_job(job_id, **kwargs) def resume_job(self, job_id, **kwargs): if job_id in self._async_jobs: if job_id in self._async_paused_jobs: (event_handler, watch) = self._async_jobs.get(job_id) self._async_scheduler.add_handler_for_watch(event_handler, watch) self._async_paused_jobs.remove(job_id) else: self._sync_scheduler.resume_job(job_id, **kwargs) def remove_job(self, job_id, **kwargs): if job_id in self._async_jobs: self._async_jobs.pop(job_id) if job_id in self._async_paused_jobs: self._async_paused_jobs.remove(job_id) db.delete(db.query_unique(Job, id=job_id)) else: self._sync_scheduler.remove_job(job_id, **kwargs) def _add_triggers(self, handler, triggers, func): for name in triggers.keys(): if hasattr(handler, name) and triggers.get(name): setattr(handler, name, func) return handler def add_job(self, func, trigger=None, **kwargs): if trigger is None: 
logger.exception("Scheduler called with None-type trigger.") return if not isinstance(trigger, FileTrigger): self._sync_scheduler.add_job( func, trigger=construct_trigger(kwargs.pop("trigger_type"), trigger), **kwargs, ) else: if not isdir(trigger.path): logger.exception(f"User passed an invalid trigger path {trigger.path}") return args = [ kwargs.get("kwargs").get("job_id"), kwargs.get("kwargs").get("request_template"), ] event_handler = PatternMatchingEventHandlerWithArgs( args=args, coalesce=kwargs.get("coalesce", False), patterns=trigger.pattern, ) event_handler = self._add_triggers(event_handler, trigger.callbacks, func) if trigger.path is not None and event_handler is not None: watch = self._async_scheduler.schedule( event_handler, trigger.path, recursive=trigger.recursive ) self._async_jobs[args[0]] = (event_handler, watch) class IntervalTrigger(APInterval): def __init__(self, *args, **kwargs): self.reschedule_on_finish = kwargs.pop("reschedule_on_finish", False) super(IntervalTrigger, self).__init__(*args, **kwargs)
MIT License
apache/allura
Allura/allura/model/auth.py
User.send_user_mention_notification
python
def send_user_mention_notification(self, mentioned_by, artifact):
    tmpl = g.jinja2_env.get_template('allura:templates/mail/usermentions_email.md')
    subject = '[%s:%s] Your name was mentioned' % (
        c.project.shortname,
        c.app.config.options.mount_point)
    item_url = artifact.url()
    if artifact.type_s == 'Post':
        item_url = artifact.url_paginated()
    tmpl_context = {
        'site_domain': config['domain'],
        'base_url': config['base_url'],
        'user': c.user,
        'artifact_link': h.absurl(item_url),
        'artifact_linktext': artifact.link_text(),
        'mentioned_by': mentioned_by
    }
    allura.tasks.mail_tasks.sendsimplemail.post(
        toaddr=self.get_pref('email_address'),
        fromaddr=g.noreply,
        reply_to=g.noreply,
        message_id=h.gen_message_id(),
        subject=subject,
        text=tmpl.render(tmpl_context))
Send user mention notification to {self} user.
https://github.com/apache/allura/blob/04f14f15a9a9364e18c61f68acdaa241a470186b/Allura/allura/model/auth.py#L480-L504
from __future__ import unicode_literals from __future__ import absolute_import import logging import calendar import typing import six from markupsafe import Markup from six.moves.urllib.parse import urlparse from email import header from hashlib import sha256 from datetime import timedelta, datetime, time import os import re from pytz import timezone import pymongo from pymongo.errors import DuplicateKeyError from bson import ObjectId from tg import config from tg import tmpl_context as c, app_globals as g from tg import request from ming import schema as S from ming import Field from ming.orm import session, state from ming.orm import FieldProperty, RelationProperty, ForeignIdProperty from ming.orm.declarative import MappedClass from ming.orm.ormsession import ThreadLocalORMSession from ming.utils import LazyProperty import allura.tasks.mail_tasks from allura.lib import helpers as h from allura.lib import plugin from allura.lib import utils from allura.lib.decorators import memoize from allura.lib.search import SearchIndexable from .session import main_orm_session, main_explicitflush_orm_session from .session import project_orm_session from .timeline import ActivityNode, ActivityObject if typing.TYPE_CHECKING: from ming.odm.mapper import Query log = logging.getLogger(__name__) class AlluraUserProperty(ForeignIdProperty): def __init__(self, **kwargs): super(AlluraUserProperty, self).__init__('User', allow_none=True, **kwargs) class EmailAddress(MappedClass): re_format = re.compile(r'^.*\s+<(.*)>\s*$') class __mongometa__: name = str('email_address') session = main_orm_session indexes = ['nonce', ] unique_indexes = [('email', 'claimed_by_user_id'), ] query: 'Query[EmailAddress]' _id = FieldProperty(S.ObjectId) email = FieldProperty(str) claimed_by_user_id = FieldProperty(S.ObjectId, if_missing=None) confirmed = FieldProperty(bool, if_missing=False) nonce = FieldProperty(str) valid_address = FieldProperty(bool, if_missing=None) valid_details = FieldProperty(S.Anything, if_missing=None) valid_check_date = FieldProperty(datetime, if_missing=None) @classmethod def get(cls, **kw): if kw.get('email'): email = cls.canonical(kw['email']) if email is not None: kw['email'] = email else: return None return cls.query.get(**kw) @classmethod def find(cls, q=None): if q: if q.get('email'): email = cls.canonical(q['email']) if email is not None: q['email'] = email else: return utils.EmptyCursor() return cls.query.find(q) return cls.query.find() def claimed_by_user(self, include_pending=False): q = {'_id': self.claimed_by_user_id, 'disabled': False, 'pending': False} if include_pending: q.pop('pending', None) return User.query.get(**q) @classmethod def create(cls, addr): addr = cls.canonical(addr) if addr is not None: return cls(email=addr) @classmethod def canonical(cls, addr): mo = cls.re_format.match(addr) if mo: addr = mo.group(1) if '@' in addr: try: user, domain = addr.strip().split('@') return '%s@%s' % (user, domain.lower()) except ValueError: return addr.strip() else: return None def send_claim_attempt(self): confirmed_email = self.find(dict(email=self.email, confirmed=True)).all() if confirmed_email: log.info('Sending claim attempt email to %s', self.email) text = g.jinja2_env.get_template('allura:templates/mail/claimed_existing_email.txt').render(dict( email=self, user=confirmed_email[0].claimed_by_user(), config=config )) allura.tasks.mail_tasks.send_system_mail_to_user(self.email, '%s - Email address claim attempt' % config['site_name'], text) def set_nonce_hash(self): self.nonce = 
sha256(os.urandom(10)).hexdigest() return True def send_verification_link(self): self.set_nonce_hash() log.info('Sending verification link to %s', self.email) text = ''' To verify the email address %s belongs to the user %s, please visit the following URL: %s ''' % (self.email, self.claimed_by_user(include_pending=True).username, h.absurl('/auth/verify_addr?a={}'.format(h.urlquote(self.nonce))), ) log.info('Verification email:\n%s', text) allura.tasks.mail_tasks.sendsimplemail.post( fromaddr=g.noreply, reply_to=g.noreply, toaddr=self.email, subject='%s - Email address verification' % config['site_name'], message_id=h.gen_message_id(), text=text) class AuthGlobals(MappedClass): class __mongometa__: name = str('auth_globals') session = main_orm_session query: 'Query[AuthGlobals]' _id = FieldProperty(int) next_uid = FieldProperty(int, if_missing=10000) @classmethod def upsert(cls): r = cls.query.get() if r is not None: return r try: r = cls(_id=0) session(r).flush(r) return r except pymongo.errors.DuplicateKeyError: session(r).flush(r) r = cls.query.get() return r @classmethod def get_next_uid(cls): cls.upsert() g = cls.query.find_and_modify( query={}, update={'$inc': {'next_uid': 1}}, new=True) return g.next_uid class FieldPropertyDisplayName(FieldProperty): def __get__(self, instance, cls=None): if instance is None: return self try: display_name = instance._cache_display_name except AttributeError: display_name = instance._cache_display_name = instance.get_pref('display_name') return display_name class User(MappedClass, ActivityNode, ActivityObject, SearchIndexable): SALT_LEN = 8 class __mongometa__: name = str('user') session = main_orm_session indexes = ['tool_data.AuthPasswordReset.hash'] unique_indexes = ['username'] custom_indexes = [ dict(fields=('tool_data.phone_verification.number_hash',), sparse=True), ] query: 'Query[User]' type_s = 'User' _id = FieldProperty(S.ObjectId) sfx_userid = FieldProperty(S.Deprecated) username = FieldProperty(str) email_addresses = FieldProperty([str]) password = FieldProperty(str) last_password_updated = FieldProperty(datetime) projects = FieldProperty(S.Deprecated) tool_preferences = FieldProperty(S.Deprecated) tool_data = FieldProperty({str: {str: None}}) disabled = FieldProperty(bool, if_missing=False) pending = FieldProperty(bool, if_missing=False) preferences = FieldProperty(dict( results_per_page=int, email_address=str, email_format=str, disable_user_messages=bool, mention_notifications=bool, multifactor=bool, )) display_name: str = FieldPropertyDisplayName(str) sex = FieldProperty( S.OneOf('Male', 'Female', 'Other', 'Unknown', if_missing='Unknown')) birthdate = FieldProperty(S.DateTime, if_missing=None) availability = FieldProperty([dict( week_day=str, start_time=dict(h=int, m=int), end_time=dict(h=int, m=int))]) localization = FieldProperty(dict(city=str, country=str)) timezone = FieldProperty(str) sent_user_message_times = FieldProperty([S.DateTime]) inactiveperiod = FieldProperty([dict( start_date=S.DateTime, end_date=S.DateTime)]) socialnetworks = FieldProperty([dict(socialnetwork=str, accounturl=str)]) telnumbers = FieldProperty([str]) skypeaccount = FieldProperty(str) webpages = FieldProperty([str]) skills = FieldProperty([dict( category_id=S.ObjectId, level=S.OneOf('low', 'high', 'medium'), comment=str)]) stats_id = FieldProperty(S.ObjectId, if_missing=None) last_access = FieldProperty(dict( login_date=S.DateTime, login_ip=str, login_ua=str, session_date=S.DateTime, session_ip=str, session_ua=str)) def __repr__(self): return ('<User 
username={s.username!r} display_name={s.display_name!r} _id={s._id!r} ' 'disabled={s.disabled!r} pending={s.pending!r}>'.format(s=self)) def index(self): provider = plugin.AuthenticationProvider.get(None) localization = '%s/%s' % ( self.get_pref('localization')['country'], self.get_pref('localization')['city']) socialnetworks = ' '.join(['%s: %s' % (n['socialnetwork'], n['accounturl']) for n in self.get_pref('socialnetworks')]) fields = dict( id=self.index_id(), title='User %s' % self.username, url_s=self.url(), type_s=self.type_s, username_s=self.username, email_addresses_t=' '.join([e for e in self.email_addresses if e]), last_password_updated_dt=self.last_password_updated, disabled_b=self.disabled, pending_b=self.pending, results_per_page_i=self.get_pref('results_per_page'), email_address_s=self.get_pref('email_address'), email_format_s=self.get_pref('email_format'), disable_user_messages_b=self.get_pref('disable_user_messages'), display_name_t=self.get_pref('display_name'), sex_s=self.get_pref('sex'), birthdate_dt=self.get_pref('birthdate'), localization_s=localization, timezone_s=self.get_pref('timezone'), socialnetworks_t=socialnetworks, telnumbers_t=' '.join([t for t in self.get_pref('telnumbers') if t]), skypeaccount_s=self.get_pref('skypeaccount'), webpages_t=' '.join([p for p in self.get_pref('webpages') if p]), skills_t=' '.join([s['skill'].fullpath for s in self.get_skills() if s.get('skill')]), last_access_login_date_dt=self.last_access['login_date'], last_access_login_ip_s=self.last_access['login_ip'], last_access_login_ua_t=self.last_access['login_ua'], last_access_session_date_dt=self.last_access['session_date'], last_access_session_ip_s=self.last_access['session_ip'], last_access_session_ua_t=self.last_access['session_ua'], ) return dict(provider.index_user(self), **fields) def track_login(self, req): user_ip = utils.ip_address(req) user_agent = req.headers.get('User-Agent') self.last_access['login_date'] = datetime.utcnow() self.last_access['login_ip'] = user_ip self.last_access['login_ua'] = user_agent session(self).flush(self) def track_active(self, req): user_ip = utils.ip_address(req) user_agent = req.headers.get('User-Agent') now = datetime.utcnow() last_date = self.last_access['session_date'] date_changed = last_date is None or last_date.date() != now.date() ip_changed = user_ip != self.last_access['session_ip'] ua_changed = user_agent != self.last_access['session_ua'] if date_changed or ip_changed or ua_changed: self.last_access['session_date'] = datetime.utcnow() self.last_access['session_ip'] = user_ip self.last_access['session_ua'] = user_agent session(self).flush(self) def add_login_detail(self, detail): try: session(detail).flush(detail) except DuplicateKeyError: session(detail).expunge(detail) def backfill_login_details(self, auth_provider): msg_regex = re.compile(r'.*^({})'.format('|'.join([re.escape(line_prefix) for line_prefix in auth_provider.trusted_auditlog_line_prefixes])), re.MULTILINE | re.DOTALL) for auditlog in AuditLog.for_user(self, message=msg_regex): if not msg_regex.search(auditlog.message): continue login_detail = auth_provider.login_details_from_auditlog(auditlog) if login_detail: self.add_login_detail(login_detail) def send_password_reset_email(self, email_address=None, subject_tmpl='{site_name} Password recovery'): if email_address is None: email_address = self.get_pref('email_address') reset_url = self.make_password_reset_url() log.info('Sending password recovery link to %s', email_address) subject = 
subject_tmpl.format(site_name=config['site_name']) text = g.jinja2_env.get_template('allura:templates/mail/forgot_password.txt').render(dict( user=self, config=config, reset_url=reset_url, )) allura.tasks.mail_tasks.send_system_mail_to_user(email_address, subject, text) def make_password_reset_url(self): hash = h.nonce(42) self.set_tool_data('AuthPasswordReset', hash=hash, hash_expiry=datetime.utcnow() + timedelta(seconds=int(config.get('auth.recovery_hash_expiry_period', 600)))) reset_url = h.absurl('/auth/forgotten_password/{}'.format(hash)) return reset_url def can_send_user_message(self): now = datetime.utcnow() time_interval = timedelta(seconds=g.user_message_time_interval) self.sent_user_message_times = [t for t in self.sent_user_message_times if t + time_interval > now] return len(self.sent_user_message_times) < g.user_message_max_messages def time_to_next_user_message(self): if self.can_send_user_message(): return 0 return (self.sent_user_message_times[0] + timedelta(seconds=g.user_message_time_interval) - datetime.utcnow()) def send_user_message(self, user, subject, message, cc): tmpl = g.jinja2_env.get_template( 'allura:ext/user_profile/templates/message.html') tmpl_context = { 'message_text': message, 'site_name': config['site_name'], 'base_url': config['base_url'], 'user': c.user, } allura.tasks.mail_tasks.sendsimplemail.post( toaddr=user.get_pref('email_address'), fromaddr=self.get_pref('email_address'), reply_to=self.get_pref('email_address'), message_id=h.gen_message_id(), subject=subject, text=tmpl.render(tmpl_context), cc=cc) self.sent_user_message_times.append(datetime.utcnow())
Apache License 2.0
steemit/hivemind
hive/server/condenser_api/methods.py
get_replies_by_last_update
python
async def get_replies_by_last_update(context, start_author: str = None, start_permlink: str = '',
                                     limit: int = 20, truncate_body: int = 0):
    assert start_author, '`start_author` cannot be blank'
    ids = await cursor.pids_by_replies_to_account(
        context['db'],
        valid_account(start_author),
        valid_permlink(start_permlink, allow_empty=True),
        valid_limit(limit, 100))
    return await load_posts(context['db'], ids, truncate_body=truncate_body)
Get all replies made to any of author's posts.
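A hedged call sketch (the db handle is a placeholder for whatever the server injects as context['db']): fetch up to 10 replies made to posts authored by 'alice'.

import asyncio

async def demo(db):
    context = {'db': db}   # the server normally builds this context for each API call
    return await get_replies_by_last_update(context, start_author='alice', limit=10)

# replies = asyncio.get_event_loop().run_until_complete(demo(db))  # db: an initialized hive Db instance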
https://github.com/steemit/hivemind/blob/d99b852e1ad321aeb67eaec5fb03f7bfb32c75d6/hive/server/condenser_api/methods.py#L310-L319
from functools import wraps import hive.server.condenser_api.cursor as cursor from hive.server.condenser_api.objects import load_posts, load_posts_reblogs from hive.server.common.helpers import ( ApiError, return_error_info, valid_account, valid_permlink, valid_tag, valid_offset, valid_limit, valid_follow_type) @return_error_info async def get_account_votes(context, account): raise ApiError("get_account_votes is no longer supported, for details see " "https://steemit.com/steemit/@steemitdev/additional-public-api-change") def _legacy_follower(follower, following, follow_type): return dict(follower=follower, following=following, what=[follow_type]) def _legacy_follower_with_reputation (follower, reputation, following, follow_type): what = ['',''] if follow_type & 1 != 0: what[0] = 'blog' if follow_type & 2 != 0: what[1] = 'ignore' return dict(follower=follower, reputation=reputation, following=following, what=what) @return_error_info async def get_followers(context, account: str, start: str, follow_type: str = None, limit: int = None, **kwargs): if not follow_type and 'type' in kwargs: follow_type = kwargs['type'] if not follow_type: follow_type = 'blog' followers = await cursor.get_followers( context['db'], valid_account(account), valid_account(start, allow_empty=True), valid_follow_type(follow_type), valid_limit(limit, 1000)) return [_legacy_follower_with_reputation(row['name'], row['reputation'],account,row['state']) for row in followers] @return_error_info async def get_followers_by_page(context, account: str, page: int, page_size: int = None, follow_type: str = None, **kwargs): if not follow_type and 'type' in kwargs: follow_type = kwargs['type'] if not follow_type: follow_type = 'blog' followers = await cursor.get_followers_by_page( context['db'], valid_account(account), valid_offset(page), valid_limit(page_size, 100), valid_follow_type(follow_type)) return [_legacy_follower_with_reputation(row['name'], row['reputation'],account,row['state']) for row in followers] @return_error_info async def get_following(context, account: str, start: str, follow_type: str = None, limit: int = None, **kwargs): if not follow_type and 'type' in kwargs: follow_type = kwargs['type'] if not follow_type: follow_type = 'blog' following = await cursor.get_following( context['db'], valid_account(account), valid_account(start, allow_empty=True), valid_follow_type(follow_type), valid_limit(limit, 1000)) return [_legacy_follower_with_reputation(account,row['reputation'],row['name'],row['state']) for row in following] @return_error_info async def get_following_by_page(context, account: str, page: int, page_size: int = None, follow_type: str = None, **kwargs): if not follow_type and 'type' in kwargs: follow_type = kwargs['type'] if not follow_type: follow_type = 'blog' following = await cursor.get_following_by_page( context['db'], valid_account(account), valid_offset(page), valid_limit(page_size, 100), valid_follow_type(follow_type)) return [_legacy_follower_with_reputation(account,row['reputation'],row['name'],row['state']) for row in following] @return_error_info async def get_follow_count(context, account: str): count = await cursor.get_follow_counts( context['db'], valid_account(account)) return dict(account=account, following_count=count['following'], follower_count=count['followers']) @return_error_info async def get_reblogged_by(context, author: str, permlink: str): return await cursor.get_reblogged_by( context['db'], valid_account(author), valid_permlink(permlink)) @return_error_info async def 
get_account_reputations(context, account_lower_bound: str = None, limit: int = None): return {'reputations': await cursor.get_account_reputations( context['db'], account_lower_bound, valid_limit(limit, 1000))} @return_error_info async def get_content(context, author: str, permlink: str): db = context['db'] valid_account(author) valid_permlink(permlink) post_id = await cursor.get_post_id(db, author, permlink) if not post_id: return {'id': 0, 'author': '', 'permlink': ''} posts = await load_posts(db, [post_id]) assert posts, 'post was not found in cache' return posts[0] @return_error_info async def get_content_replies(context, author: str, permlink: str): db = context['db'] valid_account(author) valid_permlink(permlink) parent_id = await cursor.get_post_id(db, author, permlink) if parent_id: child_ids = await cursor.get_child_ids(db, parent_id) if child_ids: return await load_posts(db, child_ids) return [] def nested_query_compat(function): @wraps(function) def wrapper(*args, **kwargs): if args and not kwargs and len(args) == 2 and isinstance(args[1], dict): return function(args[0], **args[1]) return function(*args, **kwargs) return wrapper @return_error_info @nested_query_compat async def get_discussions_by_trending(context, start_author: str = '', start_permlink: str = '', limit: int = 20, tag: str = None, truncate_body: int = 0, filter_tags: list = None): assert not filter_tags, 'filter_tags not supported' ids = await cursor.pids_by_query( context['db'], 'trending', valid_account(start_author, allow_empty=True), valid_permlink(start_permlink, allow_empty=True), valid_limit(limit, 100), valid_tag(tag, allow_empty=True)) return await load_posts(context['db'], ids, truncate_body=truncate_body) @return_error_info @nested_query_compat async def get_discussions_by_hot(context, start_author: str = '', start_permlink: str = '', limit: int = 20, tag: str = None, truncate_body: int = 0, filter_tags: list = None): assert not filter_tags, 'filter_tags not supported' ids = await cursor.pids_by_query( context['db'], 'hot', valid_account(start_author, allow_empty=True), valid_permlink(start_permlink, allow_empty=True), valid_limit(limit, 100), valid_tag(tag, allow_empty=True)) return await load_posts(context['db'], ids, truncate_body=truncate_body) @return_error_info @nested_query_compat async def get_discussions_by_promoted(context, start_author: str = '', start_permlink: str = '', limit: int = 20, tag: str = None, truncate_body: int = 0, filter_tags: list = None): assert not filter_tags, 'filter_tags not supported' ids = await cursor.pids_by_query( context['db'], 'promoted', valid_account(start_author, allow_empty=True), valid_permlink(start_permlink, allow_empty=True), valid_limit(limit, 100), valid_tag(tag, allow_empty=True)) return await load_posts(context['db'], ids, truncate_body=truncate_body) @return_error_info @nested_query_compat async def get_discussions_by_created(context, start_author: str = '', start_permlink: str = '', limit: int = 20, tag: str = None, truncate_body: int = 0, filter_tags: list = None): assert not filter_tags, 'filter_tags not supported' ids = await cursor.pids_by_query( context['db'], 'created', valid_account(start_author, allow_empty=True), valid_permlink(start_permlink, allow_empty=True), valid_limit(limit, 100), valid_tag(tag, allow_empty=True)) return await load_posts(context['db'], ids, truncate_body=truncate_body) @return_error_info @nested_query_compat async def get_discussions_by_blog(context, tag: str = None, start_author: str = '', start_permlink: str = '', 
limit: int = 20, truncate_body: int = 0, filter_tags: list = None): assert tag, '`tag` cannot be blank' assert not filter_tags, 'filter_tags not supported' ids = await cursor.pids_by_blog( context['db'], valid_account(tag), valid_account(start_author, allow_empty=True), valid_permlink(start_permlink, allow_empty=True), valid_limit(limit, 100)) return await load_posts(context['db'], ids, truncate_body=truncate_body) @return_error_info @nested_query_compat async def get_discussions_by_feed(context, tag: str = None, start_author: str = '', start_permlink: str = '', limit: int = 20, truncate_body: int = 0, filter_tags: list = None): assert tag, '`tag` cannot be blank' assert not filter_tags, 'filter_tags not supported' res = await cursor.pids_by_feed_with_reblog( context['db'], valid_account(tag), valid_account(start_author, allow_empty=True), valid_permlink(start_permlink, allow_empty=True), valid_limit(limit, 100)) return await load_posts_reblogs(context['db'], res, truncate_body=truncate_body) @return_error_info @nested_query_compat async def get_discussions_by_comments(context, start_author: str = None, start_permlink: str = '', limit: int = 20, truncate_body: int = 0, filter_tags: list = None): assert start_author, '`start_author` cannot be blank' assert not filter_tags, 'filter_tags not supported' ids = await cursor.pids_by_account_comments( context['db'], valid_account(start_author), valid_permlink(start_permlink, allow_empty=True), valid_limit(limit, 100)) return await load_posts(context['db'], ids, truncate_body=truncate_body) @return_error_info @nested_query_compat
MIT License
pyopenapi/pyswagger
pyswagger/utils.py
_diff_
python
def _diff_(src, dst, ret=None, jp=None, exclude=[], include=[]):
    def _dict_(src, dst, ret, jp):
        ss, sd = set(src.keys()), set(dst.keys())
        si, se = set(include or []), set(exclude or [])
        ss, sd = (ss & si, sd & si) if si else (ss, sd)
        ss, sd = (ss - se, sd - se) if se else (ss, sd)

        for k in sd - ss:
            ret.append((jp_compose(k, base=jp), None, None,))
        for k in ss - sd:
            ret.append((jp_compose(k, base=jp), None, None,))
        for k in ss & sd:
            _diff_(src[k], dst[k], ret, jp_compose(k, base=jp), exclude, include)

    def _list_(src, dst, ret, jp):
        if len(src) < len(dst):
            ret.append((jp, len(src), len(dst),))
        elif len(src) > len(dst):
            ret.append((jp, len(src), len(dst),))
        else:
            if len(src) == 0:
                return

            def r(x, y):
                if type(y) != type(x):
                    raise ValueError('different type: {0}, {1}'.format(type(y).__name__, type(x).__name__))
                return x

            ts = type(functools.reduce(r, src))
            td = type(functools.reduce(r, dst))

            while True:
                if issubclass(ts, six.string_types) and issubclass(td, six.string_types):
                    break
                if issubclass(ts, six.integer_types) and issubclass(td, six.integer_types):
                    break
                if ts == td:
                    break
                ret.append((jp, str(ts), str(td),))
                return

            if ts != dict:
                ss, sd = sorted(src), sorted(dst)
            else:
                ss, sd = src, dst

            for idx, (s, d) in enumerate(zip(src, dst)):
                _diff_(s, d, ret, jp_compose(str(idx), base=jp), exclude, include)

    ret = [] if ret == None else ret
    jp = '' if jp == None else jp

    if isinstance(src, dict):
        if not isinstance(dst, dict):
            ret.append((jp, type(src).__name__, type(dst).__name__,))
        else:
            _dict_(src, dst, ret, jp)
    elif isinstance(src, list):
        if not isinstance(dst, list):
            ret.append((jp, type(src).__name__, type(dst).__name__,))
        else:
            _list_(src, dst, ret, jp)
    elif src != dst:
        ret.append((jp, src, dst,))

    return ret
compare 2 dict/list, return a list containing json-pointer indicating
what's different, and what's diff exactly.

- list length diff: (jp, length of src, length of dst)
- dict key diff: (jp, None, None)
- when src is dict or list, and dst is not: (jp, type(src), type(dst))
- other: (jp, src, dst)
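A small worked illustration (not from the project's tests) of the tuples returned; with the default empty base pointer the paths come back without a leading slash, and set iteration order may vary.

src = {"name": "a", "tags": ["x", "y"], "size": 1}
dst = {"name": "a", "tags": ["x"], "size": 2}
print(_diff_(src, dst))
# expected entries (order may vary):
#   ('tags', 2, 1)   list length difference -> (jp, len(src), len(dst))
#   ('size', 1, 2)   plain value difference -> (jp, src, dst)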
https://github.com/pyopenapi/pyswagger/blob/333c4ca08e758cd2194943d9904a3eda3fe43977/pyswagger/utils.py#L482-L565
from __future__ import absolute_import from .consts import private from .errs import CycleDetectionError import six import imp import sys import datetime import re import os import operator import functools import collections def scope_compose(scope, name, sep=private.SCOPE_SEPARATOR): if name == None: new_scope = scope else: new_scope = scope if scope else name if scope and name: new_scope = scope + sep + name return new_scope def scope_split(scope, sep=private.SCOPE_SEPARATOR): return scope.split(sep) if scope else [None] class ScopeDict(dict): def __init__(self, *a, **k): self.__sep = private.SCOPE_SEPARATOR super(ScopeDict, self).__init__(*a, **k) @property def sep(self): raise TypeError('sep property is write-only') @sep.setter def sep(self, sep): self.__sep = sep def __getitem__(self, *keys): k = six.moves.reduce(lambda k1, k2: scope_compose(k1, k2, sep=self.__sep), keys[0]) if isinstance(keys[0], tuple) else keys[0] try: return super(ScopeDict, self).__getitem__(k) except KeyError as e: ret = [] for ik in self.keys(): if ik.endswith(k): ret.append(ik) if len(ret) == 1: return super(ScopeDict, self).__getitem__(ret[0]) elif len(ret) > 1: last_k = k.rsplit(self.__sep, 1)[-1] matched = [r for r in ret if r.rsplit(self.__sep, 1)[-1] == last_k] if len(matched) == 1: return super(ScopeDict, self).__getitem__(matched[0]) raise ValueError('Multiple occurrence of key: {0}'.format(k)) raise e class CycleGuard(object): def __init__(self): self.__visited = [] def update(self, obj): if obj in self.__visited: raise CycleDetectionError('Cycle detected: {0}'.format(getattr(obj, '$ref', None))) self.__visited.append(obj) class FixedTZ(datetime.tzinfo): def __init__(self, h=0, m=0): self.__offset = datetime.timedelta(hours=h, minutes=m) def utcoffset(self, dt): return self.__offset + self.dst(dt) def tzname(self, dt): return "UTC" def dst(self, dt): return datetime.timedelta(0) _iso8601_fmt = re.compile(''.join([ '(?P<year>\d{4})-(?P<month>\d{2})-(?P<day>\d{2})', 'T', '(?P<hour>\d{2}):(?P<minute>\d{2})(:(?P<second>\d{1,2})(\.(?P<microsecond>\d{1,6}))?)?', '(?P<tz>Z|[+-]\d{2}:\d{2})?' 
])) _iso8601_fmt_date = re.compile('(?P<year>\d{4})-(?P<month>\d{2})-(?P<day>\d{2})') def from_iso8601(s): m = _iso8601_fmt.match(s) if not m: m = _iso8601_fmt_date.match(s) if not m: raise ValueError('not a valid iso 8601 format string:[{0}]'.format(s)) g = m.groupdict() def _default_zero(key): v = g.get(key, None) return int(v) if v else 0 def _default_none(key): v = g.get(key, None) return int(v) if v else None year = _default_zero('year') month = _default_zero('month') day = _default_zero('day') hour = _default_none('hour') minute = _default_none('minute') second = _default_none('second') microsecond = g.get('microsecond', None) if microsecond is not None: microsecond = int(microsecond + '0' * (6 - len(microsecond))) tz_s = g.get('tz') if not (year and month and day): raise ValueError('missing y-m-d: [{0}]'.format(s)) if hour == None and minute == None and second == None: return datetime.datetime(year, month, day) tz = None if tz_s: if hour is None and minute is None: raise ValueError('missing h:m when tzinfo is provided: [{0}]'.format(s)) negtive = hh = mm = 0 if tz_s != 'Z': negtive = -1 if tz_s[0] == '-' else 1 hh = int(tz_s[1:3]) mm = int(tz_s[4:6]) if len(tz_s) > 5 else 0 tz = FixedTZ(h=hh*negtive, m=mm*negtive) return datetime.datetime( year=year, month=month, day=day, hour=hour or 0, minute=minute or 0, second=second or 0, microsecond=microsecond or 0, tzinfo=tz ) def import_string(name): mod = fp = None try: return sys.modules[name] except KeyError: pass try: fp, pathname, desc = imp.find_module(name) mod = imp.load_module(name, fp, pathname, desc) except ImportError: mod = None finally: if fp: fp.close() return mod def jp_compose(s, base=None): if s == None: return base ss = [s] if isinstance(s, six.string_types) else s ss = [s.replace('~', '~0').replace('/', '~1') for s in ss] if base: ss.insert(0, base) return '/'.join(ss) def jp_split(s): if s == '' or s == None: return [] def _decode(s): s = s.replace('~1', '/') return s.replace('~0', '~') return [_decode(ss) for ss in s.split('/')] def jr_split(s): p = six.moves.urllib.parse.urlparse(s) return ( normalize_url(six.moves.urllib.parse.urlunparse(p[:5]+('',))), '#'+p.fragment if p.fragment else '#' ) def deref(obj, guard=None): cur, guard = obj, guard or CycleGuard() guard.update(cur) while cur and getattr(cur, 'ref_obj', None) != None: cur = cur.ref_obj guard.update(cur) return cur def final(obj): return obj.final if getattr(obj, 'final', None) else obj def get_dict_as_tuple(d): for k, v in six.iteritems(d): return k, v return None def nv_tuple_list_replace(l, v): _found = False for i, x in enumerate(l): if x[0] == v[0]: l[i] = v _found = True if not _found: l.append(v) def path2url(p): if sys.version_info.major >= 3 and sys.version_info.minor >= 4: import pathlib return pathlib.Path(p).as_uri() else: return six.moves.urllib.parse.urljoin( 'file:', six.moves.urllib.request.pathname2url(p) ) _windows_path_prefix = re.compile(r'(^[A-Za-z]:\\)') def normalize_url(url): if not url: return url matched = _windows_path_prefix.match(url) if matched: return path2url(url) p = six.moves.urllib.parse.urlparse(url) if p.scheme == '': if p.netloc == '' and p.path != '': url = path2url(os.path.abspath(url)) else: raise ValueError('url should be a http-url or file path -- ' + url) return url def url_dirname(url): p = six.moves.urllib.parse.urlparse(url) for e in [private.FILE_EXT_JSON, private.FILE_EXT_YAML]: if p.path.endswith(e): return six.moves.urllib.parse.urlunparse( p[:2]+ (os.path.dirname(p.path),)+ p[3:] ) return url def 
url_join(url, path): p = six.moves.urllib.parse.urlparse(url) t = None if p.path and p.path[-1] == '/': if path and path[0] == '/': path = path[1:] t = ''.join([p.path, path]) else: t = ('' if path and path[0] == '/' else '/').join([p.path, path]) return six.moves.urllib.parse.urlunparse( p[:2]+ (t,)+ p[3:] ) def normalize_jr(jr, url=None): if jr == None: return jr idx = jr.find('#') path, jp = (jr[:idx], jr[idx+1:]) if idx != -1 else (jr, None) if len(path) > 0: p = six.moves.urllib.parse.urlparse(path) if p.scheme == '' and url: p = six.moves.urllib.parse.urlparse(url) path = six.moves.urllib.parse.urlunparse(p[:2]+('/'.join([os.path.dirname(p.path), path]),)+p[3:]) path = derelativise_url(path) else: path = url if path: return ''.join([path, '#', jp]) if jp else path else: return '#' + jp def _fullmatch(regex, chunk): m = re.match(regex, chunk) if m and m.span()[1] == len(chunk): return m def derelativise_url(url): parsed = six.moves.urllib.parse.urlparse(url) newpath=[] for chunk in parsed.path[1:].split('/'): if chunk == '.': continue elif chunk == '..': newpath=newpath[:-1] continue elif _fullmatch(r'\.{3,}', chunk) is not None: newpath=newpath[:-1] continue newpath += [chunk] return six.moves.urllib.parse.urlunparse(parsed[:2]+('/'+('/'.join(newpath)),)+parsed[3:]) def get_swagger_version(obj): if isinstance(obj, dict): if 'swaggerVersion' in obj: return obj['swaggerVersion'] elif 'swagger' in obj: return obj['swagger'] return None else: return obj.swaggerVersion if hasattr(obj, 'swaggerVersion') else obj.swagger def walk(start, ofn, cyc=None): ctx, stk = {}, [start] cyc = [] if cyc == None else cyc while len(stk): top = stk[-1] if top not in ctx: ctx.update({top:list(ofn(top))}) if len(ctx[top]): n = ctx[top][0] if n in stk: nc = stk[stk.index(n):] ni = nc.index(min(nc)) nc = nc[ni:] + nc[:ni] + [min(nc)] if nc not in cyc: cyc.append(nc) ctx[top].pop(0) else: stk.append(n) else: ctx.pop(top) stk.pop() if len(stk): ctx[stk[-1]].remove(top) return cyc
MIT License
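A minimal usage sketch for the _diff_ entry above; the import path follows the function URL and the values are illustrative (output order may vary because keys are iterated as sets).

from pyswagger.utils import _diff_   # assumed importable; path per the function URL above

src = {'a': 1, 'b': [1, 2, 3], 'c': {'x': 'old'}}
dst = {'a': 1, 'b': [1, 2], 'c': {'x': 'new'}, 'd': True}

for jp, s, d in _diff_(src, dst):
    print(jp, s, d)

# Expected tuples, per the docstring conventions (order may vary):
#   ('d', None, None)        # dict key present only on one side
#   ('b', 3, 2)              # list length diff
#   ('c/x', 'old', 'new')    # plain value diff, addressed by a JSON pointer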
cerenaut/sparse-unsupervised-capsules
models/layers/layers.py
_squash
python
def _squash(input_tensor): with tf.name_scope('norm_non_linearity'): norm = tf.norm(input_tensor, axis=2, keepdims=True) norm_squared = norm * norm return (input_tensor / norm) * (norm_squared / (1 + norm_squared))
Applies the norm nonlinearity (squash) to a capsule layer. Args: input_tensor: Input tensor. Shape is [batch, num_channels, num_atoms] for a fully connected capsule layer or [batch, num_channels, num_atoms, height, width] for a convolutional capsule layer. Returns: A tensor with the same shape as the input (rank 3), the output of this layer.
https://github.com/cerenaut/sparse-unsupervised-capsules/blob/5b98fbb84408d566ae4ef0878008931a65832386/models/layers/layers.py#L36-L51
from __future__ import absolute_import from __future__ import division from __future__ import print_function import math import numpy as np import tensorflow as tf from models.layers import variables
Apache License 2.0
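A NumPy sketch of the same squash nonlinearity shown in the _squash entry above, useful for checking the math outside a TensorFlow graph; the epsilon guard is an addition, not part of the original code.

import numpy as np

def squash_np(x, axis=2, eps=1e-12):
    # (v / ||v||) * (||v||^2 / (1 + ||v||^2)), applied over the atom axis
    norm = np.linalg.norm(x, axis=axis, keepdims=True)
    norm_sq = norm * norm
    return (x / (norm + eps)) * (norm_sq / (1.0 + norm_sq))

x = np.random.randn(8, 10, 16)   # [batch, num_channels, num_atoms]
y = squash_np(x)
assert np.all(np.linalg.norm(y, axis=2) < 1.0)   # squashed vectors always have norm < 1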
nwunderly/starlette-discord
starlette_discord/client.py
DiscordOAuthSession.cached_user
python
def cached_user(self): return self._cached_user
:class:`dict`: The session's cached user, if an `identify()` request has previously been made.
https://github.com/nwunderly/starlette-discord/blob/c1c779698c25d3295828bc3a38a54c513b10a95b/starlette_discord/client.py#L76-L78
from starlette.responses import RedirectResponse from oauthlib.common import generate_token from .oauth import OAuth2Session from .models import User, Guild, Connection DISCORD_URL = 'https://discord.com' API_URL = DISCORD_URL + '/api/v8' class DiscordOAuthSession(OAuth2Session): def __init__(self, client_id, client_secret, scope, redirect_uri, *, code, token): if (not (code or token)) or (code and token): raise ValueError("Either 'code' or 'token' parameter must be provided, but not both.") elif token: if not isinstance(token, dict): raise TypeError("Parameter 'token' must be an instance of dict with at least the 'access_token' key.'") if 'access_token' not in token: raise ValueError("Parameter 'token' requires 'access_token' key.") elif not token.get('token_type'): token['token_type'] = 'Bearer' self._discord_auth_code = code self._discord_client_secret = client_secret self._discord_token = token self._cached_user = None self._cached_guilds = None self._cached_connections = None super().__init__( client_id=client_id, scope=scope, redirect_uri=redirect_uri, token=token ) @property def token(self): return super().token @token.setter def token(self, value): self._client.token = value self._client.populate_token_attributes(value) @property
MIT License
westpa/westpa
src/oldtools/aframe/iter_range.py
IterRangeMixin.n_iter_blocks
python
def n_iter_blocks(self): npoints = self.last_iter - self.first_iter + 1 if npoints % self.iter_step == 0: return npoints // self.iter_step else: return npoints // self.iter_step + 1
Return the number of blocks of iterations (as returned by ``iter_block_iter``) selected by --first/--last/--step.
https://github.com/westpa/westpa/blob/cda177c5dea2cee571d71c4b04fcc625dc5f689c/src/oldtools/aframe/iter_range.py#L85-L91
import logging log = logging.getLogger(__name__) import numpy import westpa from oldtools.aframe import AnalysisMixin, ArgumentError class IterRangeMixin(AnalysisMixin): def __init__(self): super(IterRangeMixin,self).__init__() self.first_iter = None self.last_iter = None self.iter_step = 1 include_args = self.include_args.setdefault('IterRangeMixin',{}) include_args.setdefault('first_iter', True) include_args.setdefault('last_iter', True) include_args.setdefault('iter_step',True) def add_args(self, parser, upcall = True): if upcall: try: upfunc = super(IterRangeMixin,self).add_args except AttributeError: pass else: upfunc(parser) group = parser.add_argument_group('analysis range') if self.include_args['IterRangeMixin']['first_iter']: group.add_argument('--start', '--begin', '--first', dest='first_iter', type=int, metavar='N_ITER', default=1, help='''Begin analysis at iteration N_ITER (default: %(default)d).''') if self.include_args['IterRangeMixin']['last_iter']: group.add_argument('--stop', '--end', '--last', dest='last_iter', type=int, metavar='N_ITER', help='''Conclude analysis with N_ITER, inclusive (default: last completed iteration).''') if self.include_args['IterRangeMixin']['iter_step']: group.add_argument('--step', dest='iter_step', type=int, metavar='STEP', help='''Analyze/report in blocks of STEP iterations.''') def process_args(self, args, upcall = True): if self.include_args['IterRangeMixin']['first_iter']: self.first_iter = args.first_iter or 1 if self.include_args['IterRangeMixin']['last_iter']: self.last_iter = args.last_iter if self.include_args['IterRangeMixin']['iter_step']: self.iter_step = args.iter_step or 1 if upcall: try: upfunc = super(IterRangeMixin,self).process_args except AttributeError: pass else: upfunc(args) def check_iter_range(self): assert hasattr(self, 'data_manager') and self.data_manager is not None self.first_iter = int(max(self.first_iter, 1)) if self.last_iter is None or self.last_iter > self.data_manager.current_iteration - 1: self.last_iter = int(self.data_manager.current_iteration - 1) if self.first_iter == self.last_iter: raise ArgumentError('first and last iterations are the same') westpa.rc.pstatus('Processing iterations from {self.first_iter:d} to {self.last_iter:d}, inclusive (step size {self.iter_step:d})'.format(self=self)) def iter_block_iter(self): for blkfirst in range(self.first_iter, self.last_iter+1, self.iter_step): yield(blkfirst, min(self.last_iter, blkfirst+self.iter_step-1)+1)
MIT License
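A small worked example of the block-count arithmetic used by n_iter_blocks above; the values are hypothetical, the real attributes come from --first/--last/--step.

first_iter, last_iter, iter_step = 1, 100, 7
npoints = last_iter - first_iter + 1                                   # 100 iterations selected
blocks = npoints // iter_step + (0 if npoints % iter_step == 0 else 1)
assert blocks == 15                                                    # 14 full blocks of 7, plus one block of 2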
baiwenjia/ukbb_cardiac
common/image_utils.py
data_augmenter
python
def data_augmenter(image, label, shift, rotate, scale, intensity, flip): image2 = np.zeros(image.shape, dtype=np.float32) label2 = np.zeros(label.shape, dtype=np.int32) for i in range(image.shape[0]): shift_val = [np.clip(np.random.normal(), -3, 3) * shift, np.clip(np.random.normal(), -3, 3) * shift] rotate_val = np.clip(np.random.normal(), -3, 3) * rotate scale_val = 1 + np.clip(np.random.normal(), -3, 3) * scale intensity_val = 1 + np.clip(np.random.normal(), -3, 3) * intensity row, col = image.shape[1:3] M = cv2.getRotationMatrix2D((row / 2, col / 2), rotate_val, 1.0 / scale_val) M[:, 2] += shift_val for c in range(image.shape[3]): image2[i, :, :, c] = ndimage.interpolation.affine_transform(image[i, :, :, c], M[:, :2], M[:, 2], order=1) label2[i, :, :] = ndimage.interpolation.affine_transform(label[i, :, :], M[:, :2], M[:, 2], order=0) image2[i] *= intensity_val if flip: if np.random.uniform() >= 0.5: image2[i] = image2[i, ::-1, :, :] label2[i] = label2[i, ::-1, :] else: image2[i] = image2[i, :, ::-1, :] label2[i] = label2[i, :, ::-1] return image2, label2
Online data augmentation. Performs an affine transformation on image and label, which are a 4D tensor of shape (N, H, W, C) and a 3D tensor of shape (N, H, W), respectively.
https://github.com/baiwenjia/ukbb_cardiac/blob/2b6d6371be9a666a41627926324030c31897f877/common/image_utils.py#L80-L120
import cv2 import numpy as np import nibabel as nib import tensorflow as tf from scipy import ndimage import scipy.ndimage.measurements as measure def tf_categorical_accuracy(pred, truth): return tf.reduce_mean(tf.cast(tf.equal(pred, truth), dtype=tf.float32)) def tf_categorical_dice(pred, truth, k): A = tf.cast(tf.equal(pred, k), dtype=tf.float32) B = tf.cast(tf.equal(truth, k), dtype=tf.float32) return 2 * tf.reduce_sum(tf.multiply(A, B)) / (tf.reduce_sum(A) + tf.reduce_sum(B)) def crop_image(image, cx, cy, size): X, Y = image.shape[:2] r = int(size / 2) x1, x2 = cx - r, cx + r y1, y2 = cy - r, cy + r x1_, x2_ = max(x1, 0), min(x2, X) y1_, y2_ = max(y1, 0), min(y2, Y) crop = image[x1_: x2_, y1_: y2_] if crop.ndim == 3: crop = np.pad(crop, ((x1_ - x1, x2 - x2_), (y1_ - y1, y2 - y2_), (0, 0)), 'constant') elif crop.ndim == 4: crop = np.pad(crop, ((x1_ - x1, x2 - x2_), (y1_ - y1, y2 - y2_), (0, 0), (0, 0)), 'constant') else: print('Error: unsupported dimension, crop.ndim = {0}.'.format(crop.ndim)) exit(0) return crop def normalise_intensity(image, thres_roi=10.0): val_l = np.percentile(image, thres_roi) roi = (image >= val_l) mu, sigma = np.mean(image[roi]), np.std(image[roi]) eps = 1e-6 image2 = (image - mu) / (sigma + eps) return image2 def rescale_intensity(image, thres=(1.0, 99.0)): val_l, val_h = np.percentile(image, thres) image2 = image image2[image < val_l] = val_l image2[image > val_h] = val_h image2 = (image2.astype(np.float32) - val_l) / (val_h - val_l) return image2
Apache License 2.0
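A hedged usage sketch for data_augmenter above; the import path mirrors the file layout named in the entry, the shapes follow the docstring, and the augmentation magnitudes are only examples.

import numpy as np
from common.image_utils import data_augmenter   # assumed importable; path per common/image_utils.py above

image = np.random.rand(2, 192, 192, 1).astype(np.float32)                # (N, H, W, C)
label = np.random.randint(0, 4, size=(2, 192, 192)).astype(np.int32)     # (N, H, W)

aug_image, aug_label = data_augmenter(
    image, label,
    shift=10,        # pixels, scaled by a clipped normal draw
    rotate=10,       # degrees
    scale=0.1,       # roughly +/- 10% zoom
    intensity=0.1,   # roughly +/- 10% brightness
    flip=False)
assert aug_image.shape == image.shape and aug_label.shape == label.shape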
fangshi1991/zipline_chstock
zipline/protocol.py
SIDData.datetime
python
def datetime(self): return self.dt
Provides an alias from data['foo'].datetime -> data['foo'].dt. `datetime` was previously provided by adding a separate `datetime` member of the SIDData object via a generator that wrapped the incoming data feed and added the field to each equity event. This alias is intended to be temporary, to provide backwards compatibility with existing algorithms, but should be considered deprecated, and may be removed in the future.
https://github.com/fangshi1991/zipline_chstock/blob/7911642780fa57f92e1705b9c0acaeb837b3d98f/zipline/protocol.py#L313-L325
from copy import copy from six import iteritems, iterkeys import pandas as pd import numpy as np from .utils.enum import enum from .utils.math_utils import nanstd, nanmean, nansum from zipline.utils.algo_instance import get_algo_instance from zipline.utils.serialization_utils import ( VERSION_LABEL ) DATASOURCE_TYPE = enum( 'AS_TRADED_EQUITY', 'MERGER', 'SPLIT', 'DIVIDEND', 'TRADE', 'TRANSACTION', 'ORDER', 'EMPTY', 'DONE', 'CUSTOM', 'BENCHMARK', 'COMMISSION', 'CLOSE_POSITION' ) DIVIDEND_FIELDS = [ 'declared_date', 'ex_date', 'gross_amount', 'net_amount', 'pay_date', 'payment_sid', 'ratio', 'sid', ] DIVIDEND_PAYMENT_FIELDS = [ 'id', 'payment_sid', 'cash_amount', 'share_count', ] def dividend_payment(data=None): return pd.Series( data=data, name=data['id'] if data is not None else None, index=DIVIDEND_PAYMENT_FIELDS, dtype=object, ) class Event(object): def __init__(self, initial_values=None): if initial_values: self.__dict__ = initial_values def __getitem__(self, name): return getattr(self, name) def __setitem__(self, name, value): setattr(self, name, value) def __delitem__(self, name): delattr(self, name) def keys(self): return self.__dict__.keys() def __eq__(self, other): return hasattr(other, '__dict__') and self.__dict__ == other.__dict__ def __contains__(self, name): return name in self.__dict__ def __repr__(self): return "Event({0})".format(self.__dict__) def to_series(self, index=None): return pd.Series(self.__dict__, index=index) class Order(Event): pass class Portfolio(object): def __init__(self): self.capital_used = 0.0 self.starting_cash = 0.0 self.portfolio_value = 0.0 self.pnl = 0.0 self.returns = 0.0 self.cash = 0.0 self.positions = Positions() self.start_date = None self.positions_value = 0.0 def __getitem__(self, key): return self.__dict__[key] def __repr__(self): return "Portfolio({0})".format(self.__dict__) def __getstate__(self): state_dict = copy(self.__dict__) state_dict['positions'] = dict(self.positions) STATE_VERSION = 1 state_dict[VERSION_LABEL] = STATE_VERSION return state_dict def __setstate__(self, state): OLDEST_SUPPORTED_STATE = 1 version = state.pop(VERSION_LABEL) if version < OLDEST_SUPPORTED_STATE: raise BaseException("Portfolio saved state is too old.") self.positions = Positions() self.positions.update(state.pop('positions')) self.__dict__.update(state) class Account(object): def __init__(self): self.settled_cash = 0.0 self.accrued_interest = 0.0 self.buying_power = float('inf') self.equity_with_loan = 0.0 self.total_positions_value = 0.0 self.regt_equity = 0.0 self.regt_margin = float('inf') self.initial_margin_requirement = 0.0 self.maintenance_margin_requirement = 0.0 self.available_funds = 0.0 self.excess_liquidity = 0.0 self.cushion = 0.0 self.day_trades_remaining = float('inf') self.leverage = 0.0 self.net_leverage = 0.0 self.net_liquidation = 0.0 def __getitem__(self, key): return self.__dict__[key] def __repr__(self): return "Account({0})".format(self.__dict__) def __getstate__(self): state_dict = copy(self.__dict__) STATE_VERSION = 1 state_dict[VERSION_LABEL] = STATE_VERSION return state_dict def __setstate__(self, state): OLDEST_SUPPORTED_STATE = 1 version = state.pop(VERSION_LABEL) if version < OLDEST_SUPPORTED_STATE: raise BaseException("Account saved state is too old.") self.__dict__.update(state) class Position(object): def __init__(self, sid): self.sid = sid self.amount = 0 self.cost_basis = 0.0 self.last_sale_price = 0.0 def __getitem__(self, key): return self.__dict__[key] def __repr__(self): return "Position({0})".format(self.__dict__) 
def __getstate__(self): state_dict = copy(self.__dict__) STATE_VERSION = 1 state_dict[VERSION_LABEL] = STATE_VERSION return state_dict def __setstate__(self, state): OLDEST_SUPPORTED_STATE = 1 version = state.pop(VERSION_LABEL) if version < OLDEST_SUPPORTED_STATE: raise BaseException("Protocol Position saved state is too old.") self.__dict__.update(state) class Positions(dict): def __missing__(self, key): pos = Position(key) self[key] = pos return pos class SIDData(object): _history_cache_dt = None _history_cache = {} _returns_cache_dt = None _returns_cache = None _minute_bar_cache_dt = None _minute_bar_cache = {} def __init__(self, sid, initial_values=None): self._sid = sid self._freqstr = None self._initial_len = len(self.__dict__) + 1 if initial_values: self.__dict__.update(initial_values) @property
Apache License 2.0
jest-community/jest-pytest
src/__tests__/integration/home-assistant/homeassistant/components/device_tracker/linksys_ap.py
LinksysAPDeviceScanner.get_device_name
python
def get_device_name(self, device): return None
Return the name (if known) of the device. Linksys does not provide an API to get a name for a device, so we just return None.
https://github.com/jest-community/jest-pytest/blob/b197b0b31e3ca5c411202d97583cbd2d2b0b92e9/src/__tests__/integration/home-assistant/homeassistant/components/device_tracker/linksys_ap.py#L65-L72
import base64 import logging import requests import voluptuous as vol import homeassistant.helpers.config_validation as cv from homeassistant.components.device_tracker import ( DOMAIN, PLATFORM_SCHEMA, DeviceScanner) from homeassistant.const import ( CONF_HOST, CONF_PASSWORD, CONF_USERNAME, CONF_VERIFY_SSL) INTERFACES = 2 DEFAULT_TIMEOUT = 10 REQUIREMENTS = ['beautifulsoup4==4.6.0'] _LOGGER = logging.getLogger(__name__) PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend({ vol.Required(CONF_HOST): cv.string, vol.Required(CONF_PASSWORD): cv.string, vol.Required(CONF_USERNAME): cv.string, vol.Optional(CONF_VERIFY_SSL, default=True): cv.boolean, }) def get_scanner(hass, config): try: return LinksysAPDeviceScanner(config[DOMAIN]) except ConnectionError: return None class LinksysAPDeviceScanner(DeviceScanner): def __init__(self, config): self.host = config[CONF_HOST] self.username = config[CONF_USERNAME] self.password = config[CONF_PASSWORD] self.verify_ssl = config[CONF_VERIFY_SSL] self.last_results = [] response = self._make_request() if not response.status_code == 200: raise ConnectionError("Cannot connect to Linksys Access Point") def scan_devices(self): self._update_info() return self.last_results
MIT License
pytransitions/transitions
transitions/core.py
State.add_callback
python
def add_callback(self, trigger, func): callback_list = getattr(self, 'on_' + trigger) callback_list.append(func)
Add a new enter or exit callback. Args: trigger (str): The type of triggering event. Must be one of 'enter' or 'exit'. func (str): The name of the callback function.
https://github.com/pytransitions/transitions/blob/1893a822763f9266a9691f0b285e1d77ff755082/transitions/core.py#L137-L145
try: from builtins import object except ImportError: pass try: from enum import Enum, EnumMeta except ImportError: class Enum: pass class EnumMeta: pass import inspect import itertools import logging from collections import OrderedDict, defaultdict, deque from functools import partial from six import string_types import warnings _LOGGER = logging.getLogger(__name__) _LOGGER.addHandler(logging.NullHandler()) warnings.filterwarnings(action='default', message=r".*transitions version.*", category=DeprecationWarning) def listify(obj): if obj is None: return [] try: return obj if isinstance(obj, (list, tuple, EnumMeta)) else [obj] except ReferenceError: return [obj] def _prep_ordered_arg(desired_length, arguments=None): arguments = listify(arguments) if arguments is not None else [None] if len(arguments) != desired_length and len(arguments) != 1: raise ValueError("Argument length must be either 1 or the same length as " "the number of transitions.") if len(arguments) == 1: return arguments * desired_length return arguments class State(object): dynamic_methods = ['on_enter', 'on_exit'] def __init__(self, name, on_enter=None, on_exit=None, ignore_invalid_triggers=None): self._name = name self.ignore_invalid_triggers = ignore_invalid_triggers self.on_enter = listify(on_enter) if on_enter else [] self.on_exit = listify(on_exit) if on_exit else [] @property def name(self): if isinstance(self._name, Enum): return self._name.name else: return self._name @property def value(self): return self._name def enter(self, event_data): _LOGGER.debug("%sEntering state %s. Processing callbacks...", event_data.machine.name, self.name) event_data.machine.callbacks(self.on_enter, event_data) _LOGGER.info("%sFinished processing state %s enter callbacks.", event_data.machine.name, self.name) def exit(self, event_data): _LOGGER.debug("%sExiting state %s. Processing callbacks...", event_data.machine.name, self.name) event_data.machine.callbacks(self.on_exit, event_data) _LOGGER.info("%sFinished processing state %s exit callbacks.", event_data.machine.name, self.name)
MIT License
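A minimal sketch for State.add_callback above, using the State class from transitions/core.py shown in the context; the callback names are hypothetical method names that a Machine would resolve later.

from transitions.core import State

s = State('solid')
s.add_callback('enter', 'on_melt_check')     # appended to s.on_enter
s.add_callback('exit', 'on_freeze_check')    # appended to s.on_exit
assert s.on_enter == ['on_melt_check'] and s.on_exit == ['on_freeze_check']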
cisco/mindmeld
examples/custom_action/example_server/swagger_server/models/params.py
Params.time_zone
python
def time_zone(self, time_zone: str): self._time_zone = time_zone
Sets the time_zone of this Params. The time zone of the request :param time_zone: The time_zone of this Params. :type time_zone: str
https://github.com/cisco/mindmeld/blob/d3a0606b5eaa92733dd12674438d45de4b124c63/examples/custom_action/example_server/swagger_server/models/params.py#L163-L172
from __future__ import absolute_import from typing import List from .base_model_ import Model from .. import util class Params(Model): def __init__( self, target_dialogue_state: str = None, dynamic_resource: object = None, allowed_intents: List[str] = None, time_zone: str = None, language: str = None, locale: str = None, timestamp: int = None, ): self.swagger_types = { "target_dialogue_state": str, "dynamic_resource": object, "allowed_intents": List[str], "time_zone": str, "language": str, "locale": str, "timestamp": int, } self.attribute_map = { "target_dialogue_state": "target_dialogue_state", "dynamic_resource": "dynamic_resource", "allowed_intents": "allowed_intents", "time_zone": "time_zone", "language": "language", "locale": "locale", "timestamp": "timestamp", } self._target_dialogue_state = target_dialogue_state self._dynamic_resource = dynamic_resource self._allowed_intents = allowed_intents self._time_zone = time_zone self._language = language self._locale = locale self._timestamp = timestamp @classmethod def from_dict(cls, dikt) -> "Params": return util.deserialize_model(dikt, cls) @property def target_dialogue_state(self) -> str: return self._target_dialogue_state @target_dialogue_state.setter def target_dialogue_state(self, target_dialogue_state: str): self._target_dialogue_state = target_dialogue_state @property def dynamic_resource(self) -> object: return self._dynamic_resource @dynamic_resource.setter def dynamic_resource(self, dynamic_resource: object): self._dynamic_resource = dynamic_resource @property def allowed_intents(self) -> List[str]: return self._allowed_intents @allowed_intents.setter def allowed_intents(self, allowed_intents: List[str]): self._allowed_intents = allowed_intents @property def time_zone(self) -> str: return self._time_zone @time_zone.setter
Apache License 2.0
touwastar/galaxy_plugin_bethesda
betty/aiohttp/client.py
ClientSession.version
python
def version(self) -> Tuple[int, int]: return self._version
The session HTTP protocol version.
https://github.com/touwastar/galaxy_plugin_bethesda/blob/d505f8286248cc623cd5ee9b1e0cd3bfa840c590/betty/aiohttp/client.py#L922-L924
import asyncio import base64 import hashlib import json import os import sys import traceback import warnings from types import SimpleNamespace, TracebackType from typing import ( Any, Coroutine, Generator, Generic, Iterable, List, Mapping, Optional, Set, Tuple, Type, TypeVar, Union, ) import attr from multidict import CIMultiDict, MultiDict, MultiDictProxy, istr from yarl import URL from . import hdrs, http, payload from .abc import AbstractCookieJar from .client_exceptions import ( ClientConnectionError, ClientConnectorCertificateError, ClientConnectorError, ClientConnectorSSLError, ClientError, ClientHttpProxyError, ClientOSError, ClientPayloadError, ClientProxyConnectionError, ClientResponseError, ClientSSLError, ContentTypeError, InvalidURL, ServerConnectionError, ServerDisconnectedError, ServerFingerprintMismatch, ServerTimeoutError, TooManyRedirects, WSServerHandshakeError, ) from .client_reqrep import ( ClientRequest, ClientResponse, Fingerprint, RequestInfo, _merge_ssl_params, ) from .client_ws import ClientWebSocketResponse from .connector import BaseConnector, TCPConnector, UnixConnector from .cookiejar import CookieJar from .helpers import ( DEBUG, PY_36, BasicAuth, CeilTimeout, TimeoutHandle, get_running_loop, proxies_from_env, sentinel, strip_auth_from_url, ) from .http import WS_KEY, HttpVersion, WebSocketReader, WebSocketWriter from .http_websocket import ( WSHandshakeError, WSMessage, ws_ext_gen, ws_ext_parse, ) from .streams import FlowControlDataQueue from .tracing import Trace, TraceConfig from .typedefs import JSONEncoder, LooseCookies, LooseHeaders, StrOrURL __all__ = ( 'ClientConnectionError', 'ClientConnectorCertificateError', 'ClientConnectorError', 'ClientConnectorSSLError', 'ClientError', 'ClientHttpProxyError', 'ClientOSError', 'ClientPayloadError', 'ClientProxyConnectionError', 'ClientResponseError', 'ClientSSLError', 'ContentTypeError', 'InvalidURL', 'ServerConnectionError', 'ServerDisconnectedError', 'ServerFingerprintMismatch', 'ServerTimeoutError', 'TooManyRedirects', 'WSServerHandshakeError', 'ClientRequest', 'ClientResponse', 'Fingerprint', 'RequestInfo', 'BaseConnector', 'TCPConnector', 'UnixConnector', 'ClientWebSocketResponse', 'ClientSession', 'ClientTimeout', 'request') try: from ssl import SSLContext except ImportError: SSLContext = object @attr.s(frozen=True, slots=True) class ClientTimeout: total = attr.ib(type=Optional[float], default=None) connect = attr.ib(type=Optional[float], default=None) sock_read = attr.ib(type=Optional[float], default=None) sock_connect = attr.ib(type=Optional[float], default=None) DEFAULT_TIMEOUT = ClientTimeout(total=5*60) _RetType = TypeVar('_RetType') class ClientSession: ATTRS = frozenset([ '_source_traceback', '_connector', 'requote_redirect_url', '_loop', '_cookie_jar', '_connector_owner', '_default_auth', '_version', '_json_serialize', '_requote_redirect_url', '_timeout', '_raise_for_status', '_auto_decompress', '_trust_env', '_default_headers', '_skip_auto_headers', '_request_class', '_response_class', '_ws_response_class', '_trace_configs']) _source_traceback = None _connector = None def __init__(self, *, connector: Optional[BaseConnector]=None, loop: Optional[asyncio.AbstractEventLoop]=None, cookies: Optional[LooseCookies]=None, headers: Optional[LooseHeaders]=None, skip_auto_headers: Optional[Iterable[str]]=None, auth: Optional[BasicAuth]=None, json_serialize: JSONEncoder=json.dumps, request_class: Type[ClientRequest]=ClientRequest, response_class: Type[ClientResponse]=ClientResponse, ws_response_class: 
Type[ClientWebSocketResponse]=ClientWebSocketResponse, version: HttpVersion=http.HttpVersion11, cookie_jar: Optional[AbstractCookieJar]=None, connector_owner: bool=True, raise_for_status: bool=False, read_timeout: Union[float, object]=sentinel, conn_timeout: Optional[float]=None, timeout: Union[object, ClientTimeout]=sentinel, auto_decompress: bool=True, trust_env: bool=False, requote_redirect_url: bool=True, trace_configs: Optional[List[TraceConfig]]=None) -> None: if loop is None: if connector is not None: loop = connector._loop loop = get_running_loop(loop) if connector is None: connector = TCPConnector(loop=loop) if connector._loop is not loop: raise RuntimeError( "Session and connector has to use same event loop") self._loop = loop if loop.get_debug(): self._source_traceback = traceback.extract_stack(sys._getframe(1)) if cookie_jar is None: cookie_jar = CookieJar(loop=loop) self._cookie_jar = cookie_jar if cookies is not None: self._cookie_jar.update_cookies(cookies) self._connector = connector self._connector_owner = connector_owner self._default_auth = auth self._version = version self._json_serialize = json_serialize if timeout is sentinel: self._timeout = DEFAULT_TIMEOUT if read_timeout is not sentinel: warnings.warn("read_timeout is deprecated, " "use timeout argument instead", DeprecationWarning, stacklevel=2) self._timeout = attr.evolve(self._timeout, total=read_timeout) if conn_timeout is not None: self._timeout = attr.evolve(self._timeout, connect=conn_timeout) warnings.warn("conn_timeout is deprecated, " "use timeout argument instead", DeprecationWarning, stacklevel=2) else: self._timeout = timeout if read_timeout is not sentinel: raise ValueError("read_timeout and timeout parameters " "conflict, please setup " "timeout.read") if conn_timeout is not None: raise ValueError("conn_timeout and timeout parameters " "conflict, please setup " "timeout.connect") self._raise_for_status = raise_for_status self._auto_decompress = auto_decompress self._trust_env = trust_env self._requote_redirect_url = requote_redirect_url if headers: headers = CIMultiDict(headers) else: headers = CIMultiDict() self._default_headers = headers if skip_auto_headers is not None: self._skip_auto_headers = frozenset([istr(i) for i in skip_auto_headers]) else: self._skip_auto_headers = frozenset() self._request_class = request_class self._response_class = response_class self._ws_response_class = ws_response_class self._trace_configs = trace_configs or [] for trace_config in self._trace_configs: trace_config.freeze() def __init_subclass__(cls: Type['ClientSession']) -> None: warnings.warn("Inheritance class {} from ClientSession " "is discouraged".format(cls.__name__), DeprecationWarning, stacklevel=2) if DEBUG: def __setattr__(self, name: str, val: Any) -> None: if name not in self.ATTRS: warnings.warn("Setting custom ClientSession.{} attribute " "is discouraged".format(name), DeprecationWarning, stacklevel=2) super().__setattr__(name, val) def __del__(self, _warnings: Any=warnings) -> None: if not self.closed: if PY_36: kwargs = {'source': self} else: kwargs = {} _warnings.warn("Unclosed client session {!r}".format(self), ResourceWarning, **kwargs) context = {'client_session': self, 'message': 'Unclosed client session'} if self._source_traceback is not None: context['source_traceback'] = self._source_traceback self._loop.call_exception_handler(context) def request(self, method: str, url: StrOrURL, **kwargs: Any) -> '_RequestContextManager': return _RequestContextManager(self._request(method, url, **kwargs)) 
async def _request( self, method: str, str_or_url: StrOrURL, *, params: Optional[Mapping[str, str]]=None, data: Any=None, json: Any=None, cookies: Optional[LooseCookies]=None, headers: LooseHeaders=None, skip_auto_headers: Optional[Iterable[str]]=None, auth: Optional[BasicAuth]=None, allow_redirects: bool=True, max_redirects: int=10, compress: Optional[str]=None, chunked: Optional[bool]=None, expect100: bool=False, raise_for_status: Optional[bool]=None, read_until_eof: bool=True, proxy: Optional[StrOrURL]=None, proxy_auth: Optional[BasicAuth]=None, timeout: Union[ClientTimeout, object]=sentinel, verify_ssl: Optional[bool]=None, fingerprint: Optional[bytes]=None, ssl_context: Optional[SSLContext]=None, ssl: Optional[Union[SSLContext, bool, Fingerprint]]=None, proxy_headers: Optional[LooseHeaders]=None, trace_request_ctx: Optional[SimpleNamespace]=None ) -> ClientResponse: if self.closed: raise RuntimeError('Session is closed') ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint) if data is not None and json is not None: raise ValueError( 'data and json parameters can not be used at the same time') elif json is not None: data = payload.JsonPayload(json, dumps=self._json_serialize) if not isinstance(chunked, bool) and chunked is not None: warnings.warn( 'Chunk size is deprecated #1615', DeprecationWarning) redirects = 0 history = [] version = self._version headers = self._prepare_headers(headers) proxy_headers = self._prepare_headers(proxy_headers) try: url = URL(str_or_url) except ValueError: raise InvalidURL(str_or_url) skip_headers = set(self._skip_auto_headers) if skip_auto_headers is not None: for i in skip_auto_headers: skip_headers.add(istr(i)) if proxy is not None: try: proxy = URL(proxy) except ValueError: raise InvalidURL(proxy) if timeout is sentinel: real_timeout = self._timeout else: if not isinstance(timeout, ClientTimeout): real_timeout = ClientTimeout(total=timeout) else: real_timeout = timeout tm = TimeoutHandle(self._loop, real_timeout.total) handle = tm.start() traces = [ Trace( self, trace_config, trace_config.trace_config_ctx( trace_request_ctx=trace_request_ctx) ) for trace_config in self._trace_configs ] for trace in traces: await trace.send_request_start( method, url, headers ) timer = tm.timer() try: with timer: while True: url, auth_from_url = strip_auth_from_url(url) if auth and auth_from_url: raise ValueError("Cannot combine AUTH argument with " "credentials encoded in URL") if auth is None: auth = auth_from_url if auth is None: auth = self._default_auth if (headers is not None and auth is not None and hdrs.AUTHORIZATION in headers): raise ValueError("Cannot combine AUTHORIZATION header " "with AUTH argument or credentials " "encoded in URL") session_cookies = self._cookie_jar.filter_cookies(url) if cookies is not None: tmp_cookie_jar = CookieJar() tmp_cookie_jar.update_cookies(cookies) req_cookies = tmp_cookie_jar.filter_cookies(url) if req_cookies: session_cookies.load(req_cookies) cookies = session_cookies if proxy is not None: proxy = URL(proxy) elif self._trust_env: for scheme, proxy_info in proxies_from_env().items(): if scheme == url.scheme: proxy = proxy_info.proxy proxy_auth = proxy_info.proxy_auth break req = self._request_class( method, url, params=params, headers=headers, skip_auto_headers=skip_headers, data=data, cookies=cookies, auth=auth, version=version, compress=compress, chunked=chunked, expect100=expect100, loop=self._loop, response_class=self._response_class, proxy=proxy, proxy_auth=proxy_auth, timer=timer, session=self, ssl=ssl, 
proxy_headers=proxy_headers, traces=traces) try: with CeilTimeout(real_timeout.connect, loop=self._loop): assert self._connector is not None conn = await self._connector.connect( req, traces=traces, timeout=real_timeout ) except asyncio.TimeoutError as exc: raise ServerTimeoutError( 'Connection timeout ' 'to host {0}'.format(url)) from exc assert conn.transport is not None assert conn.protocol is not None conn.protocol.set_response_params( timer=timer, skip_payload=method.upper() == 'HEAD', read_until_eof=read_until_eof, auto_decompress=self._auto_decompress, read_timeout=real_timeout.sock_read) try: try: resp = await req.send(conn) try: await resp.start(conn) except BaseException: resp.close() raise except BaseException: conn.close() raise except ClientError: raise except OSError as exc: raise ClientOSError(*exc.args) from exc self._cookie_jar.update_cookies(resp.cookies, resp.url) if resp.status in ( 301, 302, 303, 307, 308) and allow_redirects: for trace in traces: await trace.send_request_redirect( method, url, headers, resp ) redirects += 1 history.append(resp) if max_redirects and redirects >= max_redirects: resp.close() raise TooManyRedirects( history[0].request_info, tuple(history)) if (resp.status == 303 and resp.method != hdrs.METH_HEAD) or (resp.status in (301, 302) and resp.method == hdrs.METH_POST): method = hdrs.METH_GET data = None if headers.get(hdrs.CONTENT_LENGTH): headers.pop(hdrs.CONTENT_LENGTH) r_url = (resp.headers.get(hdrs.LOCATION) or resp.headers.get(hdrs.URI)) if r_url is None: break else: resp.release() try: r_url = URL( r_url, encoded=not self._requote_redirect_url) except ValueError: raise InvalidURL(r_url) scheme = r_url.scheme if scheme not in ('http', 'https', ''): resp.close() raise ValueError( 'Can redirect only to http or https') elif not scheme: r_url = url.join(r_url) if url.origin() != r_url.origin(): auth = None headers.pop(hdrs.AUTHORIZATION, None) url = r_url params = None resp.release() continue break if raise_for_status is None: raise_for_status = self._raise_for_status if raise_for_status: resp.raise_for_status() if handle is not None: if resp.connection is not None: resp.connection.add_callback(handle.cancel) else: handle.cancel() resp._history = tuple(history) for trace in traces: await trace.send_request_end( method, url, headers, resp ) return resp except BaseException as e: tm.close() if handle: handle.cancel() handle = None for trace in traces: await trace.send_request_exception( method, url, headers, e ) raise def ws_connect( self, url: StrOrURL, *, method: str=hdrs.METH_GET, protocols: Iterable[str]=(), timeout: float=10.0, receive_timeout: Optional[float]=None, autoclose: bool=True, autoping: bool=True, heartbeat: Optional[float]=None, auth: Optional[BasicAuth]=None, origin: Optional[str]=None, headers: Optional[LooseHeaders]=None, proxy: Optional[StrOrURL]=None, proxy_auth: Optional[BasicAuth]=None, ssl: Union[SSLContext, bool, None, Fingerprint]=None, verify_ssl: Optional[bool]=None, fingerprint: Optional[bytes]=None, ssl_context: Optional[SSLContext]=None, proxy_headers: Optional[LooseHeaders]=None, compress: int=0, max_msg_size: int=4*1024*1024) -> '_WSRequestContextManager': return _WSRequestContextManager( self._ws_connect(url, method=method, protocols=protocols, timeout=timeout, receive_timeout=receive_timeout, autoclose=autoclose, autoping=autoping, heartbeat=heartbeat, auth=auth, origin=origin, headers=headers, proxy=proxy, proxy_auth=proxy_auth, ssl=ssl, verify_ssl=verify_ssl, fingerprint=fingerprint, ssl_context=ssl_context, 
proxy_headers=proxy_headers, compress=compress, max_msg_size=max_msg_size)) async def _ws_connect( self, url: StrOrURL, *, method: str=hdrs.METH_GET, protocols: Iterable[str]=(), timeout: float=10.0, receive_timeout: Optional[float]=None, autoclose: bool=True, autoping: bool=True, heartbeat: Optional[float]=None, auth: Optional[BasicAuth]=None, origin: Optional[str]=None, headers: Optional[LooseHeaders]=None, proxy: Optional[StrOrURL]=None, proxy_auth: Optional[BasicAuth]=None, ssl: Union[SSLContext, bool, None, Fingerprint]=None, verify_ssl: Optional[bool]=None, fingerprint: Optional[bytes]=None, ssl_context: Optional[SSLContext]=None, proxy_headers: Optional[LooseHeaders]=None, compress: int=0, max_msg_size: int=4*1024*1024 ) -> ClientWebSocketResponse: if headers is None: real_headers = CIMultiDict() else: real_headers = CIMultiDict(headers) default_headers = { hdrs.UPGRADE: hdrs.WEBSOCKET, hdrs.CONNECTION: hdrs.UPGRADE, hdrs.SEC_WEBSOCKET_VERSION: '13', } for key, value in default_headers.items(): real_headers.setdefault(key, value) sec_key = base64.b64encode(os.urandom(16)) real_headers[hdrs.SEC_WEBSOCKET_KEY] = sec_key.decode() if protocols: real_headers[hdrs.SEC_WEBSOCKET_PROTOCOL] = ','.join(protocols) if origin is not None: real_headers[hdrs.ORIGIN] = origin if compress: extstr = ws_ext_gen(compress=compress) real_headers[hdrs.SEC_WEBSOCKET_EXTENSIONS] = extstr ssl = _merge_ssl_params(ssl, verify_ssl, ssl_context, fingerprint) resp = await self.request(method, url, headers=real_headers, read_until_eof=False, auth=auth, proxy=proxy, proxy_auth=proxy_auth, ssl=ssl, proxy_headers=proxy_headers) try: if resp.status != 101: raise WSServerHandshakeError( resp.request_info, resp.history, message='Invalid response status', status=resp.status, headers=resp.headers) if resp.headers.get(hdrs.UPGRADE, '').lower() != 'websocket': raise WSServerHandshakeError( resp.request_info, resp.history, message='Invalid upgrade header', status=resp.status, headers=resp.headers) if resp.headers.get(hdrs.CONNECTION, '').lower() != 'upgrade': raise WSServerHandshakeError( resp.request_info, resp.history, message='Invalid connection header', status=resp.status, headers=resp.headers) key = resp.headers.get(hdrs.SEC_WEBSOCKET_ACCEPT, '') match = base64.b64encode( hashlib.sha1(sec_key + WS_KEY).digest()).decode() if key != match: raise WSServerHandshakeError( resp.request_info, resp.history, message='Invalid challenge response', status=resp.status, headers=resp.headers) protocol = None if protocols and hdrs.SEC_WEBSOCKET_PROTOCOL in resp.headers: resp_protocols = [ proto.strip() for proto in resp.headers[hdrs.SEC_WEBSOCKET_PROTOCOL].split(',')] for proto in resp_protocols: if proto in protocols: protocol = proto break notakeover = False if compress: compress_hdrs = resp.headers.get(hdrs.SEC_WEBSOCKET_EXTENSIONS) if compress_hdrs: try: compress, notakeover = ws_ext_parse(compress_hdrs) except WSHandshakeError as exc: raise WSServerHandshakeError( resp.request_info, resp.history, message=exc.args[0], status=resp.status, headers=resp.headers) else: compress = 0 notakeover = False conn = resp.connection assert conn is not None proto = conn.protocol assert proto is not None transport = conn.transport assert transport is not None reader = FlowControlDataQueue( proto, limit=2 ** 16, loop=self._loop) proto.set_parser(WebSocketReader(reader, max_msg_size), reader) writer = WebSocketWriter( proto, transport, use_mask=True, compress=compress, notakeover=notakeover) except BaseException: resp.close() raise else: return 
self._ws_response_class(reader, writer, protocol, resp, timeout, autoclose, autoping, self._loop, receive_timeout=receive_timeout, heartbeat=heartbeat, compress=compress, client_notakeover=notakeover) def _prepare_headers( self, headers: Optional[LooseHeaders]) -> 'CIMultiDict[str]': result = CIMultiDict(self._default_headers) if headers: if not isinstance(headers, (MultiDictProxy, MultiDict)): headers = CIMultiDict(headers) added_names = set() for key, value in headers.items(): if key in added_names: result.add(key, value) else: result[key] = value added_names.add(key) return result def get(self, url: StrOrURL, *, allow_redirects: bool=True, **kwargs: Any) -> '_RequestContextManager': return _RequestContextManager( self._request(hdrs.METH_GET, url, allow_redirects=allow_redirects, **kwargs)) def options(self, url: StrOrURL, *, allow_redirects: bool=True, **kwargs: Any) -> '_RequestContextManager': return _RequestContextManager( self._request(hdrs.METH_OPTIONS, url, allow_redirects=allow_redirects, **kwargs)) def head(self, url: StrOrURL, *, allow_redirects: bool=False, **kwargs: Any) -> '_RequestContextManager': return _RequestContextManager( self._request(hdrs.METH_HEAD, url, allow_redirects=allow_redirects, **kwargs)) def post(self, url: StrOrURL, *, data: Any=None, **kwargs: Any) -> '_RequestContextManager': return _RequestContextManager( self._request(hdrs.METH_POST, url, data=data, **kwargs)) def put(self, url: StrOrURL, *, data: Any=None, **kwargs: Any) -> '_RequestContextManager': return _RequestContextManager( self._request(hdrs.METH_PUT, url, data=data, **kwargs)) def patch(self, url: StrOrURL, *, data: Any=None, **kwargs: Any) -> '_RequestContextManager': return _RequestContextManager( self._request(hdrs.METH_PATCH, url, data=data, **kwargs)) def delete(self, url: StrOrURL, **kwargs: Any) -> '_RequestContextManager': return _RequestContextManager( self._request(hdrs.METH_DELETE, url, **kwargs)) async def close(self) -> None: if not self.closed: if self._connector is not None and self._connector_owner: await self._connector.close() self._connector = None @property def closed(self) -> bool: return self._connector is None or self._connector.closed @property def connector(self) -> Optional[BaseConnector]: return self._connector @property def cookie_jar(self) -> AbstractCookieJar: return self._cookie_jar @property
MIT License
jpvanhal/inflection
inflection/__init__.py
ordinalize
python
def ordinalize(number: int) -> str: return "{}{}".format(number, ordinal(number))
Turn a number into an ordinal string used to denote the position in an ordered sequence such as 1st, 2nd, 3rd, 4th. Examples:: >>> ordinalize(1) '1st' >>> ordinalize(2) '2nd' >>> ordinalize(1002) '1002nd' >>> ordinalize(1003) '1003rd' >>> ordinalize(-11) '-11th' >>> ordinalize(-1021) '-1021st'
https://github.com/jpvanhal/inflection/blob/b00d4d348b32ef5823221b20ee4cbd1d2d924462/inflection/__init__.py#L236-L257
import re import unicodedata __version__ = '0.5.1' PLURALS = [ (r"(?i)(quiz)$", r'\1zes'), (r"(?i)^(oxen)$", r'\1'), (r"(?i)^(ox)$", r'\1en'), (r"(?i)(m|l)ice$", r'\1ice'), (r"(?i)(m|l)ouse$", r'\1ice'), (r"(?i)(passer)s?by$", r'\1sby'), (r"(?i)(matr|vert|ind)(?:ix|ex)$", r'\1ices'), (r"(?i)(x|ch|ss|sh)$", r'\1es'), (r"(?i)([^aeiouy]|qu)y$", r'\1ies'), (r"(?i)(hive)$", r'\1s'), (r"(?i)([lr])f$", r'\1ves'), (r"(?i)([^f])fe$", r'\1ves'), (r"(?i)sis$", 'ses'), (r"(?i)([ti])a$", r'\1a'), (r"(?i)([ti])um$", r'\1a'), (r"(?i)(buffal|potat|tomat)o$", r'\1oes'), (r"(?i)(bu)s$", r'\1ses'), (r"(?i)(alias|status)$", r'\1es'), (r"(?i)(octop|vir)i$", r'\1i'), (r"(?i)(octop|vir)us$", r'\1i'), (r"(?i)^(ax|test)is$", r'\1es'), (r"(?i)s$", 's'), (r"$", 's'), ] SINGULARS = [ (r"(?i)(database)s$", r'\1'), (r"(?i)(quiz)zes$", r'\1'), (r"(?i)(matr)ices$", r'\1ix'), (r"(?i)(vert|ind)ices$", r'\1ex'), (r"(?i)(passer)sby$", r'\1by'), (r"(?i)^(ox)en", r'\1'), (r"(?i)(alias|status)(es)?$", r'\1'), (r"(?i)(octop|vir)(us|i)$", r'\1us'), (r"(?i)^(a)x[ie]s$", r'\1xis'), (r"(?i)(cris|test)(is|es)$", r'\1is'), (r"(?i)(shoe)s$", r'\1'), (r"(?i)(o)es$", r'\1'), (r"(?i)(bus)(es)?$", r'\1'), (r"(?i)(m|l)ice$", r'\1ouse'), (r"(?i)(x|ch|ss|sh)es$", r'\1'), (r"(?i)(m)ovies$", r'\1ovie'), (r"(?i)(s)eries$", r'\1eries'), (r"(?i)([^aeiouy]|qu)ies$", r'\1y'), (r"(?i)([lr])ves$", r'\1f'), (r"(?i)(tive)s$", r'\1'), (r"(?i)(hive)s$", r'\1'), (r"(?i)([^f])ves$", r'\1fe'), (r"(?i)(t)he(sis|ses)$", r"\1hesis"), (r"(?i)(s)ynop(sis|ses)$", r"\1ynopsis"), (r"(?i)(p)rogno(sis|ses)$", r"\1rognosis"), (r"(?i)(p)arenthe(sis|ses)$", r"\1arenthesis"), (r"(?i)(d)iagno(sis|ses)$", r"\1iagnosis"), (r"(?i)(b)a(sis|ses)$", r"\1asis"), (r"(?i)(a)naly(sis|ses)$", r"\1nalysis"), (r"(?i)([ti])a$", r'\1um'), (r"(?i)(n)ews$", r'\1ews'), (r"(?i)(ss)$", r'\1'), (r"(?i)s$", ''), ] UNCOUNTABLES = { 'equipment', 'fish', 'information', 'jeans', 'money', 'rice', 'series', 'sheep', 'species'} def _irregular(singular: str, plural: str) -> None: def caseinsensitive(string: str) -> str: return ''.join('[' + char + char.upper() + ']' for char in string) if singular[0].upper() == plural[0].upper(): PLURALS.insert(0, ( r"(?i)({}){}$".format(singular[0], singular[1:]), r'\1' + plural[1:] )) PLURALS.insert(0, ( r"(?i)({}){}$".format(plural[0], plural[1:]), r'\1' + plural[1:] )) SINGULARS.insert(0, ( r"(?i)({}){}$".format(plural[0], plural[1:]), r'\1' + singular[1:] )) else: PLURALS.insert(0, ( r"{}{}$".format(singular[0].upper(), caseinsensitive(singular[1:])), plural[0].upper() + plural[1:] )) PLURALS.insert(0, ( r"{}{}$".format(singular[0].lower(), caseinsensitive(singular[1:])), plural[0].lower() + plural[1:] )) PLURALS.insert(0, ( r"{}{}$".format(plural[0].upper(), caseinsensitive(plural[1:])), plural[0].upper() + plural[1:] )) PLURALS.insert(0, ( r"{}{}$".format(plural[0].lower(), caseinsensitive(plural[1:])), plural[0].lower() + plural[1:] )) SINGULARS.insert(0, ( r"{}{}$".format(plural[0].upper(), caseinsensitive(plural[1:])), singular[0].upper() + singular[1:] )) SINGULARS.insert(0, ( r"{}{}$".format(plural[0].lower(), caseinsensitive(plural[1:])), singular[0].lower() + singular[1:] )) def camelize(string: str, uppercase_first_letter: bool = True) -> str: if uppercase_first_letter: return re.sub(r"(?:^|_)(.)", lambda m: m.group(1).upper(), string) else: return string[0].lower() + camelize(string)[1:] def dasherize(word: str) -> str: return word.replace('_', '-') def humanize(word: str) -> str: word = re.sub(r"_id$", "", word) word = word.replace('_', ' ') word = 
re.sub(r"(?i)([a-z\d]*)", lambda m: m.group(1).lower(), word) word = re.sub(r"^\w", lambda m: m.group(0).upper(), word) return word def ordinal(number: int) -> str: number = abs(int(number)) if number % 100 in (11, 12, 13): return "th" else: return { 1: "st", 2: "nd", 3: "rd", }.get(number % 10, "th")
MIT License
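A short sketch contrasting ordinal() (suffix only, shown in the context above) with ordinalize() (number plus suffix) from the inflection entry.

from inflection import ordinal, ordinalize

assert ordinal(1) == 'st'
assert ordinal(11) == 'th'              # 11, 12 and 13 are special-cased
assert ordinalize(23) == '23rd'
assert ordinalize(-1021) == '-1021st'   # the sign is preserved; the suffix uses abs(number)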
googleworkspace/python-samples
drive/quickstart/quickstart.py
main
python
def main(): creds = None if os.path.exists('token.json'): creds = Credentials.from_authorized_user_file('token.json', SCOPES) if not creds or not creds.valid: if creds and creds.expired and creds.refresh_token: creds.refresh(Request()) else: flow = InstalledAppFlow.from_client_secrets_file( 'credentials.json', SCOPES) creds = flow.run_local_server(port=0) with open('token.json', 'w') as token: token.write(creds.to_json()) service = build('drive', 'v3', credentials=creds) results = service.files().list( pageSize=10, fields="nextPageToken, files(id, name)").execute() items = results.get('files', []) if not items: print('No files found.') else: print('Files:') for item in items: print(u'{0} ({1})'.format(item['name'], item['id']))
Shows basic usage of the Drive v3 API. Prints the names and ids of the first 10 files the user has access to.
https://github.com/googleworkspace/python-samples/blob/3dc8f66f77a11b70e72f5b368c9545a6481dd7ca/drive/quickstart/quickstart.py#L26-L60
from __future__ import print_function import os.path from googleapiclient.discovery import build from google_auth_oauthlib.flow import InstalledAppFlow from google.auth.transport.requests import Request from google.oauth2.credentials import Credentials SCOPES = ['https://www.googleapis.com/auth/drive.metadata.readonly']
Apache License 2.0
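A minimal way to run the Drive quickstart above; it assumes credentials.json (an OAuth client file) sits in the working directory, and token.json is written on the first successful run, as the function body describes.

if __name__ == '__main__':
    main()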
masqu3rad3/tik_manager
tik_manager/pyseq.py
Sequence.__attrs__
python
def __attrs__(self): return { 'l': self.length, 's': self.start, 'e': self.end, 'f': self.frames, 'm': self.missing, 'M': functools.partial(self._get_framerange, self.missing(), missing=True), 'd': lambda *x: self.size, 'D': self.directory, 'p': self._get_padding, 'r': functools.partial(self._get_framerange, self.frames(), missing=False), 'R': functools.partial(self._get_framerange, self.frames(), missing=True), 'h': self.head, 't': self.tail }
Replaces format directives with callables to get their values.
https://github.com/masqu3rad3/tik_manager/blob/59821670e87a2af753a59cc70924c5f0aad8ad51/tik_manager/pyseq.py#L361-L377
import os import re import logging import warnings import functools from glob import glob from glob import iglob from datetime import datetime import fnmatch __version__ = "0.5.1" global_format = '%4l %h%p%t %R' default_format = '%h%r%t' strict_pad = True digits_re = re.compile(r'\d+') format_re = re.compile(r'%(?P<pad>\d+)?(?P<var>\w+)') range_join = os.environ.get('PYSEQ_RANGE_SEP', ', ') __all__ = [ 'SequenceError', 'FormatError', 'Item', 'Sequence', 'diff', 'uncompress', 'getSequences', 'get_sequences', 'walk' ] log = logging.getLogger('pyseq') log.addHandler(logging.StreamHandler()) log.setLevel(int(os.environ.get('PYSEQ_LOG_LEVEL', logging.INFO))) warnings.simplefilter('always', DeprecationWarning) try: unicode = unicode except NameError: str = str unicode = str bytes = bytes basestring = (str,bytes) else: str = str unicode = unicode bytes = str basestring = basestring def _natural_key(x): return [int(c) if c.isdigit() else c.lower() for c in re.split("(\d+)", x)] def _ext_key(x): name, ext = os.path.splitext(x) return [ext] + _natural_key(name) def natural_sort(items): return sorted(items, key=_natural_key) class SequenceError(Exception): pass class FormatError(Exception): pass def deprecated(func): def inner(*args, **kwargs): warnings.warn("Call to deprecated method {}".format(func.__name__), category=DeprecationWarning, stacklevel=2) return func(*args, **kwargs) inner.__name__ = func.__name__ inner.__doc__ = func.__doc__ inner.__dict__.update(func.__dict__) return inner class Item(str): def __init__(self, item): super(Item, self).__init__() log.debug('adding %s', item) self.item = item self.__path = getattr(item, 'path', None) if self.__path is None: self.__path = os.path.abspath(str(item)) self.__dirname, self.__filename = os.path.split(self.__path) self.__digits = digits_re.findall(self.name) self.__parts = digits_re.split(self.name) self.__stat = None self.frame = None self.head = self.name self.tail = '' self.pad = None def __eq__(self, other): return self.path == other.path def __ne__(self, other): return self.path != other.path def __lt__(self, other): return self.frame < other.frame def __gt__(self, other): return self.frame > other.frame def __ge__(self, other): return self.frame >= other.frame def __le__(self, other): return self.frame <= other.frame def __str__(self): return str(self.name) def __repr__(self): return '<pyseq.Item "%s">' % self.name def __getattr__(self, key): return getattr(self.item, key) @property def path(self): return self.__path @property def name(self): return self.__filename @property def dirname(self): return self.__dirname @property def digits(self): return self.__digits @property def parts(self): return self.__parts @property def exists(self): return os.path.isfile(self.__path) @property def size(self): return self.stat.st_size @property def mtime(self): return self.stat.st_mtime @property def stat(self): if self.__stat is None: self.__stat = os.stat(self.__path) return self.__stat @deprecated def isSibling(self, item): return self.is_sibling(item) def is_sibling(self, item): if not isinstance(item, Item): item = Item(item) d = diff(self, item) is_sibling = (len(d) == 1) and (self.parts == item.parts) if is_sibling: frame = d[0]['frames'][0] self.frame = int(frame) self.pad = len(frame) self.head = self.name[:d[0]['start']] self.tail = self.name[d[0]['end']:] frame = d[0]['frames'][1] item.frame = int(frame) item.pad = len(frame) item.head = item.name[:d[0]['start']] item.tail = item.name[d[0]['end']:] return is_sibling class Sequence(list): def 
__init__(self, items): items = items[::] super(Sequence, self).__init__([Item(items.pop(0))]) self.__missing = [] self.__dirty = False self.__frames = None while items: f = Item(items.pop(0)) try: self.append(f) log.debug('+Item belongs to sequence.') except SequenceError: log.debug('-Item does not belong to sequence.') continue except KeyboardInterrupt: log.info("Stopping.") break
BSD 3-Clause New or Revised License
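An illustrative sketch of consuming the directive map returned by __attrs__ above: each value is either a plain attribute or a zero-argument callable, so a formatter can realize them uniformly before substitution. realize() is a hypothetical helper, not part of pyseq.

def realize(attrs):
    # turn every directive into a concrete value, invoking the callables stored lazily in the map
    return {key: (value() if callable(value) else value) for key, value in attrs.items()}

# e.g. realized = realize(seq.__attrs__()); realized['r'] would hold the compressed frame range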
albertsuarez/searchly
src/searchly/helper/log.py
error
python
def error(msg): __logger_stdout.error(msg)
Log [ERROR] level messages. :param msg: Message to log as an error. :return: Message notified as an error.
https://github.com/albertsuarez/searchly/blob/bcdb3727cb64b69a3bbe6023b93cc9e9154d051e/src/searchly/helper/log.py#L46-L52
import logging import sys __logger_stdout = logging.getLogger('searchly') __formatter = logging.Formatter('{%(name)s} - <%(asctime)s> - [%(levelname)-7s] - %(message)s') __handler_stdout = logging.StreamHandler(sys.stdout) __handler_stdout.setFormatter(__formatter) __logger_stdout.addHandler(__handler_stdout) __logger_stdout.setLevel(logging.INFO) def debug(msg): if __logger_stdout.isEnabledFor(logging.DEBUG): __logger_stdout.debug(msg) def info(msg): __logger_stdout.info(msg) def warn(msg): __logger_stdout.warning(msg)
MIT License
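A short usage sketch for the logging helper above; the import path follows the function_path field (src/searchly/helper/log.py) and the messages are placeholders.

from src.searchly.helper import log

log.info('starting song indexing')
log.warn('no lyrics found for track, skipping')
log.error('search backend unreachable')   # emitted through the [ERROR] formatter shown above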
noirbizarre/flask-fs
tasks.py
compose
python
def compose(ctx, cmd): return ctx.run('docker-compose {0}'.format(cmd), pty=True)
Run a docker-compose command
https://github.com/noirbizarre/flask-fs/blob/092e9327384b8411c9bb38ca257ecb558584d201/tasks.py#L11-L13
from __future__ import unicode_literals, absolute_import from invoke import run, task from os.path import join, abspath, dirname ROOT = abspath(join(dirname(__file__)))
MIT License
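A sketch of how the compose helper above is typically called from other Invoke tasks in the same tasks.py; the task names and docker-compose arguments are illustrative only.

from invoke import task

@task
def start(ctx):
    """Bring the test services up in the background."""
    compose(ctx, 'up -d')

@task
def stop(ctx):
    """Tear the test services down."""
    compose(ctx, 'down')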
google-tasks-backup/tasks-backup
oauth2client/client.py
credentials_from_clientsecrets_and_code
python
def credentials_from_clientsecrets_and_code(filename, scope, code, message = None, redirect_uri='postmessage', http=None, cache=None): flow = flow_from_clientsecrets(filename, scope, message=message, cache=cache, redirect_uri=redirect_uri) credentials = flow.step2_exchange(code, http=http) return credentials
Returns OAuth2Credentials from a clientsecrets file and an auth code. Will create the right kind of Flow based on the contents of the clientsecrets file or will raise InvalidClientSecretsError for unknown types of Flows. Args: filename: string, File name of clientsecrets. scope: string or list of strings, scope(s) to request. code: string, An authorization code, most likely passed down from the client message: string, A friendly string to display to the user if the clientsecrets file is missing or invalid. If message is provided then sys.exit will be called in the case of an error. If message is not provided then clientsecrets.InvalidClientSecretsError will be raised. redirect_uri: string, this is generally set to 'postmessage' to match the redirect_uri that the client specified http: httplib2.Http, optional http instance to use to do the fetch cache: An optional cache service client that implements get() and set() methods. See clientsecrets.loadfile() for details. Returns: An OAuth2Credentials object. Raises: FlowExchangeError if the authorization code cannot be exchanged for an access token UnknownClientSecretsFlowError if the file describes an unknown kind of Flow. clientsecrets.InvalidClientSecretsError if the clientsecrets file is invalid.
https://github.com/google-tasks-backup/tasks-backup/blob/ffcb2044eb6089d20e1be3f93025fa33c2efbe3e/oauth2client/client.py#L965-L1003
__author__ = '[email protected] (Joe Gregorio)' import base64 import clientsecrets import copy import datetime import httplib2 import logging import os import sys import time import urllib import urlparse from oauth2client import util from oauth2client.anyjson import simplejson HAS_OPENSSL = False try: from oauth2client.crypt import Signer from oauth2client.crypt import make_signed_jwt from oauth2client.crypt import verify_signed_jwt_with_certs HAS_OPENSSL = True except ImportError: pass try: from urlparse import parse_qsl except ImportError: from cgi import parse_qsl logger = logging.getLogger(__name__) EXPIRY_FORMAT = '%Y-%m-%dT%H:%M:%SZ' ID_TOKEN_VERIFICATON_CERTS = 'https://www.googleapis.com/oauth2/v1/certs' OOB_CALLBACK_URN = 'urn:ietf:wg:oauth:2.0:oob' class Error(Exception): pass class FlowExchangeError(Error): pass class AccessTokenRefreshError(Error): pass class UnknownClientSecretsFlowError(Error): pass class AccessTokenCredentialsError(Error): pass class VerifyJwtTokenError(Error): pass def _abstract(): raise NotImplementedError('You need to override this function') class MemoryCache(object): def __init__(self): self.cache = {} def get(self, key): return self.cache.get(key) def set(self, key, value): self.cache[key] = value def delete(self, key): self.cache.pop(key, None) class Credentials(object): NON_SERIALIZED_MEMBERS = ['store'] def authorize(self, http): _abstract() def refresh(self, http): _abstract() def apply(self, headers): _abstract() def _to_json(self, strip): t = type(self) d = copy.copy(self.__dict__) for member in strip: if member in d: del d[member] if 'token_expiry' in d and isinstance(d['token_expiry'], datetime.datetime): d['token_expiry'] = d['token_expiry'].strftime(EXPIRY_FORMAT) d['_class'] = t.__name__ d['_module'] = t.__module__ return simplejson.dumps(d) def to_json(self): return self._to_json(Credentials.NON_SERIALIZED_MEMBERS) @classmethod def new_from_json(cls, s): data = simplejson.loads(s) module = data['_module'] try: m = __import__(module) except ImportError: module = module.replace('.apiclient', '') m = __import__(module) m = __import__(module, fromlist=module.split('.')[:-1]) kls = getattr(m, data['_class']) from_json = getattr(kls, 'from_json') return from_json(s) @classmethod def from_json(cls, s): return Credentials() class Flow(object): pass class Storage(object): def acquire_lock(self): pass def release_lock(self): pass def locked_get(self): _abstract() def locked_put(self, credentials): _abstract() def locked_delete(self): _abstract() def get(self): self.acquire_lock() try: return self.locked_get() finally: self.release_lock() def put(self, credentials): self.acquire_lock() try: self.locked_put(credentials) finally: self.release_lock() def delete(self): self.acquire_lock() try: return self.locked_delete() finally: self.release_lock() class OAuth2Credentials(Credentials): @util.positional(8) def __init__(self, access_token, client_id, client_secret, refresh_token, token_expiry, token_uri, user_agent, id_token=None): self.access_token = access_token self.client_id = client_id self.client_secret = client_secret self.refresh_token = refresh_token self.store = None self.token_expiry = token_expiry self.token_uri = token_uri self.user_agent = user_agent self.id_token = id_token self.invalid = False def authorize(self, http): request_orig = http.request @util.positional(1) def new_request(uri, method='GET', body=None, headers=None, redirections=httplib2.DEFAULT_MAX_REDIRECTS, connection_type=None): if not self.access_token: 
logger.info('OAuth2Credentials.authorize.new_request: Attempting refresh to obtain initial access_token') self._refresh(request_orig) if headers is None: headers = {} self.apply(headers) if self.user_agent is not None: if 'user-agent' in headers: headers['user-agent'] = self.user_agent + ' ' + headers['user-agent'] else: headers['user-agent'] = self.user_agent resp, content = request_orig(uri, method, body, headers, redirections, connection_type) if resp.status in [401, 403]: logger.info('OAuth2Credentials.authorize.new_request: Refreshing due to a %s' % str(resp.status)) self._refresh(request_orig) self.apply(headers) return request_orig(uri, method, body, headers, redirections, connection_type) else: return (resp, content) http.request = new_request setattr(http.request, 'credentials', self) return http def refresh(self, http): self._refresh(http.request) def apply(self, headers): headers['Authorization'] = 'Bearer ' + self.access_token def to_json(self): return self._to_json(Credentials.NON_SERIALIZED_MEMBERS) @classmethod def from_json(cls, s): data = simplejson.loads(s) if 'token_expiry' in data and not isinstance(data['token_expiry'], datetime.datetime): try: data['token_expiry'] = datetime.datetime.strptime( data['token_expiry'], EXPIRY_FORMAT) except: data['token_expiry'] = None retval = OAuth2Credentials( data['access_token'], data['client_id'], data['client_secret'], data['refresh_token'], data['token_expiry'], data['token_uri'], data['user_agent'], id_token=data.get('id_token', None)) retval.invalid = data['invalid'] return retval @property def access_token_expired(self): if self.invalid: return True if not self.token_expiry: return False now = datetime.datetime.utcnow() if now >= self.token_expiry: logger.info('OAuth2Credentials.access_token_expired: access_token is expired. 
Now: %s, token_expiry: %s', now, self.token_expiry) return True return False def set_store(self, store): self.store = store def _updateFromCredential(self, other): self.__dict__.update(other.__getstate__()) def __getstate__(self): d = copy.copy(self.__dict__) del d['store'] return d def __setstate__(self, state): self.__dict__.update(state) self.store = None def _generate_refresh_request_body(self): body = urllib.urlencode({ 'grant_type': 'refresh_token', 'client_id': self.client_id, 'client_secret': self.client_secret, 'refresh_token': self.refresh_token, }) return body def _generate_refresh_request_headers(self): headers = { 'content-type': 'application/x-www-form-urlencoded', } if self.user_agent is not None: headers['user-agent'] = self.user_agent return headers def _refresh(self, http_request): if not self.store: self._do_refresh_request(http_request) else: self.store.acquire_lock() try: new_cred = self.store.locked_get() if (new_cred and not new_cred.invalid and new_cred.access_token != self.access_token): logger.info('OAuth2Credentials._refresh: Updated access_token read from Storage') self._updateFromCredential(new_cred) else: self._do_refresh_request(http_request) finally: self.store.release_lock() def _do_refresh_request(self, http_request): body = self._generate_refresh_request_body() headers = self._generate_refresh_request_headers() logger.info('OAuth2Credentials._do_refresh_request: Refreshing access_token') resp, content = http_request( self.token_uri, method='POST', body=body, headers=headers) if resp.status == 200: d = simplejson.loads(content) self.access_token = d['access_token'] self.refresh_token = d.get('refresh_token', self.refresh_token) if 'expires_in' in d: self.token_expiry = datetime.timedelta( seconds=int(d['expires_in'])) + datetime.datetime.utcnow() else: self.token_expiry = None if self.store: self.store.locked_put(self) else: logger.info('OAuth2Credentials._do_refresh_request: Failed to retrieve access token: %s' % content) error_msg = 'Invalid response %s.' 
% resp['status'] try: d = simplejson.loads(content) if 'error' in d: error_msg = d['error'] self.invalid = True if self.store: self.store.locked_put(self) except StandardError: pass raise AccessTokenRefreshError(error_msg) class AccessTokenCredentials(OAuth2Credentials): def __init__(self, access_token, user_agent): super(AccessTokenCredentials, self).__init__( access_token, None, None, None, None, None, user_agent) @classmethod def from_json(cls, s): data = simplejson.loads(s) retval = AccessTokenCredentials( data['access_token'], data['user_agent']) return retval def _refresh(self, http_request): raise AccessTokenCredentialsError( "The access_token is expired or invalid and can't be refreshed.") class AssertionCredentials(OAuth2Credentials): @util.positional(2) def __init__(self, assertion_type, user_agent=None, token_uri='https://accounts.google.com/o/oauth2/token', **unused_kwargs): super(AssertionCredentials, self).__init__( None, None, None, None, None, token_uri, user_agent) self.assertion_type = assertion_type def _generate_refresh_request_body(self): assertion = self._generate_assertion() body = urllib.urlencode({ 'assertion_type': self.assertion_type, 'assertion': assertion, 'grant_type': 'assertion', }) return body def _generate_assertion(self): _abstract() if HAS_OPENSSL: class SignedJwtAssertionCredentials(AssertionCredentials): MAX_TOKEN_LIFETIME_SECS = 3600 @util.positional(4) def __init__(self, service_account_name, private_key, scope, private_key_password='notasecret', user_agent=None, token_uri='https://accounts.google.com/o/oauth2/token', **kwargs): super(SignedJwtAssertionCredentials, self).__init__( 'http://oauth.net/grant_type/jwt/1.0/bearer', user_agent=user_agent, token_uri=token_uri, ) if type(scope) is list: scope = ' '.join(scope) self.scope = scope self.private_key = base64.b64encode(private_key) self.private_key_password = private_key_password self.service_account_name = service_account_name self.kwargs = kwargs @classmethod def from_json(cls, s): data = simplejson.loads(s) retval = SignedJwtAssertionCredentials( data['service_account_name'], base64.b64decode(data['private_key']), data['scope'], private_key_password=data['private_key_password'], user_agent=data['user_agent'], token_uri=data['token_uri'], **data['kwargs'] ) retval.invalid = data['invalid'] retval.access_token = data['access_token'] return retval def _generate_assertion(self): now = long(time.time()) payload = { 'aud': self.token_uri, 'scope': self.scope, 'iat': now, 'exp': now + SignedJwtAssertionCredentials.MAX_TOKEN_LIFETIME_SECS, 'iss': self.service_account_name } payload.update(self.kwargs) logger.debug("SignedJwtAssertionCredentials._generate_assertion ==>") logger.debug(str(payload)) private_key = base64.b64decode(self.private_key) return make_signed_jwt( Signer.from_string(private_key, self.private_key_password), payload) _cached_http = httplib2.Http(MemoryCache()) @util.positional(2) def verify_id_token(id_token, audience, http=None, cert_uri=ID_TOKEN_VERIFICATON_CERTS): if http is None: http = _cached_http resp, content = http.request(cert_uri) if resp.status == 200: certs = simplejson.loads(content) return verify_signed_jwt_with_certs(id_token, certs, audience) else: raise VerifyJwtTokenError('Status code: %d' % resp.status) def _urlsafe_b64decode(b64string): b64string = b64string.encode('ascii') padded = b64string + '=' * (4 - len(b64string) % 4) return base64.urlsafe_b64decode(padded) def _extract_id_token(id_token): segments = id_token.split('.') if (len(segments) != 3): raise 
VerifyJwtTokenError( 'Wrong number of segments in token: %s' % id_token) return simplejson.loads(_urlsafe_b64decode(segments[1])) def _parse_exchange_token_response(content): resp = {} try: resp = simplejson.loads(content) except StandardError: resp = dict(parse_qsl(content)) if resp and 'expires' in resp: resp['expires_in'] = resp.pop('expires') return resp @util.positional(4) def credentials_from_code(client_id, client_secret, scope, code, redirect_uri='postmessage', http=None, user_agent=None, token_uri='https://accounts.google.com/o/oauth2/token'): flow = OAuth2WebServerFlow(client_id, client_secret, scope, redirect_uri=redirect_uri, user_agent=user_agent, auth_uri='https://accounts.google.com/o/oauth2/auth', token_uri=token_uri) credentials = flow.step2_exchange(code, http=http) return credentials @util.positional(3)
Apache License 2.0
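An illustrative call to the function above, assuming a client_secrets.json downloaded from the Google API console and an authorization code posted back by the browser; the scope, file name, and code value are example placeholders.

import httplib2

auth_code = '<authorization code posted by the client>'
credentials = credentials_from_clientsecrets_and_code(
    'client_secrets.json',
    'https://www.googleapis.com/auth/tasks',
    auth_code,
    message='client_secrets.json is missing or invalid')

# subsequent requests made through this http object carry the access token
http = credentials.authorize(httplib2.Http())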
jest-community/jest-pytest
src/__tests__/integration/home-assistant/homeassistant/helpers/template.py
DomainStates.__len__
python
def __len__(self): return len(self._hass.states.async_entity_ids(self._domain))
Return number of states.
https://github.com/jest-community/jest-pytest/blob/b197b0b31e3ca5c411202d97583cbd2d2b0b92e9/src/__tests__/integration/home-assistant/homeassistant/helpers/template.py#L249-L251
from datetime import datetime import json import logging import math import random import re import jinja2 from jinja2 import contextfilter from jinja2.sandbox import ImmutableSandboxedEnvironment from homeassistant.const import ( ATTR_LATITUDE, ATTR_LONGITUDE, ATTR_UNIT_OF_MEASUREMENT, MATCH_ALL, STATE_UNKNOWN) from homeassistant.core import State, valid_entity_id from homeassistant.exceptions import TemplateError from homeassistant.helpers import location as loc_helper from homeassistant.loader import bind_hass from homeassistant.util import convert from homeassistant.util import dt as dt_util from homeassistant.util import location as loc_util from homeassistant.util.async_ import run_callback_threadsafe _LOGGER = logging.getLogger(__name__) _SENTINEL = object() DATE_STR_FORMAT = "%Y-%m-%d %H:%M:%S" _RE_NONE_ENTITIES = re.compile(r"distance\(|closest\(", re.I | re.M) _RE_GET_ENTITIES = re.compile( r"(?:(?:states\.|(?:is_state|is_state_attr|state_attr|states)" r"\((?:[\ \'\"]?))([\w]+\.[\w]+)|([\w]+))", re.I | re.M ) @bind_hass def attach(hass, obj): if isinstance(obj, list): for child in obj: attach(hass, child) elif isinstance(obj, dict): for child in obj.values(): attach(hass, child) elif isinstance(obj, Template): obj.hass = hass def render_complex(value, variables=None): if isinstance(value, list): return [render_complex(item, variables) for item in value] elif isinstance(value, dict): return {key: render_complex(item, variables) for key, item in value.items()} return value.async_render(variables) def extract_entities(template, variables=None): if template is None or _RE_NONE_ENTITIES.search(template): return MATCH_ALL extraction = _RE_GET_ENTITIES.findall(template) extraction_final = [] for result in extraction: if result[0] == 'trigger.entity_id' and 'trigger' in variables and 'entity_id' in variables['trigger']: extraction_final.append(variables['trigger']['entity_id']) elif result[0]: extraction_final.append(result[0]) if variables and result[1] in variables and isinstance(variables[result[1]], str) and valid_entity_id(variables[result[1]]): extraction_final.append(variables[result[1]]) if extraction_final: return list(set(extraction_final)) return MATCH_ALL class Template(object): def __init__(self, template, hass=None): if not isinstance(template, str): raise TypeError('Expected template to be a string') self.template = template self._compiled_code = None self._compiled = None self.hass = hass def ensure_valid(self): if self._compiled_code is not None: return try: self._compiled_code = ENV.compile(self.template) except jinja2.exceptions.TemplateSyntaxError as err: raise TemplateError(err) def extract_entities(self, variables=None): return extract_entities(self.template, variables) def render(self, variables=None, **kwargs): if variables is not None: kwargs.update(variables) return run_callback_threadsafe( self.hass.loop, self.async_render, kwargs).result() def async_render(self, variables=None, **kwargs): if self._compiled is None: self._ensure_compiled() if variables is not None: kwargs.update(variables) try: return self._compiled.render(kwargs).strip() except jinja2.TemplateError as err: raise TemplateError(err) def render_with_possible_json_value(self, value, error_value=_SENTINEL): return run_callback_threadsafe( self.hass.loop, self.async_render_with_possible_json_value, value, error_value).result() def async_render_with_possible_json_value(self, value, error_value=_SENTINEL): if self._compiled is None: self._ensure_compiled() variables = { 'value': value } try: 
variables['value_json'] = json.loads(value) except ValueError: pass try: return self._compiled.render(variables).strip() except jinja2.TemplateError as ex: _LOGGER.error("Error parsing value: %s (value: %s, template: %s)", ex, value, self.template) return value if error_value is _SENTINEL else error_value def _ensure_compiled(self): self.ensure_valid() assert self.hass is not None, 'hass variable not set on template' template_methods = TemplateMethods(self.hass) global_vars = ENV.make_globals({ 'closest': template_methods.closest, 'distance': template_methods.distance, 'is_state': self.hass.states.is_state, 'is_state_attr': template_methods.is_state_attr, 'state_attr': template_methods.state_attr, 'states': AllStates(self.hass), }) self._compiled = jinja2.Template.from_code( ENV, self._compiled_code, global_vars, None) return self._compiled def __eq__(self, other): return (self.__class__ == other.__class__ and self.template == other.template and self.hass == other.hass) class AllStates(object): def __init__(self, hass): self._hass = hass def __getattr__(self, name): return DomainStates(self._hass, name) def __iter__(self): return iter( _wrap_state(state) for state in sorted(self._hass.states.async_all(), key=lambda state: state.entity_id)) def __len__(self): return len(self._hass.states.async_entity_ids()) def __call__(self, entity_id): state = self._hass.states.get(entity_id) return STATE_UNKNOWN if state is None else state.state class DomainStates(object): def __init__(self, hass, domain): self._hass = hass self._domain = domain def __getattr__(self, name): return _wrap_state( self._hass.states.get('{}.{}'.format(self._domain, name))) def __iter__(self): return iter(sorted( (_wrap_state(state) for state in self._hass.states.async_all() if state.domain == self._domain), key=lambda state: state.entity_id))
MIT License
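A small sketch of where __len__ matters: Jinja's length filter calls len() on the DomainStates object, so a template can count the entities in a domain. This assumes a running HomeAssistant instance bound to hass and rendering from the event loop.

from homeassistant.helpers.template import Template

tpl = Template("{{ states.sensor | length }}", hass)   # hass: running HomeAssistant instance (assumed)
count = tpl.async_render()                              # string count of sensor.* entities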
azure/azure-devops-cli-extension
azure-devops/azext_devops/devops_sdk/v5_0/feed/feed_client.py
FeedClient.create_feed
python
def create_feed(self, feed): content = self._serialize.body(feed, 'Feed') response = self._send(http_method='POST', location_id='c65009a7-474a-4ad1-8b42-7d852107ef8c', version='5.0-preview.1', content=content) return self._deserialize('Feed', response)
CreateFeed. [Preview API] Create a feed, a container for various package types. :param :class:`<Feed> <azure.devops.v5_0.feed.models.Feed>` feed: A JSON object containing both required and optional attributes for the feed. Name is the only required value. :rtype: :class:`<Feed> <azure.devops.v5_0.feed.models.Feed>`
https://github.com/azure/azure-devops-cli-extension/blob/5f33f7d81a9c2d2990044fbd9ffa6b535cbda528/azure-devops/azext_devops/devops_sdk/v5_0/feed/feed_client.py#L82-L93
 from msrest import Serializer, Deserializer from ...client import Client from . import models class FeedClient(Client): def __init__(self, base_url=None, creds=None): super(FeedClient, self).__init__(base_url, creds) client_models = {k: v for k, v in models.__dict__.items() if isinstance(v, type)} self._serialize = Serializer(client_models) self._deserialize = Deserializer(client_models) resource_area_identifier = '7ab4e64e-c4d8-4f50-ae73-5ef2e21642a5' def get_badge(self, feed_id, package_id): route_values = {} if feed_id is not None: route_values['feedId'] = self._serialize.url('feed_id', feed_id, 'str') if package_id is not None: route_values['packageId'] = self._serialize.url('package_id', package_id, 'str') response = self._send(http_method='GET', location_id='61d885fd-10f3-4a55-82b6-476d866b673f', version='5.0-preview.1', route_values=route_values) return self._deserialize('str', response) def get_feed_change(self, feed_id): route_values = {} if feed_id is not None: route_values['feedId'] = self._serialize.url('feed_id', feed_id, 'str') response = self._send(http_method='GET', location_id='29ba2dad-389a-4661-b5d3-de76397ca05b', version='5.0-preview.1', route_values=route_values) return self._deserialize('FeedChange', response) def get_feed_changes(self, include_deleted=None, continuation_token=None, batch_size=None): query_parameters = {} if include_deleted is not None: query_parameters['includeDeleted'] = self._serialize.query('include_deleted', include_deleted, 'bool') if continuation_token is not None: query_parameters['continuationToken'] = self._serialize.query('continuation_token', continuation_token, 'long') if batch_size is not None: query_parameters['batchSize'] = self._serialize.query('batch_size', batch_size, 'int') response = self._send(http_method='GET', location_id='29ba2dad-389a-4661-b5d3-de76397ca05b', version='5.0-preview.1', query_parameters=query_parameters) return self._deserialize('FeedChangesResponse', response)
MIT License
rsokl/mygrad
tests/nnet/layers/test_conv.py
convolve_numpy
python
def convolve_numpy(input_image, conv_filter, stride, dilation=None): conv_shape = conv_filter.shape[1:] in_shape = input_image.shape[1:] if dilation is None: dilation = (1,) * len(stride) out_shape = tuple(get_outshape(in_shape, conv_shape, stride, dilation)) out = np.empty(out_shape, np.float32) for ind in np.ndindex(out_shape): slices = (slice(None),) + tuple( slice(i * s, i * s + w * d, d) for i, w, s, d in zip(ind, conv_shape, stride, dilation) ) out[ind] = np.sum(conv_filter * input_image[slices]) return out
Convolve `input_image` with `conv_filter` at a stride of `stride`. Parameters ---------- input_image : numpy.ndarray, shape=(C, H, ...) The input over which to perform convolution. conv_filter : numpy.ndarray, shape=(C, Hf, ...) The convolutional filter to slide across the image. stride : Sequence[int] The stride at which to apply `conv_filter` across `input_image`. Returns ------- numpy.ndarray, shape=(H', ...) The result of convolving `input_image` with `conv_filter` at a stride of `stride`, where (H', W') is the result of `get_outshape`.
https://github.com/rsokl/mygrad/blob/4256b6f286e52eaa819fd6657decc705ce3dd32a/tests/nnet/layers/test_conv.py#L99-L131
from typing import Tuple import hypothesis.extra.numpy as hnp import hypothesis.strategies as st import numpy as np import pytest from hypothesis import HealthCheck, assume, given, settings from numpy.testing import assert_allclose from pytest import raises import mygrad as mg from mygrad import Tensor from mygrad.nnet.layers import conv_nd from ...utils.numerical_gradient import numerical_gradient_full from ...wrappers.uber import backprop_test_factory, fwdprop_test_factory @pytest.mark.parametrize( "shapes", [ st.tuples( hnp.array_shapes(min_dims=0, max_dims=2), hnp.array_shapes(min_dims=3) ), hnp.array_shapes(min_dims=3).flatmap( lambda x: st.tuples( st.just(x), hnp.array_shapes(min_dims=2).filter(lambda s: len(s) != len(x)), ) ), hnp.array_shapes(min_dims=3).flatmap( lambda x: st.tuples( st.just(x), hnp.array_shapes(min_dims=len(x), max_dims=len(x)).filter( lambda s: s[1] != x[1] ), ) ), ], ) @given(data=st.data()) def test_input_validation( shapes: st.SearchStrategy[Tuple[Tuple[int, ...], Tuple[int, ...]]], data: st.DataObject, ): x_shape, k_shape = data.draw(shapes, label="x_shape, k_shape") x = mg.zeros(x_shape, dtype="float") k = mg.zeros(k_shape, dtype="float") with raises(ValueError): conv_nd(x, k, stride=1) def get_outshape(x_shape, w_shape, stride, dilation): x_shape = np.array(x_shape) w_shape = np.array(w_shape) stride = np.array(stride) dilation = np.array(dilation) out_shape = (x_shape - ((w_shape - 1) * dilation + 1)) / stride + 1 if not all(i.is_integer() and i > 0 for i in out_shape): msg = "Stride and kernel dimensions are incompatible: \n" msg += "Input dimensions: {}\n".format(tuple(x_shape)) msg += "Stride dimensions: {}\n".format(tuple(stride)) msg += "Kernel dimensions: {}\n".format(tuple(w_shape)) msg += "Dilation dimensions: {}\n".format(tuple(dilation)) return None return out_shape.astype(np.int32)
MIT License
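A small worked example for the test helper above: a single-channel 4x4 image convolved with a 2x2 box filter at stride (2, 2) yields a 2x2 output whose entries are the sums of the four non-overlapping patches. The values are arbitrary.

import numpy as np

image = np.arange(16, dtype=np.float32).reshape(1, 4, 4)   # (C, H, W)
kernel = np.ones((1, 2, 2), dtype=np.float32)              # (C, Hf, Wf)

out = convolve_numpy(image, kernel, stride=(2, 2))
print(out.shape)   # (2, 2)
print(out)         # [[10. 18.] [42. 50.]]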
agdsn/pycroft
pycroft/helpers/facilities.py
determine_building
python
def determine_building(shortname=None, id=None): if shortname: return Building.q.filter(Building.short_name == shortname).one() if id: return Building.get(id) raise ValueError("Either shortname or id must be given to identify the building!")
Determine building from shortname or id, in this order. :param str shortname: The short name of the building :param int id: The id of the building :return: The unique building :raises: ValueError if neither is provided
https://github.com/agdsn/pycroft/blob/ed34865c15d6c81dff3b9142af13badfa9404261/pycroft/helpers/facilities.py#L27-L44
import re from pycroft.model.facilities import Building def sort_buildings(buildings): def make_sort_key(building): s = re.split(r'(\d+)([a-zA-Z]?)', building.number) if len(s) != 4: return building.street, building.number return building.street, (int(s[1]), s[2].lower()) sorted_buildings = sorted(buildings, key=make_sort_key) return sorted_buildings
Apache License 2.0
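A brief usage sketch; the short name is an invented example and a matching Building row is assumed to exist in the current database session.

building = determine_building(shortname='Wu5')
same = determine_building(id=building.id)
assert same.id == building.id

# neither argument given raises ValueError, as documented
try:
    determine_building()
except ValueError as exc:
    print(exc)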
pmelchior/scarlet
scarlet/interpolation.py
get_filter_bounds
python
def get_filter_bounds(coords): z = np.zeros((len(coords),), dtype=int) y_start = np.max([z, coords[:, 0]], axis=0) y_end = -np.min([z, coords[:, 0]], axis=0) x_start = np.max([z, coords[:, 1]], axis=0) x_end = -np.min([z, coords[:, 1]], axis=0) return y_start, y_end, x_start, x_end
Get the slices in x and y to apply a filter Parameters ---------- coords: array The coordinates of the filter, defined by `get_filter_coords`. Returns ------- y_start, y_end, x_start, x_end: int The start and end of each slice that is passed to `apply_filter`.
https://github.com/pmelchior/scarlet/blob/134fac69465c2eea46b6909c6f401e1b17cdd85b/scarlet/interpolation.py#L44-L65
import numpy as np from .cache import Cache from .wavelet import apply_wavelet_denoising from . import fft def get_filter_coords(filter_values, center=None): if len(filter_values.shape) != 2: raise ValueError("`filter_values` must be 2D") if center is None: if filter_values.shape[0] % 2 == 0 or filter_values.shape[1] % 2 == 0: msg = """Ambiguous center of the `filter_values` array, you must use a `filter_values` array with an odd number of rows and columns or calculate `coords` on your own.""" raise ValueError(msg) center = [filter_values.shape[0] // 2, filter_values.shape[1] // 2] x = np.arange(filter_values.shape[1]) y = np.arange(filter_values.shape[0]) x, y = np.meshgrid(x, y) x -= center[1] y -= center[0] coords = np.dstack([y, x]) return coords
MIT License
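A minimal sketch combining get_filter_coords and get_filter_bounds for a 3x3 filter; the coordinates are flattened to shape (N, 2) so the per-coordinate slicing inside get_filter_bounds applies. The filter values themselves do not affect the bounds, only the shape and center do.

import numpy as np

filt = np.ones((3, 3))
coords = get_filter_coords(filt)                 # (3, 3, 2) offsets from the central pixel
bounds = get_filter_bounds(coords.reshape(-1, 2))

y_start, y_end, x_start, x_end = bounds          # four length-9 arrays of non-negative slice offsets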
thu-ml/zhusuan
zhusuan/utils.py
log_sum_exp
python
def log_sum_exp(x, axis=None, keepdims=False): x = tf.convert_to_tensor(x) x_max = tf.reduce_max(x, axis=axis, keepdims=True) ret = tf.log(tf.reduce_sum(tf.exp(x - x_max), axis=axis, keepdims=True)) + x_max if not keepdims: ret = tf.reduce_sum(ret, axis=axis) return ret
Deprecated: Use tf.reduce_logsumexp(). TensorFlow numerically stable log sum of exps across the `axis`. :param x: A Tensor. :param axis: An int or list or tuple. The dimensions to reduce. If `None` (the default), reduces all dimensions. :param keepdims: Bool. If true, retains reduced dimensions with length 1. Defaults to False. :return: A Tensor after the computation of log sum exp along given axes of x.
https://github.com/thu-ml/zhusuan/blob/4386b2a12ae4f4ed8e694e504e51d7dcdfd6f22a/zhusuan/utils.py#L153-L174
from __future__ import absolute_import from __future__ import division from functools import wraps import tensorflow as tf __all__ = [ 'TensorArithmeticMixin', 'log_mean_exp', 'merge_dicts', ] class TensorArithmeticMixin(object): def __abs__(self): return tf.abs(self) def __neg__(self): return tf.negative(self) def __add__(self, other): return tf.add(self, other) def __radd__(self, other): return tf.add(other, self) def __sub__(self, other): return tf.subtract(self, other) def __rsub__(self, other): return tf.subtract(other, self) def __mul__(self, other): return tf.multiply(self, other) def __rmul__(self, other): return tf.multiply(other, self) def __div__(self, other): return tf.divide(self, other) def __rdiv__(self, other): return tf.divide(other, self) def __truediv__(self, other): return tf.truediv(self, other) def __rtruediv__(self, other): return tf.truediv(other, self) def __floordiv__(self, other): return tf.floordiv(self, other) def __rfloordiv__(self, other): return tf.floordiv(other, self) def __mod__(self, other): return tf.mod(self, other) def __rmod__(self, other): return tf.mod(other, self) def __pow__(self, other): return tf.pow(self, other) def __rpow__(self, other): return tf.pow(other, self) def __invert__(self): return tf.logical_not(self) def __and__(self, other): return tf.logical_and(self, other) def __rand__(self, other): return tf.logical_and(other, self) def __or__(self, other): return tf.logical_or(self, other) def __ror__(self, other): return tf.logical_or(other, self) def __xor__(self, other): return tf.logical_xor(self, other) def __rxor__(self, other): return tf.logical_xor(other, self) def __lt__(self, other): return tf.less(self, other) def __le__(self, other): return tf.less_equal(self, other) def __gt__(self, other): return tf.greater(self, other) def __ge__(self, other): return tf.greater_equal(self, other) def __getitem__(self, item): return (tf.convert_to_tensor(self))[item] def __hash__(self): return id(self) def __eq__(self, other): return id(self) == id(other) def __iter__(self): raise TypeError( "{} object is not iterable.".format(self.__class__.__name__)) def __bool__(self): raise TypeError( "Using a `{}` object as a Python `bool` is not allowed. " "Use `if t is not None:` instead of `if t:` to test if a " "tensor is defined, and use TensorFlow ops such as " "tf.cond to execute subgraphs conditioned on the value of " "a tensor.".format(self.__class__.__name__) ) def __nonzero__(self): raise TypeError( "Using a `{}` object as a Python `bool` is not allowed. " "Use `if t is not None:` instead of `if t:` to test if a " "tensor is defined, and use TensorFlow ops such as " "tf.cond to execute subgraphs conditioned on the value of " "a tensor.".format(self.__class__.__name__) )
MIT License
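A quick numerical check of the deprecated helper under TensorFlow 1.x graph mode (the API zhusuan targets); the inputs are chosen so that a naive log(sum(exp(x))) would overflow.

import tensorflow as tf

x = tf.constant([[1000.0, 1000.0], [0.0, 0.0]])
lse = log_sum_exp(x, axis=1)          # subtracts the row max before exponentiating

with tf.Session() as sess:
    print(sess.run(lse))              # roughly [1000.6931, 0.6931], no inf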
spcl/dace
dace/symbolic.py
pystr_to_symbolic
python
def pystr_to_symbolic(expr, symbol_map=None, simplify=None): from dace.frontend.python.astutils import unparse if isinstance(expr, (SymExpr, sympy.Basic)): return expr if isinstance(expr, str) and dtypes.validate_name(expr): return symbol(expr) symbol_map = symbol_map or {} locals = { 'min': sympy.Min, 'max': sympy.Max, 'True': sympy.true, 'False': sympy.false, 'GtE': sympy.Ge, 'LtE': sympy.Le, 'NotEq': sympy.Ne, 'And': sympy.Function('AND'), 'Or': sympy.Function('OR'), 'var': sympy.Symbol('var'), 'root': sympy.Symbol('root'), } locals.update(_sympy_clash) if isinstance(expr, str) and re.search( r'\bnot\b|\band\b|\bor\b|\bNone\b|==|!=', expr): expr = unparse(SympyBooleanConverter().visit(ast.parse(expr).body[0])) try: return sympy_to_dace(sympy.sympify(expr, locals, evaluate=simplify), symbol_map) except TypeError: expr = expr.replace('[', '(') expr = expr.replace(']', ')') return sympy_to_dace(sympy.sympify(expr, locals, evaluate=simplify), symbol_map)
Takes a Python string and converts it into a symbolic expression.
https://github.com/spcl/dace/blob/4c6695daaa43df22548b987024d1b681e92c7983/dace/symbolic.py#L779-L822
import ast from functools import lru_cache import sympy import pickle import re from typing import Any, Callable, Dict, Optional, Set, Tuple, Union import warnings import numpy import sympy.abc import sympy.printing.str from dace import dtypes DEFAULT_SYMBOL_TYPE = dtypes.int32 _sympy_clash = {k: v if v else getattr(sympy.abc, k) for k, v in sympy.abc._clash.items()} class symbol(sympy.Symbol): s_currentsymbol = 0 def __new__(cls, name=None, dtype=DEFAULT_SYMBOL_TYPE, **assumptions): if name is None: name = "sym_" + str(symbol.s_currentsymbol) symbol.s_currentsymbol += 1 elif name.startswith('__DACE'): raise NameError('Symbols cannot start with __DACE') elif not dtypes.validate_name(name): raise NameError('Invalid symbol name "%s"' % name) if not isinstance(dtype, dtypes.typeclass): raise TypeError('dtype must be a DaCe type, got %s' % str(dtype)) dkeys = [k for k, v in dtypes.DTYPE_TO_TYPECLASS.items() if v == dtype] is_integer = [ issubclass(k, int) or issubclass(k, numpy.integer) for k in dkeys ] if 'integer' in assumptions or not numpy.any(is_integer): self = sympy.Symbol.__xnew__(cls, name, **assumptions) else: self = sympy.Symbol.__xnew__(cls, name, integer=True, **assumptions) self.dtype = dtype self._constraints = [] self.value = None return self def set(self, value): warnings.warn('symbol.set is deprecated, use keyword arguments', DeprecationWarning) if value is not None: self.check_constraints(value) self.value = self.dtype(value) def __getstate__(self): return dict( self.assumptions0, **{ 'value': self.value, 'dtype': self.dtype, '_constraints': self._constraints }) def is_initialized(self): return self.value is not None def get(self): warnings.warn('symbol.get is deprecated, use keyword arguments', DeprecationWarning) if self.value is None: raise UnboundLocalError('Uninitialized symbol value for \'' + self.name + '\'') return self.value def set_constraints(self, constraint_list): try: iter(constraint_list) self._constraints = constraint_list except TypeError: self._constraints = [constraint_list] if symbol.s_values[self.name] is not None: try: self.check_constraints(symbol.s_values[self.name]) except RuntimeError: self.reset() raise def add_constraints(self, constraint_list): try: iter(constraint_list) symbol.s_constraints[self.name].extend(constraint_list) except TypeError: symbol.s_constraints[self.name].append(constraint_list) if symbol.s_values[self.name] is not None: try: self.check_constraints(symbol.s_values[self.name]) except RuntimeError: self.reset() raise @property def constraints(self): return self._constraints def check_constraints(self, value): fail = None for constraint in self.constraints: try: eval_cons = constraint.subs({self: value}) if not eval_cons: fail = constraint break except (AttributeError, TypeError, ValueError): raise RuntimeError( 'Cannot validate constraint %s for symbol %s' % (str(constraint), self.name)) if fail is not None: raise RuntimeError( 'Value %s invalidates constraint %s for symbol %s' % (str(value), str(fail), self.name)) def get_or_return(self, uninitialized_ret): return self.value or uninitialized_ret class SymExpr(object): def __init__(self, main_expr: Union[str, 'SymExpr'], approx_expr: Optional[Union[str, 'SymExpr']] = None): self._main_expr = pystr_to_symbolic(main_expr) if approx_expr is None: self._approx_expr = self._main_expr else: self._approx_expr = pystr_to_symbolic(approx_expr) def __new__(cls, *args, **kwargs): main_expr, approx_expr = None, None if len(args) == 0: if 'main_expr' in kwargs: main_expr = 
kwargs['main_expr'] if 'approx_expr' in kwargs: approx_expr = kwargs['approx_expr'] if len(args) == 1: main_expr = args[0] if 'approx_expr' in kwargs: approx_expr = kwargs['approx_expr'] if len(args) == 2: main_expr, approx_expr = args if main_expr and (approx_expr is None or main_expr == approx_expr): if isinstance(main_expr, str): return pystr_to_symbolic(main_expr) return main_expr return super(SymExpr, cls).__new__(cls) @property def expr(self): return self._main_expr @property def approx(self): return self._approx_expr def subs(self, repldict): return SymExpr(self._main_expr.subs(repldict), self._approx_expr.subs(repldict)) def match(self, *args, **kwargs): return self._main_expr.match(*args, **kwargs) def __hash__(self): return hash((self.expr, self.approx)) def __str__(self): if self.expr != self.approx: return str(self.expr) + " (~" + str(self.approx) + ")" else: return str(self.expr) def __add__(self, other): if isinstance(other, SymExpr): return SymExpr(self.expr + other.expr, self.approx + other.approx) if isinstance(other, sympy.Expr): return SymExpr(self.expr + other, self.approx + other) return self + pystr_to_symbolic(other) __radd__ = __add__ def __sub__(self, other): if isinstance(other, SymExpr): return SymExpr(self.expr - other.expr, self.approx - other.approx) if isinstance(other, sympy.Expr): return SymExpr(self.expr - other, self.approx - other) return self - pystr_to_symbolic(other) def __rsub__(self, other): if isinstance(other, SymExpr): return SymExpr(other.expr - self.expr, other.approx - self.approx) if isinstance(other, sympy.Expr): return SymExpr(other - self.expr, other - self.approx) return pystr_to_symbolic(other) - self def __mul__(self, other): if isinstance(other, SymExpr): return SymExpr(self.expr * other.expr, self.approx * other.approx) if isinstance(other, sympy.Expr): return SymExpr(self.expr * other, self.approx * other) return self * pystr_to_symbolic(other) __rmul__ = __mul__ def __div__(self, other): if isinstance(other, SymExpr): return SymExpr(self.expr / other.expr, self.approx / other.approx) if isinstance(other, sympy.Expr): return SymExpr(self.expr / other, self.approx / other) return self / pystr_to_symbolic(other) __truediv__ = __div__ def __floordiv__(self, other): if isinstance(other, SymExpr): return SymExpr(self.expr // other.expr, self.approx // other.approx) if isinstance(other, sympy.Expr): return SymExpr(self.expr // other, self.approx // other) return self // pystr_to_symbolic(other) def __mod__(self, other): if isinstance(other, SymExpr): return SymExpr(self.expr % other.expr, self.approx % other.approx) if isinstance(other, sympy.Expr): return SymExpr(self.expr % other, self.approx % other) return self % pystr_to_symbolic(other) def __pow__(self, other): if isinstance(other, SymExpr): return SymExpr(self.expr**other.expr, self.approx**other.approx) if isinstance(other, sympy.Expr): return SymExpr(self.expr**other, self.approx**other) return self**pystr_to_symbolic(other) def __eq__(self, other): if isinstance(other, sympy.Expr): return self.expr == other if isinstance(other, SymExpr): return self.expr == other.expr and self.approx == other.approx return self == pystr_to_symbolic(other) def __lt__(self, other): if isinstance(other, sympy.Expr): return self.expr < other if isinstance(other, SymExpr): return self.expr < other.expr return self < pystr_to_symbolic(other) def __gt__(self, other): if isinstance(other, sympy.Expr): return self.expr > other if isinstance(other, SymExpr): return self.expr > other.expr return self > 
pystr_to_symbolic(other) SymbolicType = Union[sympy.Basic, SymExpr] def symvalue(val): if isinstance(val, symbol): return val.get() return val def _checkEqualIvo(lst): return not lst or lst.count(lst[0]) == len(lst) def symtype(expr): stypes = [s.dtype for s in symlist(expr).values()] if len(stypes) == 0: return DEFAULT_SYMBOL_TYPE elif _checkEqualIvo(stypes): return stypes[0] else: raise TypeError( 'Cannot infer symbolic type from expression "%s"' ' with symbols [%s]' % (str(expr), ', '.join( [str(s) + ": " + str(s.dtype) for s in symlist(expr)]))) def symlist(values): result = {} try: values = iter(values) except TypeError: values = [values] for expr in values: if isinstance(expr, SymExpr): true_expr = expr.expr elif isinstance(expr, sympy.Basic): true_expr = expr else: continue for atom in true_expr.atoms(): if isinstance(atom, symbol): result[atom.name] = atom return result def evaluate(expr: Union[sympy.Basic, int, float], symbols: Dict[Union[symbol, str], Union[int, float]]) -> Union[int, float, numpy.number]: if isinstance(expr, SymExpr): return evaluate(expr.expr, symbols) if issymbolic(expr, set(map(str, symbols.keys()))): raise TypeError('Expression cannot be evaluated to a constant') if isinstance(expr, (int, float, numpy.number)): return expr syms = {(sname if isinstance(sname, sympy.Symbol) else symbol(sname)): sval.get() if isinstance(sval, symbol) else sval for sname, sval in symbols.items()} return expr.subs(syms) def issymbolic(value, constants=None): constants = constants or {} if isinstance(value, SymExpr): return issymbolic(value.expr) if isinstance(value, symbol) and value.name not in constants: return True if isinstance(value, sympy.Basic): for atom in value.atoms(): if isinstance(atom, symbol) and atom.name not in constants: return True return False def overapproximate(expr): if isinstance(expr, SymExpr): if expr.expr != expr.approx: return expr.approx else: return overapproximate(expr.expr) if not isinstance(expr, sympy.Basic): return expr a = sympy.Wild('a') b = sympy.Wild('b') c = sympy.Wild('c') match = expr.match(sympy.Min(a, b) + c) if match is not None and len(match) == 3: newexpr = sympy.Min(match[a] + match[c], match[b] + match[c]) match = newexpr.match(sympy.Min(a, b)) if match is not None and len(match) == 2: if issymbolic(match[a]) and not issymbolic(match[b]): return match[b] if issymbolic(match[b]) and not issymbolic(match[a]): return match[a] a = sympy.Wild('a', properties=[lambda k: k.is_Symbol or k.is_Integer]) b = sympy.Wild('b', properties=[lambda k: k.is_Symbol or k.is_Integer]) int_floor = sympy.Function('int_floor') match = expr.match(sympy.ceiling(b * int_floor(a - 1, b)) + b) if match is not None and len(match) == 2: return match[a] return expr def resolve_symbol_to_constant(symb, start_sdfg): if not issymbolic(symb): return symb else: sdfg = start_sdfg while sdfg is not None: if not issymbolic(symb, sdfg.constants): return evaluate(symb, sdfg.constants) else: sdfg = sdfg.parent_sdfg return None def symbols_in_ast(tree): to_visit = list(tree.__dict__.items()) symbols = [] while len(to_visit) > 0: (key, val) = to_visit.pop() if key == "func": continue if isinstance(val, ast.Name): symbols.append(val.id) continue if isinstance(val, ast.expr): to_visit += list(val.__dict__.items()) if isinstance(val, list): to_visit += [(key, v) for v in val] return dtypes.deduplicate(symbols) def symbol_name_or_value(val): if isinstance(val, symbol): return val.name return str(val) def sympy_to_dace(exprs, symbol_map=None): repl = {} symbol_map = symbol_map or 
{} oneelem = False try: iter(exprs) except TypeError: oneelem = True exprs = [exprs] exprs = list(exprs) for i, expr in enumerate(exprs): if isinstance(expr, sympy.Basic): for atom in expr.atoms(): if isinstance(atom, sympy.Symbol): try: repl[atom] = symbol_map[atom.name] except KeyError: repl[atom] = symbol(atom.name, **atom.assumptions0) exprs[i] = expr.subs(repl) if oneelem: return exprs[0] return exprs def is_sympy_userfunction(expr): try: return issubclass(type(type(expr)), sympy.core.function.UndefinedFunction) except AttributeError: return issubclass(type(type(expr)), sympy.function.UndefinedFunction) def swalk(expr, enter_functions=False): yield expr for arg in expr.args: if not enter_functions and is_sympy_userfunction(arg): yield arg continue yield from swalk(arg) _builtin_userfunctions = { 'int_floor', 'int_ceil', 'min', 'Min', 'max', 'Max', 'not', 'Not', 'Eq', 'NotEq', 'Ne', 'AND', 'OR' } def contains_sympy_functions(expr): if is_sympy_userfunction(expr): if str(expr.func) in _builtin_userfunctions: return False return True if not isinstance(expr, sympy.Basic): return False for arg in expr.args: if contains_sympy_functions(arg): return True return False def free_symbols_and_functions(expr: Union[SymbolicType, str]) -> Set[str]: if isinstance(expr, str): if dtypes.validate_name(expr): return {expr} expr = pystr_to_symbolic(expr) if not isinstance(expr, sympy.Basic): return set() result = {str(k) for k in expr.free_symbols} for atom in swalk(expr): if (is_sympy_userfunction(atom) and str(atom.func) not in _builtin_userfunctions): result.add(str(atom.func)) return result def sympy_numeric_fix(expr): if not isinstance(expr, sympy.Basic) or isinstance(expr, sympy.Number): try: if numpy.int64(expr) == expr: return int(expr) except OverflowError: try: if numpy.float64(expr) == expr: return expr except OverflowError: if expr > 0: return sympy.oo else: return -sympy.oo return expr def sympy_intdiv_fix(expr): nexpr = expr if not isinstance(expr, sympy.Basic): return expr a = sympy.Wild('a', properties=[lambda k: k.is_Symbol or k.is_Integer]) b = sympy.Wild('b', properties=[lambda k: k.is_Symbol or k.is_Integer]) c = sympy.Wild('c') d = sympy.Wild('d') e = sympy.Wild('e', properties=[ lambda k: isinstance(k, sympy.Basic) and not isinstance( k, sympy.Atom) ]) int_ceil = sympy.Function('int_ceil') int_floor = sympy.Function('int_floor') processed = 1 while processed > 0: processed = 0 for ceil in nexpr.find(sympy.ceiling): m = ceil.match(sympy.ceiling(a / b)) if m is not None: nexpr = nexpr.subs(ceil, int_ceil(m[a], m[b])) processed += 1 continue m = ceil.match(sympy.ceiling(int_ceil(c, d) / b)) if m is not None: nexpr = nexpr.subs(ceil, int_ceil(int_ceil(m[c], m[d]), m[b])) processed += 1 continue m = ceil.match(sympy.ceiling(a / int_ceil(c, d))) if m is not None: nexpr = nexpr.subs(ceil, int_ceil(m[a], int_ceil(m[c], m[d]))) processed += 1 continue m = ceil.match(sympy.ceiling(a * int_floor(c, d))) if m is not None: nexpr = nexpr.subs(ceil, m[a] * int_floor(m[c], m[d])) processed += 1 continue m = ceil.match(sympy.ceiling(a * int_ceil(c, d))) if m is not None: nexpr = nexpr.subs(ceil, m[a] * int_ceil(m[c], m[d])) processed += 1 continue m = ceil.match(sympy.ceiling(e / b)) if m is not None: nexpr = nexpr.subs(ceil, int_ceil(m[e], m[b])) processed += 1 continue for floor in nexpr.find(sympy.floor): m = floor.match(sympy.floor(a / b)) if m is not None: nexpr = nexpr.subs(floor, int_floor(m[a], m[b])) processed += 1 continue m = floor.match(sympy.floor(int_floor(c, d) / b)) if m is not None: 
nexpr = nexpr.subs(floor, int_floor(int_floor(m[c], m[d]), m[b])) processed += 1 continue m = floor.match(sympy.floor(a / int_floor(c, d))) if m is not None: nexpr = nexpr.subs(floor, int_floor(m[a], int_floor(m[c], m[d]))) processed += 1 continue m = floor.match(sympy.floor(e / b)) if m is not None: nexpr = nexpr.subs(floor, int_floor(m[e], m[b])) processed += 1 continue return nexpr def sympy_divide_fix(expr): nexpr = expr if not isinstance(expr, sympy.Basic): return expr int_floor = sympy.Function('int_floor') processed = 1 while processed > 0: processed = 0 for candidate in nexpr.find(sympy.Mul): for i, arg in enumerate(candidate.args): if isinstance(arg, sympy.Number) and abs(arg) >= 1: continue if isinstance(arg, sympy.Number) and (1 / arg) == int(1 / arg): ri = i break else: continue nexpr = nexpr.subs( candidate, int_floor( sympy.Mul(*(candidate.args[:ri] + candidate.args[ri + 1:])), int(1 / candidate.args[ri]))) processed += 1 return nexpr def simplify_ext(expr): if not isinstance(expr, sympy.Basic): return expr a = sympy.Wild('a') b = sympy.Wild('b') c = sympy.Wild('c') dic = expr.match(sympy.Min(a, b) + c) if dic: return sympy.Min(dic[a] + dic[c], dic[b] + dic[c]) dic = expr.match(sympy.Max(a, b) + c) if dic: return sympy.Max(dic[a] + dic[c], dic[b] + dic[c]) return expr class SympyBooleanConverter(ast.NodeTransformer): _ast_to_sympy_comparators = { ast.Eq: 'Eq', ast.Gt: 'Gt', ast.GtE: 'Ge', ast.Lt: 'Lt', ast.LtE: 'Le', ast.NotEq: 'Ne', ast.In: 'In', ast.Is: 'Is', ast.IsNot: 'IsNot', ast.NotIn: 'NotIn', } def visit_UnaryOp(self, node): if isinstance(node.op, ast.Not): func_node = ast.copy_location( ast.Name(id=type(node.op).__name__, ctx=ast.Load()), node) new_node = ast.Call(func=func_node, args=[self.visit(node.operand)], keywords=[]) return ast.copy_location(new_node, node) return node def visit_BoolOp(self, node): func_node = ast.copy_location( ast.Name(id=type(node.op).__name__, ctx=ast.Load()), node) new_node = ast.Call(func=func_node, args=[self.visit(value) for value in node.values], keywords=[]) return ast.copy_location(new_node, node) def visit_Compare(self, node: ast.Compare): if len(node.ops) > 1 or len(node.comparators) > 1: raise NotImplementedError op = node.ops[0] arguments = [node.left, node.comparators[0]] func_node = ast.copy_location( ast.Name( id=SympyBooleanConverter._ast_to_sympy_comparators[type(op)], ctx=ast.Load()), node) new_node = ast.Call(func=func_node, args=[self.visit(arg) for arg in arguments], keywords=[]) return ast.copy_location(new_node, node) def visit_Constant(self, node): if node.value is None: return ast.copy_location(ast.Name(id='NoneSymbol', ctx=ast.Load()), node) return self.generic_visit(node) def visit_NameConstant(self, node): return self.visit_Constant(node) @lru_cache(2048)
BSD 3-Clause New or Revised License
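Two illustrative conversions with the function above; the symbol names are arbitrary. Integer division maps to sympy's floor, and boolean keywords are routed through the AST converter shown in the context.

expr = pystr_to_symbolic('N // 4 + 1')     # floor(N/4) + 1, with N promoted to a dace symbol
print(expr.free_symbols)                   # {N}

cond = pystr_to_symbolic('i < N and i >= 0')
print(cond)                                # an AND(...) expression over i and N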
purestorage-openconnect/py-pure-client
pypureclient/flasharray/FA_2_10/api/ports_api.py
PortsApi.api210_ports_get_with_http_info
python
def api210_ports_get_with_http_info( self, authorization=None, x_request_id=None, filter=None, limit=None, names=None, offset=None, sort=None, total_item_count=None, async_req=False, _return_http_data_only=False, _preload_content=True, _request_timeout=None, ): if names is not None: if not isinstance(names, list): names = [names] if sort is not None: if not isinstance(sort, list): sort = [sort] params = {k: v for k, v in six.iteritems(locals()) if v is not None} if params.get('filter'): params['filter'] = str(params['filter']) if params.get('sort'): params['sort'] = [str(_x) for _x in params['sort']] if 'limit' in params and params['limit'] < 1: raise ValueError("Invalid value for parameter `limit` when calling `api210_ports_get`, must be a value greater than or equal to `1`") if 'offset' in params and params['offset'] < 0: raise ValueError("Invalid value for parameter `offset` when calling `api210_ports_get`, must be a value greater than or equal to `0`") collection_formats = {} path_params = {} query_params = [] if 'filter' in params: query_params.append(('filter', params['filter'])) if 'limit' in params: query_params.append(('limit', params['limit'])) if 'names' in params: query_params.append(('names', params['names'])) collection_formats['names'] = 'csv' if 'offset' in params: query_params.append(('offset', params['offset'])) if 'sort' in params: query_params.append(('sort', params['sort'])) collection_formats['sort'] = 'csv' if 'total_item_count' in params: query_params.append(('total_item_count', params['total_item_count'])) header_params = {} if 'authorization' in params: header_params['Authorization'] = params['authorization'] if 'x_request_id' in params: header_params['X-Request-ID'] = params['x_request_id'] form_params = [] local_var_files = {} body_params = None header_params['Accept'] = self.api_client.select_header_accept( ['application/json']) header_params['Content-Type'] = self.api_client.select_header_content_type( ['application/json']) auth_settings = [] return self.api_client.call_api( '/api/2.10/ports', 'GET', path_params, query_params, header_params, body=body_params, post_params=form_params, files=local_var_files, response_type='PortGetResponse', auth_settings=auth_settings, async_req=async_req, _return_http_data_only=_return_http_data_only, _preload_content=_preload_content, _request_timeout=_request_timeout, collection_formats=collection_formats, )
List ports Displays host name, iSCSI Qualified Names (IQNs), NVMe Qualified Names (NQNs), IPv4 address of the portal, Fibre Channel World Wide Names (WWNs), and failover ports, including those that were discovered by Purity//FA and those that have been manually assigned by system administrators. This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.api210_ports_get_with_http_info(async_req=True) >>> result = thread.get() :param str authorization: Access token (in JWT format) required to use any API endpoint (except `/oauth2`, `/login`, and `/logout`) :param str x_request_id: Supplied by client during request or generated by server. :param str filter: Narrows down the results to only the response objects that satisfy the filter criteria. :param int limit: Limits the size of the response to the specified number of objects on each page. To return the total number of resources, set `limit=0`. The total number of resources is returned as a `total_item_count` value. If the page size requested is larger than the system maximum limit, the server returns the maximum limit, disregarding the requested page size. :param list[str] names: Performs the operation on the unique name specified. Enter multiple names in comma-separated format. For example, `name01,name02`. :param int offset: The starting position based on the results of the query in relation to the full set of response objects returned. :param list[str] sort: Returns the response objects in the order specified. Set `sort` to the name in the response by which to sort. Sorting can be performed on any of the names in the response, and the objects can be sorted in ascending or descending order. By default, the response objects are sorted in ascending order. To sort in descending order, append the minus sign (`-`) to the name. A single request can be sorted on multiple objects. For example, you can sort all volumes from largest to smallest volume size, and then sort volumes of the same size in ascending order by volume name. To sort on multiple names, list the names as comma-separated values. :param bool total_item_count: If set to `true`, the `total_item_count` matching the specified query parameters is calculated and returned in the response. If set to `false`, the `total_item_count` is `null` in the response. This may speed up queries where the `total_item_count` is large. If not specified, defaults to `false`. :param bool async_req: Request runs in separate thread and method returns multiprocessing.pool.ApplyResult. :param bool _return_http_data_only: Returns only data field. :param bool _preload_content: Response is converted into objects. :param int _request_timeout: Total request timeout in seconds. It can also be a tuple of (connection time, read time) timeouts. :return: PortGetResponse If the method is called asynchronously, returns the request thread.
https://github.com/purestorage-openconnect/py-pure-client/blob/2d9fdef0b73321cea9613e7d1eb881b42845099b/pypureclient/flasharray/FA_2_10/api/ports_api.py#L29-L143
from __future__ import absolute_import import re import six from typing import List, Optional from .. import models class PortsApi(object): def __init__(self, api_client): self.api_client = api_client
BSD 2-Clause Simplified License
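A low-level sketch against the generated client above; the api_client wiring and the bearer token are assumed to already exist (normally they are provided by the higher-level pypureclient.flasharray.Client).

jwt_token = '<access token in JWT format>'   # placeholder
ports_api = PortsApi(api_client)             # api_client: a configured ApiClient instance (assumed)

ports = ports_api.api210_ports_get_with_http_info(
    authorization=jwt_token,
    limit=10,
    sort=['name'],
    _return_http_data_only=True)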
googlecloudplatform/perfkitbenchmarker
perfkitbenchmarker/providers/azure/azure_virtual_machine.py
AzureVirtualMachine._Suspend
python
def _Suspend(self): raise NotImplementedError()
Suspends the VM.
https://github.com/googlecloudplatform/perfkitbenchmarker/blob/c14a122016d414351d41167029c79c9a19709384/perfkitbenchmarker/providers/azure/azure_virtual_machine.py#L671-L673
import abc import collections import itertools import json import logging import posixpath import re import threading from absl import flags from perfkitbenchmarker import custom_virtual_machine_spec from perfkitbenchmarker import disk from perfkitbenchmarker import errors from perfkitbenchmarker import linux_virtual_machine from perfkitbenchmarker import placement_group from perfkitbenchmarker import resource from perfkitbenchmarker import virtual_machine from perfkitbenchmarker import vm_util from perfkitbenchmarker import windows_virtual_machine from perfkitbenchmarker.configs import option_decoders from perfkitbenchmarker.providers import azure from perfkitbenchmarker.providers.azure import azure_disk from perfkitbenchmarker.providers.azure import azure_network from perfkitbenchmarker.providers.azure import util from six.moves import range import yaml FLAGS = flags.FLAGS NUM_LOCAL_VOLUMES = { 'Standard_L8s_v2': 1, 'Standard_L16s_v2': 2, 'Standard_L32s_v2': 4, 'Standard_L64s_v2': 8, 'Standard_L80s_v2': 10 } _SCHEDULED_EVENTS_CMD = ('curl -H Metadata:true http://169.254.169.254/metadata' '/scheduledevents?api-version=2019-01-01') _SCHEDULED_EVENTS_CMD_WIN = ('Invoke-RestMethod -Headers @{"Metadata"="true"} ' '-Uri http://169.254.169.254/metadata/' 'scheduledevents?api-version=2019-01-01 | ' 'ConvertTo-Json') class AzureVmSpec(virtual_machine.BaseVmSpec): CLOUD = azure.CLOUD def __init__(self, *args, **kwargs): super(AzureVmSpec, self).__init__(*args, **kwargs) if isinstance(self.machine_type, custom_virtual_machine_spec.AzurePerformanceTierDecoder): self.tier = self.machine_type.tier self.compute_units = self.machine_type.compute_units self.machine_type = None else: self.tier = None self.compute_units = None @classmethod def _ApplyFlags(cls, config_values, flag_values): super(AzureVmSpec, cls)._ApplyFlags(config_values, flag_values) if flag_values['machine_type'].present: config_values['machine_type'] = yaml.safe_load(flag_values.machine_type) if flag_values['azure_accelerated_networking'].present: config_values['accelerated_networking'] = ( flag_values.azure_accelerated_networking) if flag_values['azure_low_priority_vms'].present: config_values['low_priority'] = flag_values.azure_low_priority_vms @classmethod def _GetOptionDecoderConstructions(cls): result = super(AzureVmSpec, cls)._GetOptionDecoderConstructions() result.update({ 'machine_type': (custom_virtual_machine_spec.AzureMachineTypeDecoder, {}), 'accelerated_networking': (option_decoders.BooleanDecoder, { 'default': False }), 'boot_disk_size': (option_decoders.IntDecoder, { 'default': None }), 'boot_disk_type': (option_decoders.StringDecoder, { 'default': None }), 'low_priority': (option_decoders.BooleanDecoder, { 'default': False }), }) return result class AzurePublicIPAddress(resource.BaseResource): def __init__(self, region, availability_zone, name, dns_name=None): super(AzurePublicIPAddress, self).__init__() self.region = region self.availability_zone = availability_zone self.name = name self._deleted = False self.resource_group = azure_network.GetResourceGroup() self.dns_name = dns_name def _Create(self): cmd = [ azure.AZURE_PATH, 'network', 'public-ip', 'create', '--location', self.region, '--name', self.name ] + self.resource_group.args if self.availability_zone: cmd += ['--zone', self.availability_zone, '--sku', 'Standard'] if self.dns_name: cmd += ['--dns-name', self.dns_name] _, stderr, retcode = vm_util.IssueCommand(cmd, raise_on_failure=False) if retcode and re.search(r'Cannot create more than \d+ public IP 
addresses', stderr): raise errors.Benchmarks.QuotaFailure( virtual_machine.QUOTA_EXCEEDED_MESSAGE + stderr) def _Exists(self): if self._deleted: return False stdout, _, _ = vm_util.IssueCommand( [ azure.AZURE_PATH, 'network', 'public-ip', 'show', '--output', 'json', '--name', self.name ] + self.resource_group.args, raise_on_failure=False) try: json.loads(stdout) return True except ValueError: return False def GetIPAddress(self): stdout, _ = vm_util.IssueRetryableCommand([ azure.AZURE_PATH, 'network', 'public-ip', 'show', '--output', 'json', '--name', self.name ] + self.resource_group.args) response = json.loads(stdout) return response['ipAddress'] def _Delete(self): self._deleted = True class AzureNIC(resource.BaseResource): def __init__(self, subnet, name, public_ip, accelerated_networking, network_security_group=None, private_ip=None): super(AzureNIC, self).__init__() self.subnet = subnet self.name = name self.public_ip = public_ip self.private_ip = private_ip self._deleted = False self.resource_group = azure_network.GetResourceGroup() self.region = self.subnet.vnet.region self.args = ['--nics', self.name] self.accelerated_networking = accelerated_networking self.network_security_group = network_security_group def _Create(self): cmd = [ azure.AZURE_PATH, 'network', 'nic', 'create', '--location', self.region, '--vnet-name', self.subnet.vnet.name, '--subnet', self.subnet.name, '--public-ip-address', self.public_ip, '--name', self.name ] + self.resource_group.args if self.private_ip: cmd += ['--private-ip-address', self.private_ip] if self.accelerated_networking: cmd += ['--accelerated-networking', 'true'] if self.network_security_group: cmd += ['--network-security-group', self.network_security_group.name] vm_util.IssueCommand(cmd) def _Exists(self): if self._deleted: return False stdout, _, _ = vm_util.IssueCommand( [ azure.AZURE_PATH, 'network', 'nic', 'show', '--output', 'json', '--name', self.name ] + self.resource_group.args, raise_on_failure=False) try: json.loads(stdout) return True except ValueError: return False def GetInternalIP(self): stdout, _ = vm_util.IssueRetryableCommand([ azure.AZURE_PATH, 'network', 'nic', 'show', '--output', 'json', '--name', self.name ] + self.resource_group.args) response = json.loads(stdout) return response['ipConfigurations'][0]['privateIpAddress'] def _Delete(self): self._deleted = True class AzureDedicatedHostGroup(resource.BaseResource): def __init__(self, name, region, resource_group, availability_zone): super(AzureDedicatedHostGroup, self).__init__() self.name = name + 'Group' self.region = region self.resource_group = resource_group self.availability_zone = availability_zone def _Create(self): create_cmd = ([ azure.AZURE_PATH, 'vm', 'host', 'group', 'create', '--name', self.name, '--location', self.region, '--platform-fault-domain-count', '1', ] + self.resource_group.args) if self.availability_zone: create_cmd.extend(['--zone', self.availability_zone]) vm_util.IssueCommand(create_cmd) def _Delete(self): delete_cmd = ([ azure.AZURE_PATH, 'vm', 'host', 'group', 'delete', '--host-group', self.name, ] + self.resource_group.args) vm_util.IssueCommand(delete_cmd) def _Exists(self): show_cmd = [ azure.AZURE_PATH, 'vm', 'host', 'group', 'show', '--output', 'json', '--name', self.name ] + self.resource_group.args stdout, _, _ = vm_util.IssueCommand(show_cmd, raise_on_failure=False) try: json.loads(stdout) return True except ValueError: return False def _GetSkuType(machine_type): sku = '' if re.match('Standard_D[0-9]*s_v3', machine_type): sku = 
'DSv3-Type1' elif re.match('Standard_E[0-9]*s_v3', machine_type): sku = 'ESv3-Type1' else: raise ValueError('Dedicated hosting does not support machine type %s.' % machine_type) return sku class AzureDedicatedHost(resource.BaseResource): _lock = threading.Lock() host_group_map = {} def __init__(self, name, region, resource_group, sku_type, availability_zone): super(AzureDedicatedHost, self).__init__() self.name = name + '-Host' self.region = region self.resource_group = resource_group self.sku_type = sku_type self.availability_zone = availability_zone self.host_group = None self.fill_fraction = 0.0 def _CreateDependencies(self): with self._lock: if self.region not in self.host_group_map: new_host_group = AzureDedicatedHostGroup(self.name, self.region, self.resource_group, self.availability_zone) new_host_group.Create() self.host_group_map[self.region] = new_host_group.name self.host_group = self.host_group_map[self.region] def _Create(self): create_cmd = ([ azure.AZURE_PATH, 'vm', 'host', 'create', '--host-group', self.host_group, '--name', self.name, '--sku', self.sku_type, '--location', self.region, '--platform-fault-domain', '0', ] + self.resource_group.args) vm_util.IssueCommand(create_cmd) def _Delete(self): delete_cmd = ([ azure.AZURE_PATH, 'vm', 'host', 'delete', '--host-group', self.host_group, '--name', self.name, '--yes', ] + self.resource_group.args) vm_util.IssueCommand(delete_cmd) def _Exists(self): show_cmd = [ azure.AZURE_PATH, 'vm', 'host', 'show', '--output', 'json', '--name', self.name, '--host-group', self.host_group, ] + self.resource_group.args stdout, _, _ = vm_util.IssueCommand(show_cmd, raise_on_failure=False) try: json.loads(stdout) return True except ValueError: return False class AzureVirtualMachine(virtual_machine.BaseVirtualMachine): CLOUD = azure.CLOUD _lock = threading.Lock() host_map = collections.defaultdict(list) def __init__(self, vm_spec): super(AzureVirtualMachine, self).__init__(vm_spec) self.region = util.GetRegionFromZone(self.zone) self.availability_zone = util.GetAvailabilityZoneFromZone(self.zone) self.use_dedicated_host = vm_spec.use_dedicated_host self.num_vms_per_host = vm_spec.num_vms_per_host self.network = azure_network.AzureNetwork.GetNetwork(self) self.firewall = azure_network.AzureFirewall.GetFirewall() self.max_local_disks = NUM_LOCAL_VOLUMES.get(self.machine_type) or 1 self._lun_counter = itertools.count() self._deleted = False self.resource_group = azure_network.GetResourceGroup() self.public_ip = AzurePublicIPAddress(self.region, self.availability_zone, self.name + '-public-ip') self.nic = AzureNIC(self.network.subnet, self.name + '-nic', self.public_ip.name, vm_spec.accelerated_networking, self.network.nsg) self.storage_account = self.network.storage_account self.image = vm_spec.image or type(self).IMAGE_URN self.host = None if self.use_dedicated_host: self.host_series_sku = _GetSkuType(self.machine_type) self.host_list = None self.low_priority = vm_spec.low_priority self.low_priority_status_code = None self.spot_early_termination = False self.ultra_ssd_enabled = False disk_spec = disk.BaseDiskSpec('azure_os_disk') disk_spec.disk_type = ( vm_spec.boot_disk_type or self.storage_account.storage_type) if vm_spec.boot_disk_size: disk_spec.disk_size = vm_spec.boot_disk_size self.os_disk = azure_disk.AzureDisk( disk_spec, self, None, is_image=True) @property @classmethod @abc.abstractmethod def IMAGE_URN(cls): raise NotImplementedError() def _CreateDependencies(self): self.public_ip.Create() self.nic.Create() if self.use_dedicated_host: 
with self._lock: self.host_list = self.host_map[(self.host_series_sku, self.region)] if (not self.host_list or (self.num_vms_per_host and self.host_list[-1].fill_fraction + 1.0 / self.num_vms_per_host > 1.0)): new_host = AzureDedicatedHost(self.name, self.region, self.resource_group, self.host_series_sku, self.availability_zone) self.host_list.append(new_host) new_host.Create() self.host = self.host_list[-1] if self.num_vms_per_host: self.host.fill_fraction += 1.0 / self.num_vms_per_host def _RequiresUltraDisk(self): return any(disk_spec.disk_type == azure_disk.ULTRA_STORAGE for disk_spec in self.disk_specs) def _Create(self): if self.os_disk.disk_size: disk_size_args = ['--os-disk-size-gb', str(self.os_disk.disk_size)] else: disk_size_args = [] tags = {} tags.update(self.vm_metadata) tags.update(util.GetResourceTags(self.resource_group.timeout_minutes)) tag_args = ['--tags'] + util.FormatTags(tags) create_cmd = ([ azure.AZURE_PATH, 'vm', 'create', '--location', self.region, '--image', self.image, '--size', self.machine_type, '--admin-username', self.user_name, '--storage-sku', self.os_disk.disk_type, '--name', self.name ] + disk_size_args + self.resource_group.args + self.nic.args + tag_args) if self._RequiresUltraDisk(): self.ultra_ssd_enabled = True create_cmd.extend(['--ultra-ssd-enabled']) if self.availability_zone: create_cmd.extend(['--zone', self.availability_zone]) if self.use_dedicated_host: create_cmd.extend( ['--host-group', self.host.host_group, '--host', self.host.name]) num_hosts = len(self.host_list) if self.network.placement_group: create_cmd.extend(self.network.placement_group.AddVmArgs()) if self.low_priority: create_cmd.extend(['--priority', 'Spot']) if self.password: create_cmd.extend(['--admin-password', self.password]) else: create_cmd.extend(['--ssh-key-value', self.ssh_public_key]) azure_vm_create_timeout = 1800 _, stderr, retcode = vm_util.IssueCommand( create_cmd, timeout=azure_vm_create_timeout, raise_on_failure=False) if retcode: if ('Error Code: QuotaExceeded' in stderr or re.search(r'exceeding approved \S+ \S+ quota', stderr) or 'exceeding quota limit' in stderr): raise errors.Benchmarks.QuotaFailure( virtual_machine.QUOTA_EXCEEDED_MESSAGE + stderr) elif self.low_priority and 'OverconstrainedAllocationRequest' in stderr: raise errors.Benchmarks.InsufficientCapacityCloudFailure(stderr) if (self.use_dedicated_host and retcode and 'AllocationFailed' in stderr): if self.num_vms_per_host: raise errors.Resource.CreationError( 'Failed to create host: %d vms of type %s per host exceeds ' 'memory capacity limits of the host' % (self.num_vms_per_host, self.machine_type)) else: logging.warning( 'Creation failed due to insufficient host capacity. 
A new host will ' 'be created and instance creation will be retried.') with self._lock: if num_hosts == len(self.host_list): new_host = AzureDedicatedHost(self.name, self.region, self.resource_group, self.host_series_sku, self.availability_zone) self.host_list.append(new_host) new_host.Create() self.host = self.host_list[-1] raise errors.Resource.RetryableCreationError() if (not self.use_dedicated_host and retcode and ('AllocationFailed' in stderr or 'OverconstrainedZonalAllocationRequest' in stderr)): raise errors.Benchmarks.InsufficientCapacityCloudFailure(stderr) if retcode: raise errors.Resource.CreationError( 'Failed to create VM: %s return code: %s' % (stderr, retcode)) def _Exists(self): if self._deleted: return False show_cmd = [ azure.AZURE_PATH, 'vm', 'show', '--output', 'json', '--name', self.name ] + self.resource_group.args stdout, _, _ = vm_util.IssueCommand(show_cmd, raise_on_failure=False) try: json.loads(stdout) return True except ValueError: return False def _Delete(self): self._deleted = True def _Start(self): start_cmd = ([azure.AZURE_PATH, 'vm', 'start', '--name', self.name] + self.resource_group.args) vm_util.IssueCommand(start_cmd) self.ip_address = self.public_ip.GetIPAddress() def _Stop(self): stop_cmd = ([azure.AZURE_PATH, 'vm', 'stop', '--name', self.name] + self.resource_group.args) vm_util.IssueCommand(stop_cmd) deallocate_cmd = ( [azure.AZURE_PATH, 'vm', 'deallocate', '--name', self.name] + self.resource_group.args) vm_util.IssueCommand(deallocate_cmd)
Apache License 2.0
sqlfluff/sqlfluff
src/sqlfluff/cli/commands.py
get_config
python
def get_config(**kwargs) -> FluffConfig:
    if "dialect" in kwargs:
        try:
            dialect_selector(kwargs["dialect"])
        except SQLFluffUserError as err:
            click.echo(
                colorize(
                    f"Error loading dialect '{kwargs['dialect']}': {str(err)}",
                    color=Color.red,
                )
            )
            sys.exit(66)
        except KeyError:
            click.echo(
                colorize(
                    f"Error: Unknown dialect '{kwargs['dialect']}'", color=Color.red
                )
            )
            sys.exit(66)
    overrides = {k: kwargs[k] for k in kwargs if kwargs[k] is not None}
    try:
        return FluffConfig.from_root(overrides=overrides)
    except SQLFluffUserError as err:
        click.echo(
            colorize(
                f"Error loading config: {str(err)}",
                color=Color.red,
            )
        )
        sys.exit(66)
Get a config object from kwargs.
https://github.com/sqlfluff/sqlfluff/blob/ce4e5a344526f7ee61a0950adc079e4d3b5af438/src/sqlfluff/cli/commands.py#L206-L238
import sys import json import logging import time from typing import ( Callable, Tuple, NoReturn, Optional, List, ) import oyaml as yaml import click import pstats from io import StringIO import colorama from sqlfluff.cli.formatters import ( format_rules, format_violation, format_linting_result_header, format_linting_stats, colorize, format_dialect_warning, format_dialects, CallbackFormatter, ) from sqlfluff.cli.helpers import cli_table, get_package_version from sqlfluff.core import ( Linter, FluffConfig, SQLLintError, SQLTemplaterError, SQLFluffUserError, dialect_selector, dialect_readout, TimingSummary, ) from sqlfluff.core.enums import FormatType, Color from sqlfluff.core.linter import ParsedString class RedWarningsFilter(logging.Filter): def filter(self, record: logging.LogRecord) -> bool: if record.levelno >= logging.WARNING: record.msg = f"{colorize(record.msg, Color.red)} " return True def set_logging_level( verbosity: int, logger: Optional[logging.Logger] = None, stderr_output: bool = False ) -> None: fluff_logger = logging.getLogger("sqlfluff") fluff_logger.propagate = False colorama.init() handler = logging.StreamHandler(stream=sys.stderr if stderr_output else sys.stdout) handler.setFormatter(logging.Formatter("\u001b[0m%(levelname)-10s %(message)s")) handler.addFilter(RedWarningsFilter()) if logger: focus_logger = logging.getLogger(f"sqlfluff.{logger}") focus_logger.addHandler(handler) else: fluff_logger.addHandler(handler) parser_logger = logging.getLogger("sqlfluff.parser") if verbosity < 3: fluff_logger.setLevel(logging.WARNING) parser_logger.setLevel(logging.NOTSET) elif verbosity == 3: fluff_logger.setLevel(logging.INFO) parser_logger.setLevel(logging.WARNING) elif verbosity == 4: fluff_logger.setLevel(logging.DEBUG) parser_logger.setLevel(logging.INFO) elif verbosity > 4: fluff_logger.setLevel(logging.DEBUG) parser_logger.setLevel(logging.DEBUG) def common_options(f: Callable) -> Callable: f = click.version_option()(f) f = click.option( "-v", "--verbose", count=True, help=( "Verbosity, how detailed should the output be. This is *stackable*, so `-vv`" " is more verbose than `-v`. For the most verbose option try `-vvvv` or `-vvvvv`." ), )(f) f = click.option( "-n", "--nocolor", is_flag=True, help="No color - if this is set then the output will be without ANSI color codes.", )(f) return f def core_options(f: Callable) -> Callable: f = click.option( "--dialect", default=None, help="The dialect of SQL to lint (default=ansi)" )(f) f = click.option( "--templater", default=None, help="The templater to use (default=jinja)" )(f) f = click.option( "--rules", default=None, help=( "Narrow the search to only specific rules. For example " "specifying `--rules L001` will only search for rule `L001` (Unnecessary " "trailing whitespace). Multiple rules can be specified with commas e.g. " "`--rules L001,L002` will specify only looking for violations of rule " "`L001` and rule `L002`." ), )(f) f = click.option( "--exclude-rules", default=None, help=( "Exclude specific rules. For example " "specifying `--exclude-rules L001` will remove rule `L001` (Unnecessary " "trailing whitespace) from the set of considered rules. This could either " "be the whitelist, or the general set if there is no specific whitelist. " "Multiple rules can be specified with commas e.g. " "`--exclude-rules L001,L002` will exclude violations of rule " "`L001` and rule `L002`." ), )(f) f = click.option( "--ignore", default=None, help=( "Ignore particular families of errors so that they don't cause a failed " "run. 
For example `--ignore parsing` would mean that any parsing errors " "are ignored and don't influence the success or fail of a run. Multiple " "options are possible if comma separated e.g. `--ignore parsing,templating`." ), )(f) f = click.option( "--bench", is_flag=True, help="Set this flag to engage the benchmarking tool output.", )(f) f = click.option( "--logger", type=click.Choice( ["templater", "lexer", "parser", "linter", "rules"], case_sensitive=False ), help="Choose to limit the logging to one of the loggers.", )(f) return f
MIT License
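A hedged sketch of calling the helper above from other CLI code; the option values are illustrative, and an unknown dialect would exit with code 66 as shown.

# Keyword names mirror the CLI options defined in core_options/common_options.
cfg = get_config(dialect="ansi", rules="L001,L002", exclude_rules=None)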
jest-community/jest-pytest
src/__tests__/integration/home-assistant/homeassistant/components/mailbox/demo.py
DemoMailbox.async_get_media
python
def async_get_media(self, msgid):
    if msgid not in self._messages:
        raise StreamError("Message not found")

    audio_path = os.path.join(
        os.path.dirname(__file__), '..', 'tts', 'demo.mp3')
    with open(audio_path, 'rb') as file:
        return file.read()
Return the media blob for the msgid.
https://github.com/jest-community/jest-pytest/blob/b197b0b31e3ca5c411202d97583cbd2d2b0b92e9/src/__tests__/integration/home-assistant/homeassistant/components/mailbox/demo.py#L54-L62
import asyncio import logging import os from hashlib import sha1 import homeassistant.util.dt as dt from homeassistant.components.mailbox import (Mailbox, CONTENT_TYPE_MPEG, StreamError) _LOGGER = logging.getLogger(__name__) DOMAIN = "DemoMailbox" @asyncio.coroutine def async_get_handler(hass, config, discovery_info=None): return DemoMailbox(hass, DOMAIN) class DemoMailbox(Mailbox): def __init__(self, hass, name): super().__init__(hass, name) self._messages = {} txt = "Lorem ipsum dolor sit amet, consectetur adipiscing elit. " for idx in range(0, 10): msgtime = int(dt.as_timestamp( dt.utcnow()) - 3600 * 24 * (10 - idx)) msgtxt = "Message {}. {}".format( idx + 1, txt * (1 + idx * (idx % 2))) msgsha = sha1(msgtxt.encode('utf-8')).hexdigest() msg = {"info": {"origtime": msgtime, "callerid": "John Doe <212-555-1212>", "duration": "10"}, "text": msgtxt, "sha": msgsha} self._messages[msgsha] = msg @property def media_type(self): return CONTENT_TYPE_MPEG @asyncio.coroutine
MIT License
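A sketch of fetching one demo message's audio with the coroutine above; hass comes from the Home Assistant runtime and is assumed here, and the msgid is taken from the mailbox's own message table.

@asyncio.coroutine
def fetch_demo_audio(hass):
    # Hypothetical caller; follows the module's old-style @asyncio.coroutine convention.
    mailbox = DemoMailbox(hass, DOMAIN)
    msgid = next(iter(mailbox._messages))      # keys are the sha1 digests built in __init__
    data = yield from mailbox.async_get_media(msgid)
    return data                                # raw MP3 bytes read from tts/demo.mp3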
kkevsterrr/geneva
evaluator.py
Evaluator.update_ports
python
def update_ports(self, environment):
    command = ["docker", "exec", "--privileged", environment["server"]["container"].name, "netstat", "-ano"]
    output = self.exec_cmd_output(command)
    requested_port = self.args.get("port")
    self.logger.debug("Testing if port %s is open in the docker container" % requested_port)
    while (":%s" % requested_port) in output:
        self.logger.warn("Port %s is in use, choosing a new port" % requested_port)
        requested_port = random.randint(1000, 65000)
        output = self.exec_cmd_output(command)
    self.logger.debug("Using port %s" % requested_port)
    self.args.update({"port": requested_port})
    self.client_args.update({"port": requested_port})
    self.server_args.update({"port": requested_port})
Checks that the chosen port is open inside the docker container - if not, it chooses a new port.

Args:
    environment (dict): Dictionary describing docker environment
https://github.com/kkevsterrr/geneva/blob/36d3585545d4cb3450ea0b166d8d5f20a64ed8d8/evaluator.py#L324-L342
import argparse import copy import logging import multiprocessing import os import random import socket import subprocess import sys import threading import time import re import warnings import requests import urllib3 import actions.utils import censors.censor_driver warnings.filterwarnings(action='ignore',module='.*paramiko.*') docker = None BASEPATH = os.path.dirname(os.path.abspath(__file__)) PROJECT_ROOT = BASEPATH class Evaluator(): def __init__(self, command, logger): self.args = get_args(command) self.test_plugin = self.args["test_type"] assert self.test_plugin, "Cannot import an empty plugin" self.public_ip = self.args.get("public_ip", "") self.external_client = self.args["external_client"] self.censor = self.args.get("censor") if not self.external_client and not self.censor: self.args["external_server"] = True self.external_server = self.args["external_server"] if not self.external_server and self.external_client: assert self.args.get("public_ip", ""), "Cannot use an external client to this server without specifying the public IP." self.public_ip = self.args.get("public_ip", "") worker = actions.utils.get_worker(self.public_ip, logger) if worker: self.public_ip = worker["ip"] self.args.update({'server': self.public_ip}) command += ["--server", self.public_ip] self.run_canary_phase = True self.client_args = copy.deepcopy(self.args) self.server_args = copy.deepcopy(self.args) self.client_cls = None self.server_cls = None self.plugin = None self.override_evaluation = False try: _, plugin_cls = actions.utils.import_plugin(self.test_plugin, "plugin") parsed_args = plugin_cls.get_args(command) self.args.update({k:v for k,v in parsed_args.items() if v or (not v and k not in self.args)}) self.plugin = plugin_cls(self.args) self.run_canary_phase = not self.plugin.override_evaluation self.override_evaluation = self.plugin.override_evaluation except ImportError: pass self.client_cls = collect_plugin(self.test_plugin, "client", command, self.args, self.client_args) self.server_cls = collect_plugin(self.test_plugin, "server", command, self.args, self.server_args) self.workers = self.args["workers"] self.stop = False self.skip_empty = not self.args["no_skip_empty"] self.output_directory = self.args["output_directory"] self.routing_ip = self.args.get("routing_ip", None) self.runs = self.args.get("runs", 1) self.fitness_by = self.args.get("fitness_by", "avg") self.forwarder = {} self.act_as_middlebox = self.args.get("act_as_middlebox") if self.act_as_middlebox: assert self.args.get("forward_ip") assert self.args.get("sender_ip") assert self.args.get("routing_ip") self.forwarder["forward_ip"] = self.args["forward_ip"] self.forwarder["sender_ip"] = self.args["sender_ip"] self.forwarder["routing_ip"] = self.args["routing_ip"] self.environments = [] if not os.path.exists(self.output_directory): os.mkdir(self.output_directory) self.use_docker = False if self.args["censor"]: import docker self.use_docker = True self.docker_client = docker.from_env() self.apiclient = docker.APIClient() self.logger = logger def evaluate(self, ind_list): self.assign_ids(ind_list) if self.override_evaluation: self.logger.debug("Beginning evaluation in plugin") return self.plugin.evaluate(self.args, self, ind_list, self.logger) if self.workers > 1 and self.use_docker: split = [ind_list[i::self.workers] for i in range(0, self.workers)] procs = [] for i in range(0, len(split)): if not split[i]: continue if self.use_docker: try: environment = self.create_test_environment(i) except (docker.errors.APIError, 
requests.exceptions.ConnectionError, urllib3.exceptions.ProtocolError): self.logger.exception("Failed to create evaluator environment - is docker running?") return proc = multiprocessing.Process(target=self.worker, args=(split[i], str(i), environment)) proc.start() procs.append(proc) try: for proc in procs: proc.join() except KeyboardInterrupt: self.shutdown() else: environment = {} if self.use_docker: try: environment = self.create_test_environment("main") except (docker.errors.APIError, requests.exceptions.ConnectionError, urllib3.exceptions.ProtocolError): self.logger.exception("Failed to create evaluator environment - is docker running?") return self.worker(ind_list, "main", environment) for ind in ind_list: self.read_fitness(ind) self.terminate_docker() return ind_list def run_test(self, environment, ind): if len(ind) == 0 and ind.environment_id != "canary" and self.skip_empty: self.logger.info("[skipped] Fitness %d: %s" % (-1000, str(ind))) ind.fitness = -1000 return "skipped", -1000 fitnesses = [] for run in range(0, self.runs): self.logger.debug("Launching %s plugin (run %d) for %s" % (self.test_plugin, run + 1, str(ind))) environment["id"] = ind.environment_id self.client_args.update({"environment_id": ind.environment_id}) self.server_args.update({"environment_id": ind.environment_id}) if not self.args["server_side"]: self.client_args.update({"strategy" : str(ind)}) self.server_args.update({"no_engine" : True}) else: self.server_args.update({"strategy" : str(ind)}) self.client_args.update({"no_engine" : True}) if self.args["censor"]: self.client_args.update({"server": environment["server"]["ip"]}) self.client_args.update({"wait_for_censor": True}) self.server_args.update({"wait_for_shutdown": True}) self.update_ports(environment) try: if self.plugin: self.logger.debug("Running standalone plugin.") self.args.update({"strategy": str(ind)}) self.plugin.start(self.args, self, environment, ind, self.logger) self.read_fitness(ind) else: self.logger.debug("Launching client and server directly.") if self.server_cls and not self.external_server and not self.act_as_middlebox: server = self.start_server(self.server_args, environment, self.logger) fitness = self.run_client(self.client_args, environment, self.logger) if self.server_cls and not self.external_server and not self.act_as_middlebox: self.stop_server(environment, server) self.read_fitness(ind) if self.args["server_side"]: ind.fitness = server.punish_fitness(ind.fitness, self.logger) actions.utils.write_fitness(ind.fitness, self.output_directory, environment["id"]) except actions.utils.SkipStrategyException as exc: self.logger.debug("Strategy evaluation ending.") ind.fitness = exc.fitness fitnesses.append(ind.fitness) break fitnesses.append(ind.fitness) if self.runs > 1: self.logger.debug("\t(%d/%d) Fitness %s: %s" % (run + 1, self.runs, str(ind.fitness), str(ind))) self.logger.debug("Storing fitness of %s by: %s" % (fitnesses, self.fitness_by)) if self.fitness_by == "min": ind.fitness = min(fitnesses) elif self.fitness_by == "max": ind.fitness = max(fitnesses) elif self.fitness_by == "avg": ind.fitness = round(sum(fitnesses)/len(fitnesses), 2) actions.utils.write_fitness(ind.fitness, self.output_directory, environment["id"]) self.logger.info("[%s] Fitness %s: %s" % (ind.environment_id, str(ind.fitness), str(ind))) return ind.environment_id, ind.fitness def run_client(self, args, environment, logger): fitness = None if environment.get("remote"): fitness = self.run_remote_client(args, environment, logger) elif 
environment.get("docker"): self.run_docker_client(args, environment, logger) else: self.run_local_client(args, environment, logger) fitpath = os.path.join(BASEPATH, self.output_directory, actions.utils.FLAGFOLDER, environment["id"]) + ".fitness" if not os.path.exists(fitpath): actions.utils.write_fitness(fitness, self.output_directory, environment["id"]) return fitness def run_docker_client(self, args, environment, logger): command = ["docker", "exec", "--privileged", environment["client"]["container"].name, "python", "code/plugins/plugin_client.py", "--server", environment["server"]["ip"]] base_cmd = actions.utils.build_command(args) command += base_cmd self.exec_cmd(command)
BSD 3-Clause New or Revised License
open-mss/mss
mslib/msui/topview.py
MSSTopViewWindow.setup_top_view
python
def setup_top_view(self):
    toolitems = ["(select to open control)", "Web Map Service", "Satellite Tracks", "Remote Sensing", "KML Overlay", "Airports/Airspaces"]
    self.cbTools.clear()
    self.cbTools.addItems(toolitems)
    self.update_predefined_maps()
    kwargs = self.changeMapSection(only_kwargs=True)
    self.mpl.canvas.init_map(**kwargs)
    self.setFlightTrackModel(self.waypoints_model)
    self.waypoints_model.dataChanged.connect(self.update_roundtrip_enabled)
    self.update_roundtrip_enabled()
    self.mpl.navbar.push_current()
    self.openTool(WMS + 1)
Initialise GUI elements. (This method is called before signals/slots are connected).
https://github.com/open-mss/mss/blob/179acb945ada3cc7adec37794439fd2defade3b9/mslib/msui/topview.py#L227-L251
import functools import logging from mslib.utils.config import config_loader, save_settings_qsettings, load_settings_qsettings from mslib.utils.coordinate import get_projection_params from PyQt5 import QtGui, QtWidgets, QtCore from mslib.msui.mss_qt import ui_topview_window as ui from mslib.msui.mss_qt import ui_topview_mapappearance as ui_ma from mslib.msui.viewwindows import MSSMplViewWindow from mslib.msui import wms_control as wc from mslib.msui import satellite_dockwidget as sat from mslib.msui import remotesensing_dockwidget as rs from mslib.msui import kmloverlay_dockwidget as kml from mslib.msui import airdata_dockwidget as ad from mslib.msui.icons import icons from mslib.msui.flighttrack import Waypoint WMS = 0 SATELLITE = 1 REMOTESENSING = 2 KMLOVERLAY = 3 AIRDATA = 4 class MSS_TV_MapAppearanceDialog(QtWidgets.QDialog, ui_ma.Ui_MapAppearanceDialog): def __init__(self, parent=None, settings_dict=None, wms_connected=False): super(MSS_TV_MapAppearanceDialog, self).__init__(parent) self.setupUi(self) if settings_dict is None: settings_dict = {"draw_graticule": True, "draw_coastlines": True, "fill_waterbodies": True, "fill_continents": True, "draw_flighttrack": True, "draw_marker": True, "label_flighttrack": True, "tov_plot_title_size": "default", "tov_axes_label_size": "default", "colour_water": (0, 0, 0, 0), "colour_land": (0, 0, 0, 0), "colour_ft_vertices": (0, 0, 0, 0), "colour_ft_waypoints": (0, 0, 0, 0) } settings_dict["fill_waterbodies"] = True self.wms_connected = wms_connected if self.wms_connected: self.cbFillContinents.setChecked(False) self.cbFillWaterBodies.setChecked(False) self.cbFillContinents.setEnabled(False) self.cbFillContinents.setStyleSheet("color: black") self.cbFillWaterBodies.setStyleSheet("color: black") else: self.cbFillWaterBodies.setChecked(settings_dict["fill_waterbodies"]) self.cbFillWaterBodies.setEnabled(False) self.cbFillContinents.setChecked(settings_dict["fill_continents"]) self.cbFillContinents.setEnabled(True) self.cbDrawGraticule.setChecked(settings_dict["draw_graticule"]) self.cbDrawCoastlines.setChecked(settings_dict["draw_coastlines"]) self.cbDrawFlightTrack.setChecked(settings_dict["draw_flighttrack"]) self.cbDrawMarker.setChecked(settings_dict["draw_marker"]) self.cbLabelFlightTrack.setChecked(settings_dict["label_flighttrack"]) for button, ids in [(self.btWaterColour, "colour_water"), (self.btLandColour, "colour_land"), (self.btWaypointsColour, "colour_ft_waypoints"), (self.btVerticesColour, "colour_ft_vertices")]: palette = QtGui.QPalette(button.palette()) colour = QtGui.QColor() colour.setRgbF(*settings_dict[ids]) palette.setColor(QtGui.QPalette.Button, colour) button.setPalette(palette) self.btWaterColour.clicked.connect(functools.partial(self.setColour, "water")) self.btLandColour.clicked.connect(functools.partial(self.setColour, "land")) self.btWaypointsColour.clicked.connect(functools.partial(self.setColour, "ft_waypoints")) self.btVerticesColour.clicked.connect(functools.partial(self.setColour, "ft_vertices")) for i in range(self.tov_cbtitlesize.count()): if self.tov_cbtitlesize.itemText(i) == settings_dict["tov_plot_title_size"]: self.tov_cbtitlesize.setCurrentIndex(i) for i in range(self.tov_cbaxessize.count()): if self.tov_cbaxessize.itemText(i) == settings_dict["tov_axes_label_size"]: self.tov_cbaxessize.setCurrentIndex(i) def get_settings(self): settings_dict = { "draw_graticule": self.cbDrawGraticule.isChecked(), "draw_coastlines": self.cbDrawCoastlines.isChecked(), "fill_waterbodies": self.cbFillWaterBodies.isChecked(), 
"fill_continents": self.cbFillContinents.isChecked(), "draw_flighttrack": self.cbDrawFlightTrack.isChecked(), "draw_marker": self.cbDrawMarker.isChecked(), "label_flighttrack": self.cbLabelFlightTrack.isChecked(), "tov_plot_title_size": self.tov_cbtitlesize.currentText(), "tov_axes_label_size": self.tov_cbaxessize.currentText(), "colour_water": QtGui.QPalette(self.btWaterColour.palette()).color(QtGui.QPalette.Button).getRgbF(), "colour_land": QtGui.QPalette(self.btLandColour.palette()).color(QtGui.QPalette.Button).getRgbF(), "colour_ft_vertices": QtGui.QPalette(self.btVerticesColour.palette()).color(QtGui.QPalette.Button).getRgbF(), "colour_ft_waypoints": QtGui.QPalette(self.btWaypointsColour.palette()).color(QtGui.QPalette.Button).getRgbF(), } return settings_dict def setColour(self, which): if which == "water": button = self.btWaterColour elif which == "land": button = self.btLandColour elif which == "ft_vertices": button = self.btVerticesColour elif which == "ft_waypoints": button = self.btWaypointsColour palette = QtGui.QPalette(button.palette()) colour = palette.color(QtGui.QPalette.Button) colour = QtWidgets.QColorDialog.getColor(colour) if colour.isValid(): palette.setColor(QtGui.QPalette.Button, colour) button.setPalette(palette) class MSSTopViewWindow(MSSMplViewWindow, ui.Ui_TopViewWindow): name = "Top View" def __init__(self, parent=None, model=None, _id=None): super(MSSTopViewWindow, self).__init__(parent, model, _id) logging.debug(_id) self.setupUi(self) self.setWindowIcon(QtGui.QIcon(icons('64x64'))) self.docks = [None, None, None, None, None] self.settings_tag = "topview" self.load_settings() self.setup_top_view() self.wms_connected = False self.btMapRedraw.clicked.connect(self.mpl.canvas.redraw_map) self.cbChangeMapSection.activated.connect(self.changeMapSection) self.btSettings.clicked.connect(self.settings_dialogue) self.btRoundtrip.clicked.connect(self.make_roundtrip) self.cbTools.currentIndexChanged.connect(self.openTool) def __del__(self): del self.mpl.canvas.waypoints_interactor
Apache License 2.0
chuckus/chromewhip
chromewhip/protocol/overlay.py
Overlay.highlightQuad
python
def highlightQuad(cls,
                  quad: Union['DOM.Quad'],
                  color: Optional['DOM.RGBA'] = None,
                  outlineColor: Optional['DOM.RGBA'] = None,
                  ):
    return (
        cls.build_send_payload("highlightQuad", {
            "quad": quad,
            "color": color,
            "outlineColor": outlineColor,
        }),
        None
    )
Highlights given quad. Coordinates are absolute with respect to the main frame viewport.

:param quad: Quad to highlight
:type quad: DOM.Quad
:param color: The highlight fill color (default: transparent).
:type color: DOM.RGBA
:param outlineColor: The highlight outline color (default: transparent).
:type outlineColor: DOM.RGBA
https://github.com/chuckus/chromewhip/blob/7249f64f96df3c6ca0859a3da06ce7ddcebbfded/chromewhip/protocol/overlay.py#L170-L190
import logging from typing import Any, Optional, Union from chromewhip.helpers import PayloadMixin, BaseEvent, ChromeTypeBase log = logging.getLogger(__name__) from chromewhip.protocol import dom as DOM from chromewhip.protocol import page as Page from chromewhip.protocol import runtime as Runtime class HighlightConfig(ChromeTypeBase): def __init__(self, showInfo: Optional['bool'] = None, showStyles: Optional['bool'] = None, showRulers: Optional['bool'] = None, showExtensionLines: Optional['bool'] = None, contentColor: Optional['DOM.RGBA'] = None, paddingColor: Optional['DOM.RGBA'] = None, borderColor: Optional['DOM.RGBA'] = None, marginColor: Optional['DOM.RGBA'] = None, eventTargetColor: Optional['DOM.RGBA'] = None, shapeColor: Optional['DOM.RGBA'] = None, shapeMarginColor: Optional['DOM.RGBA'] = None, cssGridColor: Optional['DOM.RGBA'] = None, ): self.showInfo = showInfo self.showStyles = showStyles self.showRulers = showRulers self.showExtensionLines = showExtensionLines self.contentColor = contentColor self.paddingColor = paddingColor self.borderColor = borderColor self.marginColor = marginColor self.eventTargetColor = eventTargetColor self.shapeColor = shapeColor self.shapeMarginColor = shapeMarginColor self.cssGridColor = cssGridColor InspectMode = str class Overlay(PayloadMixin): @classmethod def disable(cls): return ( cls.build_send_payload("disable", { }), None ) @classmethod def enable(cls): return ( cls.build_send_payload("enable", { }), None ) @classmethod def getHighlightObjectForTest(cls, nodeId: Union['DOM.NodeId'], includeDistance: Optional['bool'] = None, includeStyle: Optional['bool'] = None, ): return ( cls.build_send_payload("getHighlightObjectForTest", { "nodeId": nodeId, "includeDistance": includeDistance, "includeStyle": includeStyle, }), cls.convert_payload({ "highlight": { "class": dict, "optional": False }, }) ) @classmethod def hideHighlight(cls): return ( cls.build_send_payload("hideHighlight", { }), None ) @classmethod def highlightFrame(cls, frameId: Union['Page.FrameId'], contentColor: Optional['DOM.RGBA'] = None, contentOutlineColor: Optional['DOM.RGBA'] = None, ): return ( cls.build_send_payload("highlightFrame", { "frameId": frameId, "contentColor": contentColor, "contentOutlineColor": contentOutlineColor, }), None ) @classmethod def highlightNode(cls, highlightConfig: Union['HighlightConfig'], nodeId: Optional['DOM.NodeId'] = None, backendNodeId: Optional['DOM.BackendNodeId'] = None, objectId: Optional['Runtime.RemoteObjectId'] = None, selector: Optional['str'] = None, ): return ( cls.build_send_payload("highlightNode", { "highlightConfig": highlightConfig, "nodeId": nodeId, "backendNodeId": backendNodeId, "objectId": objectId, "selector": selector, }), None ) @classmethod
MIT License
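A sketch of building the highlightQuad command payload; the quad coordinates and the RGBA constructor arguments are assumptions for illustration, and actually sending the payload to Chrome is handled elsewhere in chromewhip.

quad = [10, 10, 110, 10, 110, 60, 10, 60]        # hypothetical viewport quad (x1, y1, ..., x4, y4)
payload, expected = Overlay.highlightQuad(
    quad=quad,
    color=DOM.RGBA(r=255, g=0, b=0, a=0.4),      # assumed RGBA field names
)
# `payload` is the JSON-serialisable command dict; `expected` is None because
# this protocol method defines no return payload to convert.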
guildai/guildai
guild/query/qparse.py
p_flag_name
python
def p_flag_name(p):
    p[0] = p[1]
flag_name : UNQUOTED
https://github.com/guildai/guildai/blob/79d39402201168b7e94007d8e66ecf504e7aa71c/guild/query/qparse.py#L138-L140
from __future__ import absolute_import from __future__ import division import yaml from guild import _yacc from . import Select, Scalar, Attr, Flag from . import ParseError from . import qlex tokens = qlex.tokens def p_query(p): p[0] = p[1] def p_select_stmt(p): p[0] = Select(cols=p[2]) def p_col_list_head(p): p[0] = [p[1]] def p_col_list(p): p[0] = p[1] + [p[3]] def p_col(p): p[0] = p[1] def p_named_col(p): col = p[1] col.named_as = p[3] p[0] = col def p_col_name(p): p[0] = p[1] def p_implicit_scalar_col(p): p[0] = Scalar(p[1]) def p_explicit_scalar_col(p): p[0] = Scalar(p[2]) def p_qualified_implicit_scalar_col(p): p[0] = Scalar(p[2], p[1]) def p_scalar_step_col(p): scalar = p[1] scalar.step = True p[0] = scalar def p_scalar_key(p): p[0] = p[1] def p_scalar_qualifier(p): p[0] = p[1] def p_dot_attr_col(p): p[0] = Attr(p[2]) def p_attr_col(p): p[0] = Attr(p[2]) def p_attr_name(p): p[0] = p[1] def p_equals_flag_col(p): p[0] = Flag(p[2]) def p_flag_col(p): p[0] = Flag(p[2])
Apache License 2.0
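In PLY-style (yacc) parsers like this one, the grammar production lives in the rule function's docstring (the docstring field above), so in source the rule reads roughly as sketched here.

def p_flag_name(p):
    """flag_name : UNQUOTED"""
    # Pass the UNQUOTED token value up as the flag name.
    p[0] = p[1]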
arxiv/arxiv-browse
browse/controllers/home_page/__init__.py
get_home_page
python
def get_home_page() -> Response:
    response_data: Dict[str, Any] = {}
    response_headers: Dict[str, Any] = {}

    try:
        response_data['document_count'] = _get_document_count()
    except Exception as ex:
        logger.warning(f'Could not get abs page data: {ex}')
        raise InternalServerError from ex

    response_data['groups'] = taxonomy.definitions.GROUPS
    response_data['archives'] = taxonomy.definitions.ARCHIVES_ACTIVE
    response_data['categories'] = taxonomy.definitions.CATEGORIES_ACTIVE
    return response_data, status.HTTP_200_OK, response_headers
Get the data needed to generate the home page.
https://github.com/arxiv/arxiv-browse/blob/e68ea09833c30c30426678ad79250b6f0c56b52a/browse/controllers/home_page/__init__.py#L23-L37
import os import re from flask import current_app from typing import Any, Dict, Optional, Tuple from werkzeug.exceptions import InternalServerError from browse.services.database import get_document_count from arxiv import status, taxonomy from arxiv.base import logging from arxiv.base.globals import get_application_config app_config = get_application_config() logger = logging.getLogger(__name__) Response = Tuple[Dict[str, Any], int, Dict[str, Any]] RE_TOTAL_PAPERS = re.compile(r'^total_papers\s+(?P<count>[0-9]+)', re.MULTILINE)
MIT License
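A sketch of a hypothetical caller (for example a Flask route handler) unpacking the controller's response tuple.

# Illustrative only; the real route wiring lives outside this module.
response_data, status_code, headers = get_home_page()
assert status_code == status.HTTP_200_OK
print(response_data["document_count"], len(response_data["groups"]))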
openstack/cyborg
cyborg/conductor/manager.py
ConductorManager.device_profile_create
python
def device_profile_create(self, context, obj_devprof):
    obj_devprof.create(context)
    return obj_devprof
Signal to conductor service to create a device_profile.

:param context: request context.
:param obj_devprof: a created (but not saved) device_profile object.
:returns: created device_profile object.
https://github.com/openstack/cyborg/blob/17b18248f45aafa0cbc04a3307bd83f3deceb8e1/cyborg/conductor/manager.py#L50-L58
from oslo_log import log as logging import oslo_messaging as messaging import uuid from cyborg.common import exception from cyborg.common import placement_client from cyborg.conf import CONF from cyborg.objects.attach_handle import AttachHandle from cyborg.objects.attribute import Attribute from cyborg.objects.control_path import ControlpathID from cyborg.objects.deployable import Deployable from cyborg.objects.device import Device from cyborg.objects.driver_objects.driver_device import DriverDeployable from cyborg.objects.driver_objects.driver_device import DriverDevice from cyborg.objects.ext_arq import ExtARQ LOG = logging.getLogger(__name__) class ConductorManager(object): RPC_API_VERSION = '1.0' target = messaging.Target(version=RPC_API_VERSION) def __init__(self, topic, host=None): super(ConductorManager, self).__init__() self.topic = topic self.host = host or CONF.host self.placement_client = placement_client.PlacementClient() def periodic_tasks(self, context, raise_on_error=False): pass
Apache License 2.0
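A hedged sketch of the conductor-side call; the request context and the unsaved DeviceProfile object come from the API layer and are assumed here, as is the topic string.

# Hypothetical: `context` and `obj_devprof` are provided by the caller (API layer).
manager = ConductorManager(topic="cyborg-conductor")
created = manager.device_profile_create(context, obj_devprof)
# `created` is the same object after obj_devprof.create(context) persisted it.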
lixiny/cpf
hocontact/models/picr.py
PicrHourglassPointNet.picr_forward
python
def picr_forward(self, cam_intr, object_vert_3d, low_level_feature_map, image_resolution):
    results = {}
    if self.training:
        batch_size = object_vert_3d.shape[0]
        rand_rot = (
            self.generate_random_rotation(self.maximal_angle).expand(batch_size, -1, -1).to(object_vert_3d.device)
        )
        mean_obj_v = torch.mean(object_vert_3d, dim=1, keepdim=True)
        object_vert_3d = (
            torch.bmm(rand_rot, (object_vert_3d - mean_obj_v).permute(0, 2, 1)).permute(0, 2, 1) + mean_obj_v
        )
        dir_vec = self.generate_random_direction()
        rand_dist = torch.normal(torch.Tensor([self.mean_offset]), torch.Tensor([self.std_offset]))
        offset = rand_dist * dir_vec
        offset = offset.to(object_vert_3d.device)
        object_vert_3d = object_vert_3d + offset

    reprojected_vert = torch.bmm(cam_intr, object_vert_3d.transpose(1, 2)).transpose(1, 2)
    reprojected_vert = reprojected_vert[:, :, :2] / reprojected_vert[:, :, 2:]

    image_center_coord = image_resolution / 2
    image_resolution = image_resolution.view((1, 1, 2))
    image_center_coord = image_center_coord.view((1, 1, 2))
    reprojected_grid = (reprojected_vert - image_center_coord) / image_center_coord
    in_image_mask = (
        (reprojected_grid[:, :, 0] >= -1.0)
        & (reprojected_grid[:, :, 0] <= 1.0)
        & (reprojected_grid[:, :, 1] >= -1.0)
        & (reprojected_grid[:, :, 1] <= 1.0)
    )
    in_image_mask = in_image_mask.float()
    reprojected_grid = reprojected_grid.unsqueeze(2)
    collected_features = F.grid_sample(
        low_level_feature_map, reprojected_grid, align_corners=True
    )

    focal = cam_intr[:, :1, :1]
    object_vert_3d_z = object_vert_3d[:, :, 2:]
    normed_object_vert_3d_z = ((object_vert_3d_z - 0.4) / focal) / self.obj_scale_factor
    normed_object_vert_3d_z = normed_object_vert_3d_z.unsqueeze(1)
    collected_features = torch.cat((collected_features, normed_object_vert_3d_z), dim=1)

    vertex_contact_pred, contact_region_pred, anchor_elasti_pred = self.contact_head(collected_features)

    results.update(
        {
            "recov_vertex_contact": vertex_contact_pred,
            "recov_contact_in_image_mask": in_image_mask,
            "recov_contact_region": contact_region_pred,
            "recov_anchor_elasti": anchor_elasti_pred,
        }
    )
    return results
low_level_feature_map = TENSOR[NBATCH, 64, 1/4 IMGH, 1/4 IMGW]
object_vert_3d = TENSOR[NBATCH, NPOINT, 3]
image_resolution = TENSOR[2]
https://github.com/lixiny/cpf/blob/69129a3a2ec76347752241850da5ced09d795b1d/hocontact/models/picr.py#L143-L212
import math import numpy as np import torch import torch.nn as nn import torch.nn.functional as F from manopth.rodrigues_layer import batch_rodrigues from hocontact.hodatasets.hoquery import TransQueries from hocontact.models.bases import hourglass from hocontact.models.contacthead import PointNetContactHead from hocontact.models.honet import HONet from hocontact.utils.netutils import freeze_batchnorm_stats class PicrHourglassPointNet(nn.Module): def __init__( self, hg_stacks=2, hg_blocks=1, hg_classes=64, obj_scale_factor=0.0001, honet_resnet_version=18, honet_center_idx=9, honet_mano_lambda_recov_joints3d=0.5, honet_mano_lambda_recov_verts3d=0, honet_mano_lambda_shape=5e-07, honet_mano_lambda_pose_reg=5e-06, honet_obj_lambda_recov_verts3d=0.5, honet_obj_trans_factor=100, honet_mano_fhb_hand=False, mean_offset=0.010, std_offset=0.005, maximal_angle=math.pi / 24, ): super(PicrHourglassPointNet, self).__init__() self.obj_scale_factor = obj_scale_factor self.ho_net = HONet( resnet_version=honet_resnet_version, mano_center_idx=honet_center_idx, mano_lambda_recov_joints3d=honet_mano_lambda_recov_joints3d, mano_lambda_recov_verts3d=honet_mano_lambda_recov_verts3d, mano_lambda_shape=honet_mano_lambda_shape, mano_lambda_pose_reg=honet_mano_lambda_pose_reg, obj_lambda_recov_verts3d=honet_obj_lambda_recov_verts3d, obj_trans_factor=honet_obj_trans_factor, obj_scale_factor=obj_scale_factor, mano_fhb_hand=honet_mano_fhb_hand, ) for param in self.ho_net.parameters(): param.requires_grad = False self.ho_net.eval() freeze_batchnorm_stats(self.ho_net) self.base_net = hourglass.StackedHourglass(hg_stacks, hg_blocks, hg_classes) self.intermediate_feature_size = hg_classes self.contact_head = PointNetContactHead(feat_dim=self.intermediate_feature_size + 1, n_region=17, n_anchor=4) self.mean_offset = mean_offset self.std_offset = std_offset self.maximal_angle = maximal_angle @staticmethod def generate_random_direction(): azi = torch.rand(1) * 2 * math.pi cos_inc = 1 - 2 * torch.rand(1) sin_inc = torch.sqrt(1 - cos_inc ** 2) vec = torch.tensor([sin_inc * torch.cos(azi), sin_inc * torch.sin(azi), cos_inc]) return vec.float() @staticmethod def generate_random_rotation(max_angle): axisang = PicrHourglassPointNet.generate_random_direction() angle = torch.rand(1) * max_angle axisang = axisang * angle rot_mat = batch_rodrigues(axisang.unsqueeze(0)).view(1, 3, 3) return rot_mat def forward(self, sample, rank=None): self.ho_net.eval() with torch.no_grad(): honet_results = self.ho_net(sample, rank=rank) if rank is None: device = torch.device("cuda") else: device = torch.device(f"cuda:{rank}") ls_results = [] image = sample[TransQueries.IMAGE] image_resolution = torch.from_numpy(np.array([image.shape[3], image.shape[2]])).float() image = image.to(device) image_resolution = image_resolution.to(device) ls_hg_feature, _ = self.base_net(image) has_contact_supv = True objverts3d = honet_results["recov_obj_verts3d"] cam_intr = sample[TransQueries.CAM_INTR].float() cam_intr = cam_intr.to(device) if has_contact_supv: for i_stack in range(self.base_net.nstacks): i_hg_feature = ls_hg_feature[i_stack] i_contact_results = self.picr_forward(cam_intr, objverts3d, i_hg_feature, image_resolution) ls_results.append(i_contact_results) extra_results = { "hand_tsl": honet_results["hand_center3d"], "hand_joints_3d": honet_results["recov_joints3d"], "hand_verts_3d": honet_results["recov_hand_verts3d"], "hand_full_pose": honet_results["full_pose"], "hand_shape": honet_results["shape"], "obj_tsl": honet_results["obj_center3d"], "obj_rot": 
honet_results["obj_prerot"], "obj_verts_3d": honet_results["recov_obj_verts3d"], } ls_results[-1].update(extra_results) evalutil_results = { "recov_obj_verts3d": honet_results["recov_obj_verts3d"], "obj_verts2d": honet_results["obj_verts2d"], } ls_results[-1].update(evalutil_results) return ls_results
MIT License
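A shape-only sketch that mirrors the tensor layout documented above; model is assumed to be an already-constructed PicrHourglassPointNet, and all values are random placeholders.

B, NPOINT, H, W = 2, 1000, 256, 256
cam_intr = torch.eye(3).repeat(B, 1, 1)                    # [NBATCH, 3, 3] camera intrinsics (placeholder)
object_vert_3d = torch.rand(B, NPOINT, 3) + 0.5            # keep z away from 0 for the reprojection divide
low_level_feature_map = torch.rand(B, 64, H // 4, W // 4)  # [NBATCH, 64, 1/4 IMGH, 1/4 IMGW]
image_resolution = torch.tensor([W, H]).float()            # TENSOR[2] = (width, height)
results = model.picr_forward(cam_intr, object_vert_3d, low_level_feature_map, image_resolution)
# `results` holds the recov_vertex_contact / recov_contact_region / recov_anchor_elasti predictions.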
tensorflow/cloud
src/python/tensorflow_cloud/core/gcp.py
get_machine_type
python
def get_machine_type(cpu_cores, memory, accelerator_type):
    if accelerator_type.value == "TPU_V2" or accelerator_type.value == "TPU_V3":
        return "cloud_tpu"
    machine_type_map = {
        (4, 15): "n1-standard-4",
        (8, 30): "n1-standard-8",
        (16, 60): "n1-standard-16",
        (32, 120): "n1-standard-32",
        (64, 240): "n1-standard-64",
        (96, 360): "n1-standard-96",
        (2, 13): "n1-highmem-2",
        (4, 26): "n1-highmem-4",
        (8, 52): "n1-highmem-8",
        (16, 104): "n1-highmem-16",
        (32, 208): "n1-highmem-32",
        (64, 416): "n1-highmem-64",
        (96, 624): "n1-highmem-96",
        (16, 14.4): "n1-highcpu-16",
        (32, 28.8): "n1-highcpu-32",
        (64, 57.6): "n1-highcpu-64",
        (96, 86.4): "n1-highcpu-96",
    }
    return machine_type_map[(cpu_cores, memory)]
Returns the GCP AI Platform machine type.
https://github.com/tensorflow/cloud/blob/caf55b13e0f70c63ff9c5a6063eada31bab5639a/src/python/tensorflow_cloud/core/gcp.py#L93-L116
from __future__ import absolute_import from __future__ import division from __future__ import print_function import re import google.auth def get_project_name(): _, project_id = google.auth.default() if project_id is None: raise RuntimeError("Could not determine the GCP project id.") return project_id def validate_machine_configuration( cpu_cores, memory, accelerator_type, accelerator_count ): valid_configurations = _get_valid_machine_configurations() if accelerator_type.value == "TPU_V2" or accelerator_type.value == "TPU_V3": cpu_cores = None memory = None current_config = ( cpu_cores, memory, accelerator_type.value, accelerator_count) if current_config not in valid_configurations: raise ValueError( "Invalid machine configuration: cpu_cores:{}, memory:{}, " "accelerator_type:{}, accelerator_count:{}. Please see the " "following AI platform comptibility table for all valid " "configurations: " "https://cloud.google.com/ml-engine/docs/using-gpus#" "compute-engine-machine-types-with-gpu. If you are using TPU " "accelerator, please specify accelerator count as 8.".format( cpu_cores, memory, str(accelerator_type), accelerator_count ) ) def get_region(): return "us-central1" def get_accelerator_type(accl_type): accl_type_map = { "CPU": "ACCELERATOR_TYPE_UNSPECIFIED", "K80": "NVIDIA_TESLA_K80", "P100": "NVIDIA_TESLA_P100", "V100": "NVIDIA_TESLA_V100", "P4": "NVIDIA_TESLA_P4", "T4": "NVIDIA_TESLA_T4", "TPU_V2": "TPU_V2", "TPU_V3": "TPU_V3", } return accl_type_map[accl_type]
Apache License 2.0
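A worked example against the mapping above, using a minimal stand-in for the accelerator_type argument (only its .value attribute is read by the function):

class _Accel:
    # Hypothetical stand-in; real callers pass the package's accelerator enum member.
    def __init__(self, value):
        self.value = value

assert get_machine_type(8, 30, _Accel("NVIDIA_TESLA_K80")) == "n1-standard-8"
assert get_machine_type(None, None, _Accel("TPU_V2")) == "cloud_tpu"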
swissdatasciencecenter/renku-python
renku/cli/workflow.py
execute
python
def execute(
    provider,
    config,
    set_params,
    values,
    name_or_id,
):
    from renku.core.commands.workflow import execute_workflow_command

    communicator = ClickCallback()

    result = (
        execute_workflow_command()
        .with_communicator(communicator)
        .build()
        .execute(
            name_or_id=name_or_id,
            provider=provider,
            config=config,
            values=values,
            set_params=set_params,
        )
    )

    if result.output:
        click.echo(
            "Unchanged files:\n\n\t{0}".format("\n\t".join(click.style(path, fg="yellow") for path in result.output))
        )
Execute a given workflow.
https://github.com/swissdatasciencecenter/renku-python/blob/5e43e2eff67cdf20fc2805799fe2822e23bc503d/renku/cli/workflow.py#L954-L982
import os import pydoc import shutil import sys from pathlib import Path from typing import TYPE_CHECKING import click from lazy_object_proxy import Proxy from rich.console import Console from rich.markdown import Markdown from renku.cli.utils.callback import ClickCallback from renku.core import errors from renku.core.commands.echo import ERROR from renku.core.commands.format.workflow import WORKFLOW_COLUMNS, WORKFLOW_FORMATS from renku.core.commands.view_model.activity_graph import ACTIVITY_GRAPH_COLUMNS if TYPE_CHECKING: from renku.core.commands.view_model.composite_plan import CompositePlanViewModel from renku.core.commands.view_model.plan import PlanViewModel def _supported_formats(): from renku.core.plugins.workflow import supported_formats return supported_formats() def _available_workflow_providers(): from renku.core.plugins.provider import available_workflow_providers return available_workflow_providers() def _print_plan(plan: "PlanViewModel"): click.echo(click.style("Id: ", bold=True, fg="magenta") + click.style(plan.id, bold=True)) click.echo(click.style("Name: ", bold=True, fg="magenta") + click.style(plan.name, bold=True)) if plan.description: Console().print(Markdown(plan.description)) click.echo(click.style("Command: ", bold=True, fg="magenta") + click.style(plan.full_command, bold=True)) click.echo(click.style("Success Codes: ", bold=True, fg="magenta") + click.style(plan.success_codes, bold=True)) if plan.inputs: click.echo(click.style("Inputs: ", bold=True, fg="magenta")) for run_input in plan.inputs: click.echo(click.style(f"\t- {run_input.name}:", bold=True)) if run_input.description: click.echo(click.style(f"\t\t{run_input.description}")) click.echo( click.style("\t\tDefault Value: ", bold=True, fg="magenta") + click.style(run_input.default_value, bold=True) ) if run_input.position: click.echo( click.style("\t\tPosition: ", bold=True, fg="magenta") + click.style(run_input.position, bold=True) ) if run_input.prefix: click.echo( click.style("\t\tPrefix: ", bold=True, fg="magenta") + click.style(run_input.prefix, bold=True) ) if plan.outputs: click.echo(click.style("Outputs: ", bold=True, fg="magenta")) for run_output in plan.outputs: click.echo(click.style(f"\t- {run_output.name}:", bold=True)) if run_output.description: click.echo(click.style(f"\t\t{run_output.description}")) click.echo( click.style("\t\tDefault Value: ", bold=True, fg="magenta") + click.style(run_output.default_value, bold=True) ) if run_output.position: click.echo( click.style("\t\tPosition: ", bold=True, fg="magenta") + click.style(run_output.position, bold=True) ) if run_output.prefix: click.echo( click.style("\t\tPrefix: ", bold=True, fg="magenta") + click.style(run_output.prefix, bold=True) ) if plan.parameters: click.echo(click.style("Parameters: ", bold=True, fg="magenta")) for run_parameter in plan.parameters: click.echo(click.style(f"\t- {run_parameter.name}:", bold=True)) if run_parameter.description: click.echo(click.style(f"\t\t{run_parameter.description}")) click.echo( click.style("\t\tDefault Value: ", bold=True, fg="magenta") + click.style(run_parameter.default_value, bold=True) ) if run_parameter.position: click.echo( click.style("\t\tPosition: ", bold=True, fg="magenta") + click.style(run_parameter.position, bold=True) ) if run_parameter.prefix: click.echo( click.style("\t\tPrefix: ", bold=True, fg="magenta") + click.style(run_parameter.prefix, bold=True) ) def _print_composite_plan(composite_plan: "CompositePlanViewModel"): click.echo(click.style("Id: ", bold=True, fg="magenta") + 
click.style(composite_plan.id, bold=True)) click.echo(click.style("Name: ", bold=True, fg="magenta") + click.style(composite_plan.name, bold=True)) if composite_plan.description: Console().print(Markdown(composite_plan.description)) click.echo(click.style("Steps: ", bold=True, fg="magenta")) for step in composite_plan.steps: click.echo(click.style(f"\t- {step.name}:", bold=True)) click.echo(click.style("\t\tId: ", bold=True, fg="magenta") + click.style(f"{step.id}", bold=True)) if composite_plan.mappings: click.echo(click.style("Mappings: ", bold=True, fg="magenta")) for mapping in composite_plan.mappings: click.echo(click.style(f"\t- {mapping.name}:", bold=True)) if mapping.description: click.echo(click.style(f"\t\t{mapping.description}")) click.echo( click.style("\t\tDefault Value: ", bold=True, fg="magenta") + click.style(mapping.default_value, bold=True) ) click.echo(click.style("\tMaps to: ", bold=True, fg="magenta")) for maps_to in mapping.maps_to: click.style(maps_to, bold=True) if composite_plan.links: click.echo(click.style("Links: ", bold=True, fg="magenta")) for link in composite_plan.links: click.echo(click.style("\t- From: ", bold=True, fg="magenta") + click.style(link.source, bold=True)) click.echo(click.style("\t\t To: ", bold=True, fg="magenta")) for sink in link.sinks: click.echo(click.style(f"\t\t- {sink}", bold=True)) @click.group() def workflow(): pass @workflow.command("ls") @click.option("--format", type=click.Choice(WORKFLOW_FORMATS), default="tabular", help="Choose an output format.") @click.option( "-c", "--columns", type=click.STRING, default="id,name,command", metavar="<columns>", help="Comma-separated list of column to display: {}.".format(", ".join(WORKFLOW_COLUMNS.keys())), show_default=True, ) def list_workflows(format, columns): from renku.core.commands.workflow import list_workflows_command result = list_workflows_command().build().execute(format=format, columns=columns) click.echo(result.output) @workflow.command() @click.argument("name_or_id", metavar="<name_or_id>") def show(name_or_id): from renku.core.commands.view_model.plan import PlanViewModel from renku.core.commands.workflow import show_workflow_command plan = show_workflow_command().build().execute(name_or_id=name_or_id).output if plan: if isinstance(plan, PlanViewModel): _print_plan(plan) else: _print_composite_plan(plan) else: click.secho(ERROR + f"Workflow '{name_or_id}' not found.") @workflow.command() @click.argument("name", metavar="<name>") @click.option("--force", is_flag=True, help="Override the existence check.") def remove(name, force): from renku.core.commands.workflow import remove_workflow_command remove_workflow_command().build().execute(name=name, force=force) @workflow.command() @click.option("-d", "--description", help="Workflow step's description.") @click.option("mappings", "-m", "--map", multiple=True, help="Mapping for a workflow parameter.") @click.option("defaults", "-s", "--set", multiple=True, help="Default value for a workflow parameter.") @click.option("links", "-l", "--link", multiple=True, help="Link source and sink parameters to connect steps.") @click.option("-p", "--describe-param", multiple=True, help="Add description for a workflow parameter.") @click.option("--map-inputs", is_flag=True, help="Exposes all child inputs as inputs on the CompositePlan.") @click.option("--map-outputs", is_flag=True, help="Exposes all child outputs as outputs on the CompositePlan.") @click.option("--map-params", is_flag=True, help="Exposes all child parameters as parameters on the 
CompositePlan.") @click.option("--map-all", is_flag=True, help="Combination of --map-inputs, --map-outputs, --map-params.") @click.option("--link-all", is_flag=True, help="Automatically link steps based on default values.") @click.option("--keyword", multiple=True, help="List of keywords for the workflow.") @click.option( "--from", "sources", type=click.Path(exists=True, dir_okay=False), multiple=True, help="Start a composite plan from this file as input.", ) @click.option( "--to", "sinks", type=click.Path(exists=True, dir_okay=True), multiple=True, help="End a composite plan at this file as output.", ) @click.argument("name", required=True) @click.argument("steps", nargs=-1, type=click.UNPROCESSED) def compose( description, mappings, defaults, links, describe_param, map_inputs, map_outputs, map_params, map_all, link_all, keyword, sources, sinks, name, steps, ): from renku.core.commands.workflow import compose_workflow_command if (sources or sinks) and steps: click.secho(ERROR + "--from/--to cannot be used at the same time as passing run/step names.") exit(1) elif not (sources or sinks or steps): click.secho(ERROR + "Either --from/--to passing run/step names is required.") exit(1) if map_all: map_inputs = map_outputs = map_params = True result = ( compose_workflow_command() .build() .execute( name=name, description=description, mappings=mappings, defaults=defaults, links=links, param_descriptions=describe_param, map_inputs=map_inputs, map_outputs=map_outputs, map_params=map_params, link_all=link_all, keywords=keyword, steps=steps, sources=sources, sinks=sinks, ) ) if not result.error: _print_composite_plan(result.output) @workflow.command() @click.argument("workflow_name", metavar="<name or uuid>") @click.option("--name", metavar="<new name>", help="New name of the workflow") @click.option("--description", metavar="<new desc>", help="New description of the workflow") @click.option( "--set", "set_params", multiple=True, metavar="<parameter>=<value>", help="Set default <value> for a <parameter>/add new parameter", ) @click.option( "--map", "map_params", multiple=True, metavar="<parameter>=<parameter or expression>", help="New mapping on the workflow", ) @click.option( "--rename-param", "rename_params", multiple=True, metavar='<parameter>="name"', help="New name for parameter", ) @click.option( "--describe-param", "describe_params", multiple=True, metavar='<parameter>="description"', help="New description of the workflow", ) def edit(workflow_name, name, description, set_params, map_params, rename_params, describe_params): from renku.core.commands.view_model.plan import PlanViewModel from renku.core.commands.workflow import edit_workflow_command result = ( edit_workflow_command() .build() .execute( name=workflow_name, new_name=name, description=description, set_params=set_params, map_params=map_params, rename_params=rename_params, describe_params=describe_params, ) ) if not result.error: plan = result.output if isinstance(plan, PlanViewModel): _print_plan(plan) else: _print_composite_plan(plan) @workflow.command() @click.argument("workflow_name", metavar="<name or uuid>") @click.option( "--format", default="cwl", type=click.Choice(Proxy(_supported_formats), case_sensitive=False), show_default=True, help="Workflow language format.", ) @click.option( "-o", "--output", metavar="<path>", type=click.Path(exists=False), default=None, help="Save to <path> instead of printing to terminal", ) @click.option( "--values", metavar="<file>", type=click.Path(exists=True, dir_okay=False), default=None, 
help="YAML file containing parameter mappings to be used.", ) def export(workflow_name, format, output, values): from renku.core.commands.workflow import export_workflow_command communicator = ClickCallback() result = ( export_workflow_command() .with_communicator(communicator) .build() .execute(name_or_id=workflow_name, format=format, output=output, values=values) ) if not output: click.echo(result.output) @workflow.command() @click.argument("paths", type=click.Path(exists=True, dir_okay=True), nargs=-1) @click.pass_context def inputs(ctx, paths): from renku.core.commands.workflow import workflow_inputs_command result = workflow_inputs_command().build().execute(paths=paths) input_paths = result.output click.echo("\n".join(input_paths)) if paths: if not input_paths or any( p not in input_paths and all(Path(o) not in Path(p).parents for o in input_paths) for p in paths ): ctx.exit(1) @workflow.command() @click.argument("paths", type=click.Path(exists=True, dir_okay=True), nargs=-1) @click.pass_context def outputs(ctx, paths): from renku.core.commands.workflow import workflow_outputs_command result = workflow_outputs_command().build().execute(paths=paths) output_paths = result.output click.echo("\n".join(output_paths)) if paths: if not output_paths or any( p not in output_paths and all(Path(o) not in Path(p).parents for o in output_paths) for p in paths ): ctx.exit(1) @workflow.command() @click.option( "provider", "-p", "--provider", default="cwltool", show_default=True, type=click.Choice(Proxy(_available_workflow_providers), case_sensitive=False), help="The workflow engine to use.", ) @click.option("config", "-c", "--config", metavar="<config file>", help="YAML file containing config for the provider.") @click.option( "set_params", "-s", "--set", multiple=True, metavar="<parameter>=<value>", help="Set <value> for a <parameter> to be used in execution.", ) @click.option( "--values", metavar="<file>", type=click.Path(exists=True, dir_okay=False), help="YAML file containing parameter mappings to be used.", ) @click.argument("name_or_id", required=True)
Apache License 2.0
unishared/videonotes
main.py
MediaInMemoryUpload.__init__
python
def __init__(self, body, mimetype='application/octet-stream', chunksize=256 * 1024, resumable=False):
    self._body = body
    self._mimetype = mimetype
    self._resumable = resumable
    self._chunksize = chunksize
Create a new MediaInMemoryUpload.

Args:
  body: string, Bytes of body content.
  mimetype: string, Mime-type of the file or default of 'application/octet-stream'.
  chunksize: int, File will be uploaded in chunks of this many bytes. Only used if resumable=True.
  resumable: bool, True if this is a resumable upload. False means upload in a single request.
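A minimal usage sketch, assuming a Drive v2 service object built with google-api-python-client; the `service` variable, the note content and the mime type are placeholders, not part of this record:

upload = MediaInMemoryUpload(b'note body bytes', mimetype='application/json', resumable=True)  # content is made up
created = service.files().insert(  # `service` is a hypothetical Drive v2 client
    body={'title': 'Your notes', 'mimeType': 'application/json'},
    media_body=upload,
).execute()
print(created['id'])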
https://github.com/unishared/videonotes/blob/803cdd97b90823fb17f50dd55999aa7d1fec6c3a/main.py#L460-L476
import sys sys.path.insert(0, 'lib') __author__ = '[email protected] (Ali Afshar)' __author__ = '[email protected] Arnaud BRETON (UniShared)' from evernote_handlers import AuthEvernoteHandler, ExportEvernoteHandler from base_handlers import BaseHandler, BaseDriveHandler from utils import FileUtils, DriveState, UrlUtils import os import httplib2 import random from google.appengine.api import urlfetch import time from BufferedSmtpHandler import BufferingSMTPHandler from httplib import HTTPException from apiclient.errors import HttpError import webapp2 from apiclient.http import MediaUpload from oauth2client.client import flow_from_clientsecrets from oauth2client.client import AccessTokenRefreshError from oauth2client.appengine import simplejson as json import logging from utils import SibPath from configuration import configuration_dict logger = logging.getLogger("error") logger.setLevel(logging.ERROR) logger.addHandler(BufferingSMTPHandler(5)) urlfetch.set_default_fetch_deadline(45) httplib2.Http(timeout=45) SESSION_SECRET = open(SibPath('session.secret')).read() class HomePage(BaseHandler): TEMPLATE = 'index.html' def get(self, *args): return self.RenderTemplate(HomePage.TEMPLATE) class EditPage(BaseDriveHandler): TEMPLATE = 'index.html' def get(self, *args): user_agent = self.request.headers.get('User-Agent', None) if user_agent == 'facebookexternalhit/1.1 (+http://www.facebook.com/externalhit_uatext.php)': logging.debug('Returning template for scraper %s', user_agent) return self.RenderTemplate(EditPage.TEMPLATE) drive_state = DriveState.FromRequest(self.request) logging.debug('Drive state %s', drive_state.action) if drive_state.action == 'open' and len(drive_state.ids) > 0: code = self.request.get('code') if code: code = '?code=%s' % code self.redirect('/edit/%s%s' % (drive_state.ids[0], code)) return elif 'resource_id' in self.session and self.session['resource_id']: logging.debug('Restoring resource ID') resource_id = self.session['resource_id'] del self.session['resource_id'] return self.redirect('/edit/' + resource_id) elif drive_state.action == 'create': if drive_state.parent: self.redirect('/edit/?parent={0}'.format(drive_state.parent)) return creds = self.GetCodeCredentials() or self.GetSessionCredentials() if not creds: logging.debug('No credentials') resource_id_in_url = self.request.url.split('?', 1)[0].rsplit('/', 1)[1] if resource_id_in_url: logging.debug('Saving resource ID from URL %s', resource_id_in_url) self.session['resource_id'] = resource_id_in_url logging.debug('Redirecting to auth handler') return self.redirect('/auth') return self.RenderTemplate(EditPage.TEMPLATE) class ServiceHandler(BaseDriveHandler): def get(self): try: f = self.get_file(self.request.get('file_id')) f = FileUtils.transformations(f) self.RespondJSON(f) except AccessTokenRefreshError: logging.info('AccessTokenRefreshError') return self.abort(401) def post(self): service = self.CreateDrive() if service is None: return logging.debug('Get JSON data') data = self.RequestJSON() logging.debug('JSON data retrieved %s', json.dumps(data)) content = FileUtils.get_content_from_data(data) max_try = 5 for n in range(0, max_try): try: if 'templateId' in data: body = {'title': 'Your notes'} resource = service.files().copy(fileId=data['templateId'], body=body).execute() else: resource = { 'title': data['title'], 'description': data['description'], 'mimeType': data['mimeType'], } if 'parent' in data and data['parent']: logging.debug('Creating from a parent folder %s', data['parent']) default_folder_id = 
data['parent'] else: if 'defaultFolderId' in self.session and self.session['defaultFolderId']: default_folder_id = self.session['defaultFolderId'] else: default_folder_list = service.files().list(q='title="VideoNot.es"').execute() if default_folder_list and 'items' in default_folder_list and len(default_folder_list['items']): default_folder_id = default_folder_list['items'][0]['id'] self.session['defaultFolderId'] = default_folder_id else: folder_ressource = { 'title': 'VideoNot.es', 'mimeType': 'application/vnd.google-apps.folder' } default_folder = service.files().insert(body=folder_ressource).execute() default_folder_id = default_folder['id'] self.session['defaultFolderId'] = default_folder_id resource['parents'] = [{'id':default_folder_id}] logging.debug('Calling Drive API with content %s', str(content)) resource = service.files().insert( body=resource, media_body=MediaInMemoryUpload( content, data['mimeType'], resumable=True) ).execute() if BaseHandler.is_production(): anyone_permission = { 'type': 'anyone', 'role': 'reader', 'withLink': True } try: logging.info('Add anyone as a reader') service.permissions().insert(fileId=resource['id'], body=anyone_permission).execute() except HttpError: logging.info('Error when adding anyone as a reader') logging.debug('New ID created %s', resource['id']) return self.RespondJSON({'id': resource['id']}) except AccessTokenRefreshError: logging.info('AccessTokenRefreshError') return self.abort(401) except HttpError, http_error: logging.getLogger("error").exception("Try #%d: Exception occurred when creating file", n) if http_error.resp.status == 403: return self.abort(403) else: time.sleep((2 ** n) + (random.randint(0, 1000) / 1000)) except HTTPException: logging.getLogger("error").exception("Try #%d: Exception occurred when creating file", n) time.sleep((2 ** n) + (random.randint(0, 1000) / 1000)) logging.getLogger("error").exception("Exception occurred when creating file after %d tries", max_try) return self.abort(500) def put(self): service = self.CreateDrive() if service is None: return logging.debug('Get JSON data') data = self.RequestJSON() logging.debug('JSON data retrieved %s', json.dumps(data)) logging.info('Updating file %s', data['id']) content = FileUtils.get_content_from_data(data) max_try = 5 for n in range(0, max_try): try: if content is not None: resource = service.files().update( fileId=data['id'], newRevision=self.request.get('newRevision', False), body=data, media_body=MediaInMemoryUpload( content, data['mimeType'], resumable=True) ).execute() else: resource = service.files().update( fileId=data['id'], newRevision=self.request.get('newRevision', False), body=data).execute() return self.RespondJSON({'id': resource['id']}) except HttpError, http_error: logging.getLogger("error").exception("Try #%d: Exception occurred when updating file", n) if http_error.resp.status == 403: return self.abort(403) else: time.sleep((2 ** n) + (random.randint(0, 1000) / 1000)) except HTTPException: logging.getLogger("error").exception("Try #%d: Exception occurred when updating file", n) time.sleep((2 ** n) + (random.randint(0, 1000) / 1000)) except AccessTokenRefreshError: logging.info('AccessTokenRefreshError') return self.abort(401) logging.getLogger("error").exception("Exception occurred when updating file after %d tries", max_try) return self.abort(500) class AuthHandler(BaseDriveHandler): def get(self): creds = self.GetCodeCredentials() or self.GetSessionCredentials() if not creds: logging.debug('No credentials, redirecting to Oauth2 URL') next = 
self.request.get('next') if next and BaseHandler.is_authorized_domain(next): self.session['next'] = next file_id = self.request.get('file_id') if file_id: self.session['fileId'] = file_id redirect_uri = self.RedirectAuth() return self.redirect(redirect_uri) if 'next' in self.session: next = self.session['next'] del self.session['next'] params = {'videonotes_start': 1} if 'fileId' in self.session: file_id = self.session['fileId'] del self.session['fileId'] if file_id: params.update({'videonotes_id': file_id}) redirect_url = UrlUtils.add_query_parameter(next, params) return self.redirect(str(redirect_url)) else: return self.redirect('/edit/') class UserHandler(BaseDriveHandler): def get(self): service = self.CreateUserInfo() if service is None: return self.abort(401) try: logging.debug('Get user informations') result = service.userinfo().get().execute() self.RespondJSON(result) except AccessTokenRefreshError: return self.abort(401) class ProxyHandler(BaseHandler): def get(self): url = self.request.get('q') logging.debug('Fetch URL %s', url) if BaseHandler.is_authorized_domain(url): logging.debug('Authorized domain URL %s', url) result = urlfetch.fetch(url) if result.status_code == 200: self.response.out.write(result.content.strip()) else: logging.getLogger("error").error('Unauthorized domain %s', url) return self.abort(403) class AboutHandler(BaseDriveHandler): def get(self): service = self.CreateDrive() if service is None: return try: result = service.about().get().execute() self.RespondJSON(result) except AccessTokenRefreshError: return self.abort(401) class ConfigHandler(BaseHandler): def get(self): production = BaseHandler.is_production() logging.debug('Get configuration, production %s', production) segment_io_account = configuration_dict['segmentio'] logging.debug('Segment IO account %s', segment_io_account) app_id = flow_from_clientsecrets('client_secrets_{0}.json'.format(self.get_version()), scope='').client_id.split('.')[0].split('-')[0] logging.debug('App id %s', app_id) config = {'segmentio': segment_io_account, 'appId': app_id} return self.RespondJSON(config) class MediaInMemoryUpload(MediaUpload):
MIT License
altair-viz/altair-transform
altair_transform/vegaexpr.py
timeFormat
python
def timeFormat(value, specifier):
    raise NotImplementedError()
Formats a datetime value (either a Date object or timestamp) as a string, according to the local time. The specifier must be a valid d3-time-format specifier. For example: timeFormat(timestamp, '%A').
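The method body is intentionally unimplemented here; purely as a hedged sketch, a subset of d3-time-format directives overlaps with Python's strftime, so an approximate local-time formatter could look like this (the helper name and directive coverage are assumptions, not part of the library):

import datetime as dt

def _time_format_sketch(timestamp_ms, specifier):
    # Directives such as %A, %B, %Y, %m, %d, %H, %M, %S behave the same in
    # d3-time-format and strftime; anything outside that overlap is not handled.
    return dt.datetime.fromtimestamp(timestamp_ms / 1000.0).strftime(specifier)

_time_format_sketch(1546300800000, '%A')  # e.g. 'Tuesday', depending on the local timezone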
https://github.com/altair-viz/altair-transform/blob/b65bf854de1e80f931e063d8fb2ec938773826fb/altair_transform/vegaexpr.py#L481-L483
import datetime as dt from functools import reduce, wraps import itertools import math import operator import random import sys import time as timemod from typing import Any, Callable, Dict, Optional, List, Union, overload import numpy as np import pandas as pd from dateutil import tz from altair_transform.utils import evaljs, undefined, JSRegex def eval_vegajs(expression: str, datum: pd.DataFrame = None) -> pd.DataFrame: namespace = {"datum": datum} if datum is not None else {} namespace.update(VEGAJS_NAMESPACE) return evaljs(expression, namespace) def vectorize(func: Callable) -> Callable: @wraps(func) def wrapper(*args, **kwargs): series_args = [ arg for arg in itertools.chain(args, kwargs.values()) if isinstance(arg, pd.Series) ] if not series_args: return func(*args, **kwargs) else: index = reduce(operator.or_, [s.index for s in series_args]) def _get(x, i): return x.get(i, math.nan) if isinstance(x, pd.Series) else x return pd.Series( [ func( *(_get(arg, i) for arg in args), **{k: _get(v, i) for k, v in kwargs.items()}, ) for i in index ], index=index, ) if hasattr(func, "__annotations__"): wrapper.__annotations__ = { key: Union[pd.Series, val] for key, val in func.__annotations__.items() } return wrapper @vectorize def isArray(value: Any) -> bool: return isinstance(value, (list, np.ndarray)) @vectorize def isBoolean(value: Any) -> bool: return isinstance(value, (bool, np.bool_)) @vectorize def isDate(value: Any) -> bool: return isinstance(value, dt.datetime) @vectorize def isDefined(value: Any) -> bool: return value is not undefined @vectorize def isNumber(value: Any) -> bool: return np.issubdtype(type(value), np.number) @vectorize def isObject(value: Any) -> bool: return value is None or isinstance(value, dict) @vectorize def isRegExp(value: Any) -> bool: return isinstance(value, JSRegex) @vectorize def isString(value: Any) -> bool: return isinstance(value, str) @vectorize def isValid(value: Any) -> bool: return not (value is None or value is undefined or pd.isna(value)) @vectorize def toBoolean(value: Any) -> bool: return bool(value) @vectorize def toDate(value: Any) -> Optional[float]: if isinstance(value, (float, int)): return value if value is None or value == "": return None return pd.to_datetime(value).timestamp() * 1000 @vectorize def toNumber(value: Any) -> Optional[float]: if value is None or value == "": return None return float(value) @vectorize def toString(value: Any) -> Optional[str]: if value is None or value == "": return None if isinstance(value, float) and value % 1 == 0: return str(int(value)) return str(value) def now() -> float: return round(timemod.time() * 1000, 0) @overload def datetime() -> dt.datetime: ... @overload def datetime(timestamp: float) -> dt.datetime: ... @overload def datetime( year: float, month: int, day: int = 0, hour: int = 0, min: int = 0, sec: int = 0, millisec: float = 0, ) -> dt.datetime: ... 
@vectorize def datetime(*args): if len(args) == 0: return dt.datetime.now() elif len(args) == 1: return dt.datetime.fromtimestamp(0.001 * args[0]) elif len(args) == 2: return dt.datetime(*args, 1) elif len(args) <= 7: args = list(map(int, args)) args[1] += 1 if len(args) == 2: args.append(0) if len(args) == 7: args[6] = int(args[6] * 1000) return dt.datetime(*args) else: raise ValueError("Too many arguments") @vectorize def date(datetime: dt.datetime) -> int: return datetime.day @vectorize def day(datetime: dt.datetime) -> int: return (datetime.weekday() + 1) % 7 @vectorize def year(datetime: dt.datetime) -> int: return datetime.year @vectorize def quarter(datetime: dt.datetime) -> int: return (datetime.month - 1) // 3 @vectorize def month(datetime: dt.datetime) -> int: return datetime.month - 1 @vectorize def hours(datetime: dt.datetime) -> int: return datetime.hour @vectorize def minutes(datetime: dt.datetime) -> int: return datetime.minute @vectorize def seconds(datetime: dt.datetime) -> int: return datetime.second @vectorize def milliseconds(datetime: dt.datetime) -> float: return datetime.microsecond / 1000 @vectorize def time(datetime: dt.datetime) -> float: return datetime.timestamp() * 1000 @vectorize def timezoneoffset(datetime): raise NotImplementedError("timezoneoffset()") @vectorize def utc( year: int, month: int = 0, day: int = 1, hour: int = 0, min: int = 0, sec: int = 0, millisec: int = 0, ) -> float: return ( dt.datetime( int(year), int(month) + 1, int(day), int(hour), int(min), int(sec), int(millisec * 1000), tzinfo=dt.timezone.utc, ).timestamp() * 1000 ) @vectorize def utcdate(datetime: dt.datetime) -> int: return date(datetime.astimezone(tz.tzutc())) @vectorize def utcday(datetime: dt.datetime) -> int: return day(datetime.astimezone(tz.tzutc())) @vectorize def utcyear(datetime: dt.datetime) -> int: return year(datetime.astimezone(tz.tzutc())) @vectorize def utcquarter(datetime: dt.datetime) -> int: return quarter(datetime.astimezone(tz.tzutc())) @vectorize def utcmonth(datetime: dt.datetime) -> int: return month(datetime.astimezone(tz.tzutc())) @vectorize def utchours(datetime: dt.datetime) -> int: return hours(datetime.astimezone(tz.tzutc())) @vectorize def utcminutes(datetime: dt.datetime) -> int: return minutes(datetime.astimezone(tz.tzutc())) @vectorize def utcseconds(datetime: dt.datetime) -> int: return seconds(datetime.astimezone(tz.tzutc())) def utcmilliseconds(datetime: dt.datetime) -> float: return milliseconds(datetime.astimezone(tz.tzutc())) @vectorize def dayFormat(day: int) -> str: days = [ "Sunday", "Monday", "Tuesday", "Wednesday", "Thursday", "Friday", "Saturday", ] return days[day % 7] @vectorize def dayAbbrevFormat(day: int) -> str: days = ["Sun", "Mon", "Tue", "Wed", "Thu", "Fri", "Sat"] return days[day % 7] @vectorize def format(value, specifier): raise NotImplementedError() @vectorize def monthFormat(month: int) -> str: months = [ "January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December", ] return months[month % 12] @vectorize def monthAbbrevFormat(month: int) -> str: months = [ "Jan", "Feb", "Ma", "Apr", "May", "Jun", "Jul", "Aug", "Sep", "Oct", "Nov", "Dec", ] return months[month % 12] @vectorize
MIT License
erdos-project/pylot
pylot/perception/detection/detection_eval_operator.py
CocoDetectionScoringModule.get_scores
python
def get_scores(self):
    self.get_summary_counter += 1
    if (self.get_summary_counter - 1) % self._flags.detection_eval_freq != 0:
        return {}
    if len(self.coco_eval.images) < self._flags.detection_eval_lookback:
        return {}
    result_dict = self.coco_eval.evaluate_last_n(
        self._flags.detection_eval_lookback)
    return {"coco_" + k: v for k, v in result_dict.items()}
Every FLAGS.detection_eval_freq calls to this function (starting with the first call), it returns an evaluation aggregate of the last FLAGS.detection_eval_lookback images as a dictionary; otherwise it returns an empty dictionary.
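Purely to illustrate the gating described above (the flag value is made up): with detection_eval_freq = 5, aggregates are produced on calls 1, 6, 11, ..., and only once enough images have been scored.

detection_eval_freq = 5  # hypothetical flag value
for call_number in range(1, 12):
    produces_aggregate = (call_number - 1) % detection_eval_freq == 0
    print(call_number, produces_aggregate)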
https://github.com/erdos-project/pylot/blob/858869fec82238ccc3eba0947b0ea37e6202b054/pylot/perception/detection/detection_eval_operator.py#L50-L66
import json from datetime import datetime import erdos from pycocotools.coco import COCO from pycocotools.cocoeval import COCOeval from pylot.perception.base_perception_eval_operator import BasePerceptionEvalOperator, ScoringModule from pylot.utils import verify_keys_in_dict class DetectionEvalOperator(BasePerceptionEvalOperator): def __init__(self, prediction_stream: erdos.ReadStream, ground_truth_stream: erdos.ReadStream, finished_indicator_stream: erdos.WriteStream, evaluate_timely: bool, matching_policy: str, frame_gap: int, flags): super().__init__(prediction_stream, ground_truth_stream, finished_indicator_stream, evaluate_timely, matching_policy, frame_gap, CocoDetectionScoringModule, flags) class CocoDetectionScoringModule(ScoringModule): def __init__(self, flags): self._flags = flags self._logger = erdos.utils.setup_logging( "coco_detection_scoring_module", self._flags.log_file_name) label_list = ["person", "bicycle", "car", "motorcycle"] self.coco_eval = OnlineCOCOEval(label_list) self.get_summary_counter = 0
Apache License 2.0
langmead-lab/reference_flow
src/update_genome.py
update_allele
python
def update_allele(
    orig, alts, allele, indels, head, loc, f_var,
    hap, hap_str, offset, offset_other, chrom
):
    if len(orig) != len(alts[allele-1]):
        v_type = 'INDEL'
    else:
        v_type = 'SNP'
    flag_skip = False
    if indels:
        if loc == head - 1 and (len(orig) < len(alts[allele - 1])):
            print ('Warning: overlapped INS at {0}, {1} for hap{2}'.format(loc, chrom, hap_str))
            new_offset = add_alt(hap, loc-1, orig, alts[allele-1], offset, True)
        elif loc >= head:
            new_offset = add_alt(hap, loc-1, orig, alts[allele-1], offset, False)
        else:
            flag_skip = True
            print ('Warning: conflict at {0}, {1} for hap{2}'.format(loc, chrom, hap_str))
    else:
        new_offset = 0
        hap[loc+offset-1] = alts[allele-1]
    if not flag_skip:
        f_var.write(
            '%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\t%s\n' %
            (hap_str, chrom, v_type, str(loc), str(loc+offset),
             orig, alts[allele-1], str(new_offset), str(offset_other))
        )
    offset = new_offset
    head = loc + len(orig)
    return head, hap, offset
Update an allele
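A minimal sketch of the SNP path (indels=False), using an in-memory file for the variant log; the sequence, coordinates and haplotype label below are made up:

import io

hap = list('ACGTACGT')        # toy haplotype sequence
f_var = io.StringIO()         # stands in for the open variant log file
head, hap, offset = update_allele(
    orig='T', alts=['C'], allele=1, indels=False, head=0, loc=4,
    f_var=f_var, hap=hap, hap_str='A', offset=0, offset_other=0, chrom='21')
# hap[3] is now 'C', head == 5, offset stays 0, and one tab-separated SNP
# record has been written to f_var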
https://github.com/langmead-lab/reference_flow/blob/85fa12abcfc12f421f67d31c268ffd35d1427875/src/update_genome.py#L131-L177
import sys import argparse import random import copy from collections import OrderedDict def get_mutation_type(orig, alts): assert orig.count(',') == 0 if alts.count(',') == 0: if len(orig) == len(alts) and len(orig) == 1: return 'SNP' elif len(orig) == len(alts) and len(orig) != 1: return 'MNP' elif len(orig) != len(alts): return 'INDEL' return 'MULT' def get_allele_freq(info, num_haps, data_source, gnomad_ac_field): attrs = info.split(';') if data_source == '1kg': for a in attrs: if a[:3] == 'AC=': try: count = int(a[3:]) except: a = a[3:].split(',') inta = [int(i) for i in a] count = max(inta) return float(count) / num_haps elif data_source == 'gnomad': for a in attrs: field = a.split('=')[0] if field == gnomad_ac_field: return float(a.split('=')[1]) / num_haps return -1 def process_vcf_header(line, indiv, f_vcf, data_source, is_ld): labels = line.rstrip().split('\t') col = None num_haps = None if indiv != None: for i in range(9, len(labels)): if labels[i] == indiv: col = i if not col: print('Error! Couldn\'t find individual %s in VCF' % indiv) exit(1) f_vcf.write('\t'.join(labels[:9]) + f'\t{labels[col]}\n') else: if data_source == '1kg': if is_ld: f_vcf.write('\t'.join(labels[:9]) + '\tLD_SAMPLE\n') else: f_vcf.write('\t'.join(labels[:9]) + '\n') else: f_vcf.write(line) if data_source == '1kg': num_haps = 2 * (len(labels) - 9) return col, num_haps, labels def write_to_fasta(dict_genome, out_prefix, suffix, line_width = 60): f_fasta = open(f'{out_prefix}{suffix}.fa', 'w') for key in dict_genome.keys(): f_fasta.write(f'>{dict_genome[key][0]}\n') for i in range(0, len(dict_genome[key][1]), line_width): f_fasta.write(''.join(dict_genome[key][1][i: i + line_width]) + '\n') f_fasta.close()
MIT License
atomlinter/linter-pylama
bin/deps/flake8/api/legacy.py
get_style_guide
python
def get_style_guide(**kwargs):
    application = app.Application()
    application.parse_preliminary_options_and_args([])
    flake8.configure_logging(
        application.prelim_opts.verbose, application.prelim_opts.output_file)
    application.make_config_finder()
    application.find_plugins()
    application.register_plugin_options()
    application.parse_configuration_and_cli([])
    options = application.options
    for key, value in kwargs.items():
        try:
            getattr(options, key)
            setattr(options, key, value)
        except AttributeError:
            LOG.error('Could not update option "%s"', key)
    application.make_formatter()
    application.make_notifier()
    application.make_guide()
    application.make_file_checker_manager()
    return StyleGuide(application)
r"""Provision a StyleGuide for use. :param \*\*kwargs: Keyword arguments that provide some options for the StyleGuide. :returns: An initialized StyleGuide :rtype: :class:`StyleGuide`
https://github.com/atomlinter/linter-pylama/blob/9157f7f84083007161814c93b537a712984f3c86/bin/deps/flake8/api/legacy.py#L19-L51
import logging import os.path import flake8 from flake8.formatting import base as formatter from flake8.main import application as app LOG = logging.getLogger(__name__) __all__ = ('get_style_guide',)
MIT License
mgear-dev/mgear_core
scripts/mgear/core/anim_utils.py
keySel
python
def keySel():
    pm.setKeyframe()
Key selected controls
https://github.com/mgear-dev/mgear_core/blob/bb450fda44ff79c57f5f73d5a58c97a6b5c5d848/scripts/mgear/core/anim_utils.py#L523-L526
import re import traceback from functools import partial from maya import cmds import pymel.core as pm from pymel import versions import mgear from mgear.vendor.Qt import QtCore from mgear.vendor.Qt import QtWidgets from mgear.core import pyqt from mgear.core import dag from mgear.core import transform from mgear.core import utils from mgear.core import attribute from mgear.core import vector from mgear.core.attribute import reset_selected_channels_value from mgear.core.pickWalk import get_all_tag_children EXPR_LEFT_SIDE = re.compile("L(\d+)") EXPR_RIGHT_SIDE = re.compile("R(\d+)") CTRL_GRP_SUFFIX = "_controllers_grp" PLOT_GRP_SUFFIX = "_PLOT_grp" TAN_TOKEN = "_tan_ctl" TAN0_TOKEN = "_tan0_ctl" TAN1_TOKEN = "_tan1_ctl" START_IK_TOKEN = "_ik0_ctl" END_IK_TOKEN = "_ik1_ctl" POS_IK_TOKEN = "_spinePosition_ctl" NO_MIRROR_ATTRIBUTES = ["isRig", "uiHost", "_ctl"] def isSideElement(name): if "_L_" in name or "_R_" in name: return True nameParts = stripNamespace(name).split("|")[-1] for part in nameParts.split("_"): if EXPR_LEFT_SIDE.match(part) or EXPR_RIGHT_SIDE.match(part): return True else: return False def isSideNode(node): if node.hasAttr("side_label"): if node.side_label.get() in "LR": return True else: return False else: return isSideElement(node.name()) def swapSideLabel(name): for part in name.split("_"): if EXPR_LEFT_SIDE.match(part): return EXPR_LEFT_SIDE.sub(r"R\1", name) if EXPR_RIGHT_SIDE.match(part): return EXPR_RIGHT_SIDE.sub(r"L\1", name) else: if "_L_" in name: return name.replace("_L_", "_R_") elif "_R_" in name: return name.replace("_R_", "_L_") else: return name def swapSideLabelNode(node): name = node.stripNamespace() sw_name = swapSideLabel(name) if name != sw_name: return sw_name if node.hasAttr("side_label"): side = node.side_label.get() if side in "LR": c_side = node.attr("{}_custom_side_label".format(side)).get() if side == "L": cm_side = node.attr("R_custom_side_label").get() elif side == "R": cm_side = node.attr("L_custom_side_label").get() return node.stripNamespace().replace(c_side, cm_side) else: return node.stripNamespace() else: return swapSideLabel(node.stripNamespace()) def getClosestNode(node, nodesToQuery): distance = None closestNode = None node = pm.PyNode(node) for index, nodeTQ in enumerate(nodesToQuery): nodeTQ = pm.PyNode(nodeTQ) tmpDist = vector.getDistance2(node, nodeTQ) if index is 0: distance = tmpDist closestNode = nodeTQ if distance > tmpDist: distance = tmpDist closestNode = nodeTQ return closestNode.name() def recordNodesMatrices(nodes, desiredTime): nodeToMat_dict = {} for fk in nodes: fk = pm.PyNode(fk) nodeToMat_dict[fk.name()] = fk.getAttr("worldMatrix", time=desiredTime) return nodeToMat_dict def getRootNode(): root = None current = pm.ls(sl=True) if not current: raise RuntimeError("You need to select at least one rig node") if pm.objExists("{}.is_rig".format(current[0])): root = current[0] else: holder = current[0] while pm.listRelatives(holder, parent=True) and not root: if pm.objExists("{}.is_rig".format(holder)): root = holder else: holder = pm.listRelatives(holder, parent=True)[0] if not root: raise RuntimeError("Couldn't find root node from your selection") return root def getControlers(model, gSuffix=CTRL_GRP_SUFFIX): try: ctl_set = pm.PyNode(model.name() + gSuffix) members = ctl_set.members() return members except TypeError: return None def get_control_list(control, blend_attr, extension="_ctl"): controls = None controls_attribute = blend_attr.replace("_blend", extension) try: controls = cmds.getAttr("{}.{}".format(control, 
controls_attribute)) except ValueError: if control == "world_ctl": _msg = "New type attributes using world as host are not supported" raise RuntimeError(_msg) attr = "{}_{}_ctl".format(blend_attr.split("_")[0], control.split(":")[-1].split("_")[1]) controls = cmds.getAttr("{}.{}".format(control, attr)) return controls def get_ik_fk_controls(control, blend_attr, comp_ctl_list=None): ik_fk_controls = {"fk_controls": [], "ik_controls": []} if comp_ctl_list: ctl_list = cmds.getAttr("{}.{}".format(control, comp_ctl_list)) else: ctl_list = get_control_list(control, blend_attr) for ctl in ctl_list.split(","): if len(ctl) == 0: continue if "_ik" in ctl.lower() or "_upv" in ctl: ik_fk_controls["ik_controls"].append(ctl) elif "_fk" in ctl.lower(): ik_fk_controls["fk_controls"].append(ctl) return ik_fk_controls def get_ik_fk_controls_by_role(uiHost, attr_ctl_cnx): ik_controls = {"ik_control": None, "pole_vector": None, "ik_rot": None } fk_controls = [] uiHost = pm.PyNode(uiHost) if uiHost.hasAttr(attr_ctl_cnx): cnxs = uiHost.attr(attr_ctl_cnx).listConnections() if cnxs: for c in cnxs: role = c.ctl_role.get() if "fk" in role: fk_controls.append(c.stripNamespace()) elif role == "upv": ik_controls["pole_vector"] = c.stripNamespace() elif role == "ik": ik_controls["ik_control"] = c.stripNamespace() elif role == "ikRot": ik_controls["ik_rot"] = c.stripNamespace() fk_controls = sorted(fk_controls) return ik_controls, fk_controls def get_host_from_node(control): namespace = getNamespace(control).split("|")[-1] host = cmds.getAttr("{}.uiHost".format(control)) return "{}:{}".format(namespace, host) def getNamespace(modelName): if not modelName: return "" if len(modelName.split(":")) >= 2: nameSpace = ":".join(modelName.split(":")[:-1]) else: nameSpace = "" return nameSpace def stripNamespace(nodeName): return nodeName.split(":")[-1] def getNode(nodeName): try: return pm.PyNode(nodeName) except pm.MayaNodeError: return None def listAttrForMirror(node): res = ["tx", "ty", "tz", "rx", "ry", "rz", "sx", "sy", "sz", "ro"] res.extend(pm.listAttr(node, userDefined=True, shortNames=True)) res = list(filter(lambda x: not x.startswith("inv"), res)) res = list(filter(lambda x: node.attr(x).type() not in ["message", "string"], res)) return res def getInvertCheckButtonAttrName(str): return "inv{0}".format(str.lower().capitalize()) def selAll(model): controlers = getControlers(model) pm.select(controlers) def selGroup(model, groupSuffix): controlers = getControlers(model, groupSuffix) pm.select(controlers) def select_all_child_controls(control, *args): children = get_all_tag_children(control) if not children: return cmds.select(children, add=True) def quickSel(model, channel, mouse_button): qs_attr = model.attr("quicksel%s" % channel) if mouse_button == QtCore.Qt.LeftButton: names = qs_attr.get().split(",") if not names: return pm.select(clear=True) for name in names: ctl = dag.findChild(model, name) if ctl: ctl.select(add=True) elif mouse_button == QtCore.Qt.MidButton: names = [sel.name().split("|")[-1] for sel in pm.ls(selection=True) if sel.name().endswith("_ctl")] qs_attr.set(",".join(names)) elif mouse_button == QtCore.Qt.RightButton: names = qs_attr.get().split(",") if not names: return else: keyObj(model, names)
MIT License
lightly-ai/lightly
lightly/openapi_generated/swagger_client/models/configuration_set_request.py
ConfigurationSetRequest.configs
python
def configs(self, configs):
    if self._configuration.client_side_validation and configs is None:
        raise ValueError("Invalid value for `configs`, must not be `None`")

    self._configs = configs
Sets the configs of this ConfigurationSetRequest.

:param configs: The configs of this ConfigurationSetRequest.  # noqa: E501
:type: list[ConfigurationEntry]
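A small usage sketch; constructing the entry with no arguments is an assumption about the generated ConfigurationEntry model, not something this record guarantees:

entry = ConfigurationEntry()  # hypothetical: real entries carry their own fields
request = ConfigurationSetRequest(name='my-config', configs=[entry])
# Assigning None afterwards raises ValueError when client-side validation is enabled:
# request.configs = None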
https://github.com/lightly-ai/lightly/blob/00820e5a60522effb3685a8d792f15e99770ea50/lightly/openapi_generated/swagger_client/models/configuration_set_request.py#L92-L102
import pprint import re import six from lightly.openapi_generated.swagger_client.configuration import Configuration class ConfigurationSetRequest(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'name': 'str', 'configs': 'list[ConfigurationEntry]' } attribute_map = { 'name': 'name', 'configs': 'configs' } def __init__(self, name=None, configs=None, _configuration=None): if _configuration is None: _configuration = Configuration() self._configuration = _configuration self._name = None self._configs = None self.discriminator = None self.name = name self.configs = configs @property def name(self): return self._name @name.setter def name(self, name): if self._configuration.client_side_validation and name is None: raise ValueError("Invalid value for `name`, must not be `None`") self._name = name @property def configs(self): return self._configs @configs.setter
MIT License
open-eo/openeo-python-client
openeo/processes.py
ProcessBuilder.create_raster_cube
python
def create_raster_cube(self) -> 'ProcessBuilder':
    return create_raster_cube()
Create an empty raster data cube.

:return: An empty raster data cube with zero dimensions.
https://github.com/open-eo/openeo-python-client/blob/bde2d0f992bd52fc244c8bfeceac4e58d6b12c2d/openeo/processes.py#L879-L885
import builtins from openeo.internal.processes.builder import ProcessBuilderBase, UNSET class ProcessBuilder(ProcessBuilderBase): def __add__(self, other) -> 'ProcessBuilder': return self.add(other) def __radd__(self, other) -> 'ProcessBuilder': return add(other, self) def __sub__(self, other) -> 'ProcessBuilder': return self.subtract(other) def __rsub__(self, other) -> 'ProcessBuilder': return subtract(other, self) def __mul__(self, other) -> 'ProcessBuilder': return self.multiply(other) def __rmul__(self, other) -> 'ProcessBuilder': return multiply(other, self) def __truediv__(self, other) -> 'ProcessBuilder': return self.divide(other) def __rtruediv__(self, other) -> 'ProcessBuilder': return divide(other, self) def __neg__(self) -> 'ProcessBuilder': return self.multiply(-1) def __pow__(self, other) -> 'ProcessBuilder': return self.power(other) def __getitem__(self, key) -> 'ProcessBuilder': if isinstance(key, builtins.int): return self.array_element(index=key) else: return self.array_element(label=key) def __eq__(self, other) -> 'ProcessBuilder': return eq(self, other) def __ne__(self, other) -> 'ProcessBuilder': return neq(self, other) def absolute(self) -> 'ProcessBuilder': return absolute(x=self) def add(self, y) -> 'ProcessBuilder': return add(x=self, y=y) def add_dimension(self, name, label, type=UNSET) -> 'ProcessBuilder': return add_dimension(data=self, name=name, label=label, type=type) def aggregate_spatial(self, geometries, reducer, target_dimension=UNSET, context=UNSET) -> 'ProcessBuilder': return aggregate_spatial(data=self, geometries=geometries, reducer=reducer, target_dimension=target_dimension, context=context) def aggregate_spatial_binary(self, geometries, reducer, target_dimension=UNSET, context=UNSET) -> 'ProcessBuilder': return aggregate_spatial_binary(data=self, geometries=geometries, reducer=reducer, target_dimension=target_dimension, context=context) def aggregate_spatial_window(self, reducer, size, boundary=UNSET, align=UNSET, context=UNSET) -> 'ProcessBuilder': return aggregate_spatial_window(data=self, reducer=reducer, size=size, boundary=boundary, align=align, context=context) def aggregate_temporal(self, intervals, reducer, labels=UNSET, dimension=UNSET, context=UNSET) -> 'ProcessBuilder': return aggregate_temporal(data=self, intervals=intervals, reducer=reducer, labels=labels, dimension=dimension, context=context) def aggregate_temporal_period(self, period, reducer, dimension=UNSET, context=UNSET) -> 'ProcessBuilder': return aggregate_temporal_period(data=self, period=period, reducer=reducer, dimension=dimension, context=context) def all(self, ignore_nodata=UNSET) -> 'ProcessBuilder': return all(data=self, ignore_nodata=ignore_nodata) def and_(self, y) -> 'ProcessBuilder': return and_(x=self, y=y) def anomaly(self, normals, period) -> 'ProcessBuilder': return anomaly(data=self, normals=normals, period=period) def any(self, ignore_nodata=UNSET) -> 'ProcessBuilder': return any(data=self, ignore_nodata=ignore_nodata) def apply(self, process, context=UNSET) -> 'ProcessBuilder': return apply(data=self, process=process, context=context) def apply_dimension(self, process, dimension, target_dimension=UNSET, context=UNSET) -> 'ProcessBuilder': return apply_dimension(data=self, process=process, dimension=dimension, target_dimension=target_dimension, context=context) def apply_kernel(self, kernel, factor=UNSET, border=UNSET, replace_invalid=UNSET) -> 'ProcessBuilder': return apply_kernel(data=self, kernel=kernel, factor=factor, border=border, 
replace_invalid=replace_invalid) def apply_neighborhood(self, process, size, overlap=UNSET, context=UNSET) -> 'ProcessBuilder': return apply_neighborhood(data=self, process=process, size=size, overlap=overlap, context=context) def arccos(self) -> 'ProcessBuilder': return arccos(x=self) def arcosh(self) -> 'ProcessBuilder': return arcosh(x=self) def arcsin(self) -> 'ProcessBuilder': return arcsin(x=self) def arctan(self) -> 'ProcessBuilder': return arctan(x=self) def arctan2(self, x) -> 'ProcessBuilder': return arctan2(y=self, x=x) def ard_normalized_radar_backscatter(self, elevation_model=UNSET, contributing_area=UNSET, ellipsoid_incidence_angle=UNSET, noise_removal=UNSET) -> 'ProcessBuilder': return ard_normalized_radar_backscatter(data=self, elevation_model=elevation_model, contributing_area=contributing_area, ellipsoid_incidence_angle=ellipsoid_incidence_angle, noise_removal=noise_removal) def ard_surface_reflectance(self, atmospheric_correction_method, cloud_detection_method, elevation_model=UNSET, atmospheric_correction_options=UNSET, cloud_detection_options=UNSET) -> 'ProcessBuilder': return ard_surface_reflectance(data=self, atmospheric_correction_method=atmospheric_correction_method, cloud_detection_method=cloud_detection_method, elevation_model=elevation_model, atmospheric_correction_options=atmospheric_correction_options, cloud_detection_options=cloud_detection_options) def array_append(self, value) -> 'ProcessBuilder': return array_append(data=self, value=value) def array_apply(self, process, context=UNSET) -> 'ProcessBuilder': return array_apply(data=self, process=process, context=context) def array_concat(self, array2) -> 'ProcessBuilder': return array_concat(array1=self, array2=array2) def array_contains(self, value) -> 'ProcessBuilder': return array_contains(data=self, value=value) def array_create(self=UNSET, repeat=UNSET) -> 'ProcessBuilder': return array_create(data=self, repeat=repeat) def array_create_labeled(self, labels) -> 'ProcessBuilder': return array_create_labeled(data=self, labels=labels) def array_element(self, index=UNSET, label=UNSET, return_nodata=UNSET) -> 'ProcessBuilder': return array_element(data=self, index=index, label=label, return_nodata=return_nodata) def array_filter(self, condition, context=UNSET) -> 'ProcessBuilder': return array_filter(data=self, condition=condition, context=context) def array_find(self, value) -> 'ProcessBuilder': return array_find(data=self, value=value) def array_find_label(self, label) -> 'ProcessBuilder': return array_find_label(data=self, label=label) def array_interpolate_linear(self) -> 'ProcessBuilder': return array_interpolate_linear(data=self) def array_labels(self) -> 'ProcessBuilder': return array_labels(data=self) def array_modify(self, values, index, length=UNSET) -> 'ProcessBuilder': return array_modify(data=self, values=values, index=index, length=length) def arsinh(self) -> 'ProcessBuilder': return arsinh(x=self) def artanh(self) -> 'ProcessBuilder': return artanh(x=self) def atmospheric_correction(self, method, elevation_model=UNSET, options=UNSET) -> 'ProcessBuilder': return atmospheric_correction(data=self, method=method, elevation_model=elevation_model, options=options) def between(self, min, max, exclude_max=UNSET) -> 'ProcessBuilder': return between(x=self, min=min, max=max, exclude_max=exclude_max) def ceil(self) -> 'ProcessBuilder': return ceil(x=self) def climatological_normal(self, period, climatology_period=UNSET) -> 'ProcessBuilder': return climatological_normal(data=self, period=period, 
climatology_period=climatology_period) def clip(self, min, max) -> 'ProcessBuilder': return clip(x=self, min=min, max=max) def cloud_detection(self, method, options=UNSET) -> 'ProcessBuilder': return cloud_detection(data=self, method=method, options=options) def constant(self) -> 'ProcessBuilder': return constant(x=self) def cos(self) -> 'ProcessBuilder': return cos(x=self) def cosh(self) -> 'ProcessBuilder': return cosh(x=self) def count(self, condition=UNSET, context=UNSET) -> 'ProcessBuilder': return count(data=self, condition=condition, context=context)
Apache License 2.0
aspose-words-cloud/aspose-words-cloud-python
asposewordscloud/models/report_build_options.py
ReportBuildOptions.__init__
python
def __init__(self):
    self.discriminator = None
ReportBuildOptions - a model defined in Swagger
https://github.com/aspose-words-cloud/aspose-words-cloud-python/blob/abf8fccfed40aa2b09c6cdcaf3f2723e1f412d85/asposewordscloud/models/report_build_options.py#L59-L62
import pprint import re import datetime import six import json class ReportBuildOptions(object): """ allowed enum values """ NONE = "None" ALLOWMISSINGMEMBERS = "AllowMissingMembers" REMOVEEMPTYPARAGRAPHS = "RemoveEmptyParagraphs" INLINEERRORMESSAGES = "InlineErrorMessages" """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { } attribute_map = { }
MIT License
dialoguemd/fastapi-sqla
fastapi_sqla/__init__.py
paginate_query
python
def paginate_query(
    query: DbQuery,
    session: Session,
    total_items: int,
    offset: int,
    limit: int,
    scalars: bool = True,
) -> Page[T]:
    raise NotImplementedError(f"no paginate_query registered for type {type(query)!r}")
Dispatch on registered functions based on `query` type
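Because paginate_query is a functools.singledispatch base (see the @singledispatch decorator in the surrounding module), concrete behaviour is registered per query type; a hypothetical registration for 2.0-style Select statements might look like this, with the Page/Meta construction elided to keep the sketch short:

@paginate_query.register
def _paginate_select(query: Select, session: Session, total_items: int,
                     offset: int, limit: int, scalars: bool = True):
    # Hypothetical override: apply the window and execute the statement.
    result = session.execute(query.offset(offset).limit(limit))
    return result.scalars().all() if scalars else result.all()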
https://github.com/dialoguemd/fastapi-sqla/blob/2aa694edaf647eb5205f4500ef7dbe466d8a4106/fastapi_sqla/__init__.py#L230-L239
import asyncio import math import os from contextlib import contextmanager from functools import singledispatch from typing import Callable, Generic, List, Optional, TypeVar, Union import structlog from fastapi import Depends, FastAPI, Query, Request from fastapi.concurrency import contextmanager_in_threadpool from fastapi.responses import PlainTextResponse from pydantic import BaseModel, Field from pydantic.generics import GenericModel from sqlalchemy import engine_from_config from sqlalchemy.ext.declarative import DeferredReflection from sqlalchemy.orm import Query as LegacyQuery from sqlalchemy.orm.session import Session as SqlaSession from sqlalchemy.orm.session import sessionmaker from sqlalchemy.sql import Select, func, select try: from sqlalchemy.orm import declarative_base except ImportError: from sqlalchemy.ext.declarative import declarative_base try: from . import asyncio_support from .asyncio_support import AsyncSession from .asyncio_support import open_session as open_async_session except ImportError as err: asyncio_support = False asyncio_support_err = str(err) __all__ = [ "AsyncSession", "Base", "Page", "Paginate", "Session", "open_async_session", "open_session", "setup", ] logger = structlog.get_logger(__name__) _SESSION_KEY = "fastapi_sqla_session" _Session = sessionmaker() def setup(app: FastAPI): app.add_event_handler("startup", startup) app.middleware("http")(add_session_to_request) async_sqlalchemy_url = os.getenv("async_sqlalchemy_url") if async_sqlalchemy_url: assert asyncio_support, asyncio_support_err app.add_event_handler("startup", asyncio_support.startup) app.middleware("http")(asyncio_support.add_session_to_request) def startup(): engine = engine_from_config(os.environ, prefix="sqlalchemy_") Base.metadata.bind = engine Base.prepare(engine) _Session.configure(bind=engine) logger.info("startup", engine=engine) class Base(declarative_base(cls=DeferredReflection)): __abstract__ = True class Session(SqlaSession): def __new__(cls, request: Request) -> SqlaSession: try: return request.scope[_SESSION_KEY] except KeyError: raise Exception( "No session found in request, please ensure you've setup fastapi_sqla." 
) @contextmanager def open_session() -> Session: session = _Session() logger.bind(db_session=session) try: yield session except Exception: logger.warning("context failed, rolling back", exc_info=True) session.rollback() raise else: try: session.commit() except Exception: logger.exception("commit failed, rolling back") session.rollback() raise finally: session.close() async def add_session_to_request(request: Request, call_next): async with contextmanager_in_threadpool(open_session()) as session: request.scope[_SESSION_KEY] = session response = await call_next(request) loop = asyncio.get_running_loop() if response.status_code < 400: try: await loop.run_in_executor(None, session.commit) except Exception: logger.exception("commit failed, returning http error") response = PlainTextResponse( content="Internal Server Error", status_code=500 ) if response.status_code >= 400: logger.warning("http error, rolling back", status_code=response.status_code) await loop.run_in_executor(None, session.rollback) return response T = TypeVar("T") class Item(GenericModel, Generic[T]): data: T class Collection(GenericModel, Generic[T]): data: List[T] class Meta(BaseModel): offset: int = Field(..., description="Current page offset") total_items: int = Field(..., description="Total number of items in the collection") total_pages: int = Field(..., description="Total number of pages in the collection") page_number: int = Field(..., description="Current page number. Starts at 1.") class Page(Collection, Generic[T]): meta: Meta DbQuery = Union[LegacyQuery, Select] QueryCountDependency = Callable[..., int] PaginateSignature = Callable[[DbQuery, Optional[bool]], Page[T]] def default_query_count(session: Session, query: DbQuery) -> int: if isinstance(query, LegacyQuery): result = query.count() elif isinstance(query, Select): result = session.execute(select(func.count()).select_from(query)).scalar() else: raise NotImplementedError(f"Query type {type(query)!r} is not supported") return result @singledispatch
MIT License
microsoft/hummingbird
hummingbird/ml/operator_converters/_tree_commons.py
get_parameters_for_tree_trav_sklearn
python
def get_parameters_for_tree_trav_sklearn(lefts, rights, features, thresholds, values, classes=None, extra_config={}):
    features = [max(x, 0) for x in features]
    values = np.array(values)
    if len(values.shape) == 3:
        values = values.reshape(values.shape[0], -1)
    if values.shape[1] > 1 and classes is not None and len(classes) > 0:
        values /= np.sum(values, axis=1, keepdims=True)
    if constants.NUM_TREES in extra_config:
        values /= extra_config[constants.NUM_TREES]
    return get_parameters_for_tree_trav_common(lefts, rights, features, thresholds, values)
This function is used to generate tree parameters for sklearn trees.
Includes SklearnRandomForestClassifier/Regressor, and SklearnGradientBoostingClassifier.

Args:
    lefts: The left nodes
    rights: The right nodes
    features: The features used in the decision nodes
    thresholds: The thresholds used in the decision nodes
    values: The values stored in the leaf nodes
    classes: The list of class labels. None if regression model

Returns:
    An array containing the extracted parameters
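A tiny, made-up decision stump shows the per-leaf normalisation for a binary classifier; the array layout mirrors sklearn's tree_ attributes, with -2 marking leaf slots:

lefts, rights = [1, -1, -1], [2, -1, -1]
features, thresholds = [0, -2, -2], [0.5, -2.0, -2.0]
values = [[[6.0, 2.0]], [[5.0, 1.0]], [[1.0, 1.0]]]   # class counts per node

params = get_parameters_for_tree_trav_sklearn(
    lefts, rights, features, thresholds, values, classes=[0, 1])
# the value rows inside `params` are now class probabilities,
# e.g. the left leaf becomes [5/6, 1/6]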
https://github.com/microsoft/hummingbird/blob/1836ade9ca01506825e827734b82d9efcd54feae/hummingbird/ml/operator_converters/_tree_commons.py#L313-L338
import copy import numpy as np import torch from ._tree_implementations import TreeImpl from ._tree_implementations import GEMMDecisionTreeImpl, TreeTraversalDecisionTreeImpl, PerfectTreeTraversalDecisionTreeImpl from . import constants from hummingbird.ml.exceptions import MissingConverter class Node: def __init__(self, id=None): self.id = id self.left = None self.right = None self.feature = None self.threshold = None self.value = None class TreeParameters: def __init__(self, lefts, rights, features, thresholds, values): self.lefts = lefts self.rights = rights self.features = features self.thresholds = thresholds self.values = values class PostTransform: def __call__(self, x): return x class ApplyBasePredictionPostTransform(PostTransform): def __init__(self, base_prediction): self.base_prediction = base_prediction def __call__(self, x): x += self.base_prediction return x class ApplySigmoidPostTransform(PostTransform): def __init__(self): self.one = torch.tensor(1.0) def __call__(self, x): output = torch.sigmoid(x) return torch.cat([self.one - output, output], dim=1) class ApplySigmoidBasePredictionPostTransform(PostTransform): def __init__(self, base_prediction): self.base_prediction = ApplyBasePredictionPostTransform(base_prediction) def __call__(self, x): return ApplySigmoidPostTransform()(self.base_prediction(x)) class ApplySoftmaxPostTransform(PostTransform): def __call__(self, x): return torch.softmax(x, dim=1) class ApplySoftmaxBasePredictionPostTransform(PostTransform): def __init__(self, base_prediction): self.base_prediction = ApplyBasePredictionPostTransform(base_prediction) def __call__(self, x): return ApplySoftmaxPostTransform()(self.base_prediction(x)) class ApplyTweediePostTransform(PostTransform): def __call__(self, x): return torch.exp(x) class ApplyTweedieBasePredictionPostTransform(PostTransform): def __init__(self, base_prediction): self.base_prediction = ApplyBasePredictionPostTransform(base_prediction) def __call__(self, x): return ApplyTweediePostTransform()(self.base_prediction(x)) def _find_max_depth(tree_parameters): depth = 0 for tree in tree_parameters: tree = copy.deepcopy(tree) lefts = tree.lefts rights = tree.rights ids = [i for i in range(len(lefts))] nodes = list(zip(ids, lefts, rights)) nodes_map = {0: Node(0)} current_node = 0 for i, node in enumerate(nodes): id, left, right = node if left != -1: l_node = Node(left) nodes_map[left] = l_node else: lefts[i] = id l_node = -1 if right != -1: r_node = Node(right) nodes_map[right] = r_node else: rights[i] = id r_node = -1 nodes_map[current_node].left = l_node nodes_map[current_node].right = r_node current_node += 1 depth = max(depth, _find_depth(nodes_map[0], -1)) return depth def _find_depth(node, current_depth): if node.left == -1 and node.right == -1: return current_depth + 1 elif node.left != -1 and node.right == -1: return _find_depth(node.l, current_depth + 1) elif node.right != -1 and node.left == -1: return _find_depth(node.r, current_depth + 1) elif node.right != -1 and node.left != -1: return max(_find_depth(node.left, current_depth + 1), _find_depth(node.right, current_depth + 1)) def get_tree_implementation_by_config_or_depth(extra_config, max_depth, low=3, high=10): if constants.TREE_IMPLEMENTATION not in extra_config: if max_depth is not None and max_depth <= low: return TreeImpl.gemm elif max_depth is not None and max_depth <= high: return TreeImpl.perf_tree_trav else: return TreeImpl.tree_trav if extra_config[constants.TREE_IMPLEMENTATION] == TreeImpl.gemm.name: return TreeImpl.gemm elif 
extra_config[constants.TREE_IMPLEMENTATION] == TreeImpl.tree_trav.name: return TreeImpl.tree_trav elif extra_config[constants.TREE_IMPLEMENTATION] == TreeImpl.perf_tree_trav.name: return TreeImpl.perf_tree_trav else: raise MissingConverter("Tree implementation {} not found".format(extra_config)) def get_tree_params_and_type(tree_infos, get_tree_parameters, extra_config): tree_parameters = [get_tree_parameters(tree_info) for tree_info in tree_infos] max_depth = max(1, _find_max_depth(tree_parameters)) tree_type = get_tree_implementation_by_config_or_depth(extra_config, max_depth) return tree_parameters, max_depth, tree_type def get_parameters_for_sklearn_common(tree_infos): lefts = tree_infos.tree_.children_left rights = tree_infos.tree_.children_right features = tree_infos.tree_.feature thresholds = tree_infos.tree_.threshold values = tree_infos.tree_.value return TreeParameters(lefts, rights, features, thresholds, values) def get_parameters_for_tree_trav_common(lefts, rights, features, thresholds, values, extra_config={}): if len(lefts) == 1: lefts = [1, -1, -1] rights = [2, -1, -1] features = [0, 0, 0] thresholds = [0, 0, 0] n_classes = values.shape[1] if type(values) is np.ndarray else 1 values = np.array([np.zeros(n_classes), values[0], values[0]]) values.reshape(3, n_classes) ids = [i for i in range(len(lefts))] nodes = list(zip(ids, lefts, rights, features, thresholds, values)) nodes_map = {0: Node(0)} current_node = 0 for i, node in enumerate(nodes): id, left, right, feature, threshold, value = node if left != -1: l_node = Node(left) nodes_map[left] = l_node else: lefts[i] = id l_node = -1 feature = -1 if right != -1: r_node = Node(right) nodes_map[right] = r_node else: rights[i] = id r_node = -1 feature = -1 nodes_map[current_node].left = l_node nodes_map[current_node].right = r_node nodes_map[current_node].feature = feature nodes_map[current_node].threshold = threshold nodes_map[current_node].value = value current_node += 1 lefts = np.array(lefts) rights = np.array(rights) features = np.array(features) thresholds = np.array(thresholds, dtype=np.float64) values = np.array(values, dtype=np.float64) return [nodes_map, ids, lefts, rights, features, thresholds, values]
MIT License
aspose-words-cloud/aspose-words-cloud-python
asposewordscloud/models/replace_text_parameters.py
ReplaceTextParameters.is_match_case
python
def is_match_case(self):
    return self._is_match_case
Gets the is_match_case of this ReplaceTextParameters.  # noqa: E501

Gets or sets a value indicating whether the search is case-sensitive: true means case-sensitive, false means not case-sensitive.  # noqa: E501

:return: The is_match_case of this ReplaceTextParameters.  # noqa: E501
:rtype: bool
https://github.com/aspose-words-cloud/aspose-words-cloud-python/blob/abf8fccfed40aa2b09c6cdcaf3f2723e1f412d85/asposewordscloud/models/replace_text_parameters.py#L83-L91
import pprint import re import datetime import six import json class ReplaceTextParameters(object): """ Attributes: swagger_types (dict): The key is attribute name and the value is attribute type. attribute_map (dict): The key is attribute name and the value is json key in definition. """ swagger_types = { 'is_match_case': 'bool', 'is_match_whole_word': 'bool', 'is_old_value_regex': 'bool', 'new_value': 'str', 'old_value': 'str' } attribute_map = { 'is_match_case': 'IsMatchCase', 'is_match_whole_word': 'IsMatchWholeWord', 'is_old_value_regex': 'IsOldValueRegex', 'new_value': 'NewValue', 'old_value': 'OldValue' } def __init__(self, is_match_case=None, is_match_whole_word=None, is_old_value_regex=None, new_value=None, old_value=None): self._is_match_case = None self._is_match_whole_word = None self._is_old_value_regex = None self._new_value = None self._old_value = None self.discriminator = None if is_match_case is not None: self.is_match_case = is_match_case if is_match_whole_word is not None: self.is_match_whole_word = is_match_whole_word if is_old_value_regex is not None: self.is_old_value_regex = is_old_value_regex if new_value is not None: self.new_value = new_value if old_value is not None: self.old_value = old_value @property
MIT License
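A minimal usage sketch for the is_match_case flag above, assuming the constructor signature shown in the surrounding context and an import path derived from the record's function_path; the search values are made up for illustration:

from asposewordscloud.models.replace_text_parameters import ReplaceTextParameters

# Build a replace request that should match case-sensitively; "Foo"/"Bar"
# are placeholder values, not taken from the record above.
params = ReplaceTextParameters(old_value="Foo", new_value="Bar", is_match_case=True)

# The property getter simply returns the flag stored by __init__.
assert params.is_match_case is True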
shannonai/fast-knn-nmt
thirdparty/fairseq/examples/pointer_generator/pointer_generator_src/transformer_pg.py
TransformerPointerGeneratorDecoder.forward
python
def forward(
    self,
    prev_output_tokens,
    encoder_out: Optional[EncoderOut] = None,
    incremental_state: Optional[Dict[str, Dict[str, Optional[Tensor]]]] = None,
    features_only: bool = False,
    alignment_layer: Optional[int] = 0,
    alignment_heads: Optional[int] = 1,
    src_lengths: Optional[Any] = None,
    return_all_hiddens: bool = False,
):
    x, extra = self.extract_features(
        prev_output_tokens,
        encoder_out=encoder_out,
        incremental_state=incremental_state,
        alignment_layer=self.alignment_layer,
        alignment_heads=self.alignment_heads,
    )
    if not features_only:
        if incremental_state is not None:
            prev_output_tokens = prev_output_tokens[:, -1:]
        prev_output_embed = self.embed_tokens(prev_output_tokens)
        prev_output_embed *= self.embed_scale
        predictors = torch.cat((prev_output_embed, x), 2)
        p_gens = self.project_p_gens(predictors)
        p_gens = torch.sigmoid(p_gens)
        x = self.output_layer(x, extra["attn"][0], encoder_out.src_tokens, p_gens)
    return x, extra
Args:
    prev_output_tokens (LongTensor): previous decoder outputs of shape
        `(batch, tgt_len)`, for teacher forcing
    encoder_out (EncoderOut, optional): output from the encoder, used for
        encoder-side attention
    incremental_state (dict, optional): dictionary used for storing state
        during :ref:`Incremental decoding`
    features_only (bool, optional): only return features without applying
        output layer (default: False)
    alignment_layer (int, optional): 0-based index of the layer to be used
        for pointing (default: 0)
    alignment_heads (int, optional): number of attention heads to be used
        for pointing (default: 1)

Returns:
    tuple:
        - the decoder's output of shape `(batch, tgt_len, vocab)`
        - a dictionary with any model-specific outputs
https://github.com/shannonai/fast-knn-nmt/blob/27bbdd967befe06bfbfde11ab9cfa34b4aa46482/thirdparty/fairseq/examples/pointer_generator/pointer_generator_src/transformer_pg.py#L236-L288
import logging from typing import Any, Dict, Optional import torch import torch.nn as nn from fairseq import metrics, utils from fairseq.models import register_model, register_model_architecture from fairseq.models.fairseq_encoder import EncoderOut from fairseq.models.transformer import ( DEFAULT_MAX_SOURCE_POSITIONS, DEFAULT_MAX_TARGET_POSITIONS, TransformerDecoder, TransformerEncoder, TransformerModel, base_architecture, ) from torch import Tensor logger = logging.getLogger(__name__) @register_model("transformer_pointer_generator") class TransformerPointerGeneratorModel(TransformerModel): @staticmethod def add_args(parser): TransformerModel.add_args(parser) parser.add_argument('--alignment-heads', type=int, metavar='N', help='number of attention heads to be used for ' 'pointing') parser.add_argument('--alignment-layer', type=int, metavar='I', help='layer number to be used for pointing (0 ' 'corresponding to the bottommost layer)') parser.add_argument('--source-position-markers', type=int, metavar='N', help='dictionary includes N additional items that ' 'represent an OOV token at a particular input ' 'position') parser.add_argument('--force-generation', type=float, metavar='P', default=None, help='set the vocabulary distribution weight to P, ' 'instead of predicting it from the input (1.0 ' 'corresponding to generation, 0.0 to pointing)') @classmethod def build_model(cls, args, task): base_architecture(args) if args.encoder_layers_to_keep: args.encoder_layers = len(args.encoder_layers_to_keep.split(",")) if args.decoder_layers_to_keep: args.decoder_layers = len(args.decoder_layers_to_keep.split(",")) if getattr(args, "max_source_positions", None) is None: args.max_source_positions = DEFAULT_MAX_SOURCE_POSITIONS if getattr(args, "max_target_positions", None) is None: args.max_target_positions = DEFAULT_MAX_TARGET_POSITIONS if getattr(args, "source_position_markers", None) is None: args.source_position_markers = args.max_source_positions src_dict, tgt_dict = task.source_dictionary, task.target_dictionary if src_dict != tgt_dict: raise ValueError("Pointer-generator requires a joined dictionary") def build_embedding(dictionary, embed_dim, path=None): num_embeddings = len(dictionary) - args.source_position_markers padding_idx = dictionary.pad() unk_idx = dictionary.unk() logger.info( "dictionary indices from {0} to {1} will be mapped to {2}".format( num_embeddings, len(dictionary) - 1, unk_idx ) ) emb = Embedding(num_embeddings, embed_dim, padding_idx, unk_idx) if path: embed_dict = utils.parse_embedding(path) utils.load_embedding(embed_dict, dictionary, emb) return emb if args.share_all_embeddings: if args.encoder_embed_dim != args.decoder_embed_dim: raise ValueError( "--share-all-embeddings requires --encoder-embed-dim to match --decoder-embed-dim" ) if args.decoder_embed_path and ( args.decoder_embed_path != args.encoder_embed_path ): raise ValueError( "--share-all-embeddings not compatible with --decoder-embed-path" ) encoder_embed_tokens = build_embedding( src_dict, args.encoder_embed_dim, args.encoder_embed_path ) decoder_embed_tokens = encoder_embed_tokens args.share_decoder_input_output_embed = True else: encoder_embed_tokens = build_embedding( src_dict, args.encoder_embed_dim, args.encoder_embed_path ) decoder_embed_tokens = build_embedding( tgt_dict, args.decoder_embed_dim, args.decoder_embed_path ) encoder = cls.build_encoder(args, src_dict, encoder_embed_tokens) decoder = cls.build_decoder(args, tgt_dict, decoder_embed_tokens) return cls(args, encoder, decoder) @classmethod def 
build_encoder(cls, args, src_dict, embed_tokens): return TransformerPointerGeneratorEncoder(args, src_dict, embed_tokens) @classmethod def build_decoder(cls, args, tgt_dict, embed_tokens): return TransformerPointerGeneratorDecoder(args, tgt_dict, embed_tokens) class TransformerPointerGeneratorEncoder(TransformerEncoder): def forward(self, src_tokens, src_lengths, **kwargs): encoder_out = super().forward(src_tokens, src_lengths, **kwargs) return EncoderOut( encoder_out=encoder_out.encoder_out, encoder_padding_mask=encoder_out.encoder_padding_mask, encoder_embedding=encoder_out.encoder_embedding, encoder_states=encoder_out.encoder_states, src_tokens=src_tokens, src_lengths=None, ) class TransformerPointerGeneratorDecoder(TransformerDecoder): def __init__(self, args, dictionary, embed_tokens): super().__init__(args, dictionary, embed_tokens, no_encoder_attn=False) self.alignment_heads = args.alignment_heads self.alignment_layer = args.alignment_layer input_embed_dim = embed_tokens.embedding_dim p_gen_input_size = input_embed_dim + self.output_embed_dim self.project_p_gens = nn.Linear(p_gen_input_size, 1) nn.init.zeros_(self.project_p_gens.bias) self.num_types = len(dictionary) self.num_oov_types = args.source_position_markers self.num_embeddings = self.num_types - self.num_oov_types self.force_p_gen = args.force_generation
Apache License 2.0
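The key step in the forward() above is computing p_gens from the concatenated previous-output embedding and decoder features, then letting output_layer() mix the vocabulary (softmax) and copy (attention) distributions. A toy, self-contained illustration of that mixing idea; shapes and values are invented, and this is not the model's actual output_layer:

import torch

batch, tgt_len, vocab = 2, 3, 10
gen_dist = torch.softmax(torch.randn(batch, tgt_len, vocab), dim=-1)   # "generate" distribution
copy_dist = torch.softmax(torch.randn(batch, tgt_len, vocab), dim=-1)  # "copy" distribution scattered from attention
p_gens = torch.sigmoid(torch.randn(batch, tgt_len, 1))                 # per-position generation probability

mixed = p_gens * gen_dist + (1 - p_gens) * copy_dist                   # pointer-generator mixture
print(mixed.shape)  # torch.Size([2, 3, 10])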
graviti-ai/tensorbay-python-sdk
tensorbay/cli/cli.py
dataset
python
def dataset(obj: ContextInfo, tbrn: str, is_delete: bool, yes: bool) -> None:
    from tensorbay.cli.dataset import _implement_dataset

    _implement_dataset(obj, tbrn, is_delete, yes)
List, create or delete datasets.
\f
Arguments:
    obj: A :class:`.utility.ContextInfo` instance containing the command context.
    tbrn: The tbrn of the dataset, like "tb:KITTI".
    is_delete: Whether to delete the TensorBay dataset.
    yes: Confirm to delete the dataset completely.
https://github.com/graviti-ai/tensorbay-python-sdk/blob/db60d259869d6a528ee1ad84103d2b9bab1bd72e/tensorbay/cli/cli.py#L152-L164
from functools import partial from typing import Iterable, Optional, Tuple import click from tensorbay import __version__ from tensorbay.cli.custom import CustomCommand, DeprecatedOptionsCommand from tensorbay.cli.utility import ContextInfo @click.group(context_settings={"help_option_names": ("-h", "--help")}) @click.version_option(__version__) @click.option( "-k", "--key", "access_key", type=str, default="", help="The AccessKey to TensorBay (replace the AccessKey in config file).", ) @click.option("-u", "--url", type=str, default="", help="The login url.", hidden=True) @click.option( "-p", "--profile", "profile_name", type=str, default="default", help="Choose a profile from the config file to login.", ) @click.option("-d", "--debug", is_flag=True, help="Debug mode.") @click.pass_context def cli(ctx: click.Context, access_key: str, url: str, profile_name: str, debug: bool) -> None: from tensorbay.cli.utility import _implement_cli _implement_cli(ctx, access_key, url, profile_name, debug) command = partial(cli.command, cls=CustomCommand) @command( synopsis=( "$ gas ls # List all the datasets.", "$ gas ls tb:<dataset_name> # List segments of a dataset.", "$ gas ls tb:<dataset_name>:<segment_name> # List files of a segment.", ) ) @click.argument("tbrn", type=str, default="") @click.option( "-a", "--all", "list_all_files", is_flag=True, help="List all files under the segment." ) @click.option("-l", "show_total_num", is_flag=True, help="Show the total number of resources") @click.pass_obj def ls( obj: ContextInfo, tbrn: str, list_all_files: bool, show_total_num: bool ) -> None: from tensorbay.cli.ls import _implement_ls _implement_ls(obj, tbrn, list_all_files, show_total_num) @command( synopsis=( "$ gas config [key] # Show the config.", "$ gas config <key> <value> # Set the config.", "$ gas config -u [key] # Unset the config.", ) ) @click.argument("key", type=str, default="") @click.argument("value", type=str, default="") @click.option("-u", "--unset", is_flag=True, help="Unset the config option") @click.pass_obj def config(obj: ContextInfo, key: str, value: str, unset: bool) -> None: from tensorbay.cli.config import _implement_config _implement_config(obj, key, value, unset) @command( synopsis=( "$ gas dataset # List all the datasets.", "$ gas dataset tb:<dataset_name> # Create a dataset.", "$ gas dataset -d [-y] tb:<dataset_name> # Delete a dataset.", ) ) @click.argument("tbrn", type=str, default="") @click.option("-d", "--delete", "is_delete", is_flag=True, help="Delete TensorBay dataset") @click.option("-y", "--yes", is_flag=True, help="Confirm to delete the dataset completely.") @click.pass_obj
MIT License
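Because dataset is a Click command hanging off the cli group shown in the context, one way to exercise it is through Click's test runner. A sketch assuming a configured AccessKey; "tb:MyDataset" is a made-up tbrn:

from click.testing import CliRunner

from tensorbay.cli.cli import cli

runner = CliRunner()

# List all datasets (no tbrn argument).
result = runner.invoke(cli, ["dataset"])

# Delete a dataset non-interactively (-d plus -y to skip the confirmation).
result = runner.invoke(cli, ["dataset", "-d", "-y", "tb:MyDataset"])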
xuru/pyvisdk
pyvisdk/do/storage_drs_vm_config_spec.py
StorageDrsVmConfigSpec
python
def StorageDrsVmConfigSpec(vim, *args, **kwargs):
    obj = vim.client.factory.create('ns0:StorageDrsVmConfigSpec')

    if (len(args) + len(kwargs)) < 1:
        raise IndexError('Expected at least 2 arguments got: %d' % len(args))

    required = ['operation']
    optional = ['info', 'removeKey', 'dynamicProperty', 'dynamicType']

    for name, arg in zip(required + optional, args):
        setattr(obj, name, arg)

    for name, value in kwargs.items():
        if name in required + optional:
            setattr(obj, name, value)
        else:
            raise InvalidArgumentError("Invalid argument: %s.  Expected one of %s" % (name, ", ".join(required + optional)))

    return obj
Updates the per-virtual-machine storage DRS configuration. NOTE: This data object type and all of its methods are experimental and subject to change in future releases.
https://github.com/xuru/pyvisdk/blob/de24eb4426eb76233dc2e57640d3274ffd304eb3/pyvisdk/do/storage_drs_vm_config_spec.py#L11-L34
import logging from pyvisdk.exceptions import InvalidArgumentError log = logging.getLogger(__name__)
MIT License
cupy/cupy
cupyx/scipy/ndimage/filters.py
gaussian_gradient_magnitude
python
def gaussian_gradient_magnitude(input, sigma, output=None, mode="reflect",
                                cval=0.0, **kwargs):
    def derivative(input, axis, output, mode, cval):
        order = [0] * input.ndim
        order[axis] = 1
        return gaussian_filter(input, sigma, order, output, mode, cval,
                               **kwargs)
    return generic_gradient_magnitude(input, derivative, output, mode, cval)
Multi-dimensional gradient magnitude using Gaussian derivatives.

Args:
    input (cupy.ndarray): The input array.
    sigma (scalar or sequence of scalar): Standard deviations for each axis
        of Gaussian kernel. A single value applies to all axes.
    output (cupy.ndarray, dtype or None): The array in which to place the
        output. Default is the same dtype as the input.
    mode (str): The array borders are handled according to the given mode
        (``'reflect'``, ``'constant'``, ``'nearest'``, ``'mirror'``,
        ``'wrap'``). Default is ``'reflect'``.
    cval (scalar): Value to fill past edges of input if mode is
        ``'constant'``. Default is ``0.0``.
    kwargs (dict, optional): dict of extra keyword arguments to pass to
        ``gaussian_filter()``.

Returns:
    cupy.ndarray: The result of the filtering.

.. seealso:: :func:`scipy.ndimage.gaussian_gradient_magnitude`

.. note::
    When the output data type is integral (or when no output is provided
    and input is integral) the results may not perfectly match the results
    from SciPy due to floating-point rounding of intermediate results.
https://github.com/cupy/cupy/blob/a466b03ef0afd7c1ce1615e3f48da64ae38c1320/cupyx/scipy/ndimage/filters.py#L654-L687
import numpy import cupy from cupy._core import internal from cupyx.scipy.ndimage import _util from cupyx.scipy.ndimage import _filters_core from cupyx.scipy.ndimage import _filters_generic def correlate(input, weights, output=None, mode='reflect', cval=0.0, origin=0): return _correlate_or_convolve(input, weights, output, mode, cval, origin) def convolve(input, weights, output=None, mode='reflect', cval=0.0, origin=0): return _correlate_or_convolve(input, weights, output, mode, cval, origin, True) def correlate1d(input, weights, axis=-1, output=None, mode="reflect", cval=0.0, origin=0): weights, origins = _filters_core._convert_1d_args(input.ndim, weights, origin, axis) return _correlate_or_convolve(input, weights, output, mode, cval, origins) def convolve1d(input, weights, axis=-1, output=None, mode="reflect", cval=0.0, origin=0): weights, origins = _filters_core._convert_1d_args(input.ndim, weights, origin, axis) return _correlate_or_convolve(input, weights, output, mode, cval, origins, True) def _correlate_or_convolve(input, weights, output, mode, cval, origin, convolution=False): origins, int_type = _filters_core._check_nd_args(input, weights, mode, origin) if weights.size == 0: return cupy.zeros_like(input) _util._check_cval(mode, cval, _util._is_integer_output(output, input)) if convolution: weights = weights[tuple([slice(None, None, -1)] * weights.ndim)] origins = list(origins) for i, wsize in enumerate(weights.shape): origins[i] = -origins[i] if wsize % 2 == 0: origins[i] -= 1 origins = tuple(origins) elif weights.dtype.kind == "c": weights = weights.conj() weights_dtype = _util._get_weights_dtype(input, weights) offsets = _filters_core._origins_to_offsets(origins, weights.shape) kernel = _get_correlate_kernel(mode, weights.shape, int_type, offsets, cval) output = _filters_core._call_kernel(kernel, input, weights, output, weights_dtype=weights_dtype) return output @cupy._util.memoize(for_each_device=True) def _get_correlate_kernel(mode, w_shape, int_type, offsets, cval): return _filters_core._generate_nd_kernel( 'correlate', 'W sum = (W)0;', 'sum += cast<W>({value}) * wval;', 'y = cast<Y>(sum);', mode, w_shape, int_type, offsets, cval, ctype='W') def _run_1d_correlates(input, params, get_weights, output, mode, cval, origin=0): wghts = {} for param in params: if param not in wghts: wghts[param] = get_weights(param) wghts = [wghts[param] for param in params] return _filters_core._run_1d_filters( [None if w is None else correlate1d for w in wghts], input, wghts, output, mode, cval, origin) def uniform_filter1d(input, size, axis=-1, output=None, mode="reflect", cval=0.0, origin=0): return correlate1d(input, cupy.ones(size) / size, axis, output, mode, cval, origin) def uniform_filter(input, size=3, output=None, mode="reflect", cval=0.0, origin=0): sizes = _util._fix_sequence_arg(size, input.ndim, 'size', int) def get(size): return None if size <= 1 else cupy.ones(size) / size return _run_1d_correlates(input, sizes, get, output, mode, cval, origin) def gaussian_filter1d(input, sigma, axis=-1, order=0, output=None, mode="reflect", cval=0.0, truncate=4.0): radius = int(float(truncate) * float(sigma) + 0.5) weights = _gaussian_kernel1d(sigma, int(order), radius) return correlate1d(input, weights, axis, output, mode, cval) def gaussian_filter(input, sigma, order=0, output=None, mode="reflect", cval=0.0, truncate=4.0): sigmas = _util._fix_sequence_arg(sigma, input.ndim, 'sigma', float) orders = _util._fix_sequence_arg(order, input.ndim, 'order', int) truncate = float(truncate) def get(param): 
sigma, order = param radius = int(truncate * float(sigma) + 0.5) if radius <= 0: return None return _gaussian_kernel1d(sigma, order, radius) return _run_1d_correlates(input, list(zip(sigmas, orders)), get, output, mode, cval, 0) def _gaussian_kernel1d(sigma, order, radius): if order < 0: raise ValueError('order must be non-negative') sigma2 = sigma * sigma x = numpy.arange(-radius, radius+1) phi_x = numpy.exp(-0.5 / sigma2 * x ** 2) phi_x /= phi_x.sum() if order == 0: return cupy.asarray(phi_x) exponent_range = numpy.arange(order + 1) q = numpy.zeros(order + 1) q[0] = 1 D = numpy.diag(exponent_range[1:], 1) P = numpy.diag(numpy.ones(order)/-sigma2, -1) Q_deriv = D + P for _ in range(order): q = Q_deriv.dot(q) q = (x[:, None] ** exponent_range).dot(q) return cupy.asarray((q * phi_x)[::-1]) def prewitt(input, axis=-1, output=None, mode="reflect", cval=0.0): return _prewitt_or_sobel(input, axis, output, mode, cval, cupy.ones(3)) def sobel(input, axis=-1, output=None, mode="reflect", cval=0.0): return _prewitt_or_sobel(input, axis, output, mode, cval, cupy.array([1, 2, 1])) def _prewitt_or_sobel(input, axis, output, mode, cval, weights): axis = internal._normalize_axis_index(axis, input.ndim) def get(is_diff): return cupy.array([-1, 0, 1]) if is_diff else weights return _run_1d_correlates(input, [a == axis for a in range(input.ndim)], get, output, mode, cval) def generic_laplace(input, derivative2, output=None, mode="reflect", cval=0.0, extra_arguments=(), extra_keywords=None): if extra_keywords is None: extra_keywords = {} ndim = input.ndim modes = _util._fix_sequence_arg(mode, ndim, 'mode', _util._check_mode) output = _util._get_output(output, input) if ndim == 0: output[...] = input return output derivative2(input, 0, output, modes[0], cval, *extra_arguments, **extra_keywords) if ndim > 1: tmp = _util._get_output(output.dtype, input) for i in range(1, ndim): derivative2(input, i, tmp, modes[i], cval, *extra_arguments, **extra_keywords) output += tmp return output def laplace(input, output=None, mode="reflect", cval=0.0): weights = cupy.array([1, -2, 1], dtype=cupy.float64) def derivative2(input, axis, output, mode, cval): return correlate1d(input, weights, axis, output, mode, cval) return generic_laplace(input, derivative2, output, mode, cval) def gaussian_laplace(input, sigma, output=None, mode="reflect", cval=0.0, **kwargs): def derivative2(input, axis, output, mode, cval): order = [0] * input.ndim order[axis] = 2 return gaussian_filter(input, sigma, order, output, mode, cval, **kwargs) return generic_laplace(input, derivative2, output, mode, cval) def generic_gradient_magnitude(input, derivative, output=None, mode="reflect", cval=0.0, extra_arguments=(), extra_keywords=None): if extra_keywords is None: extra_keywords = {} ndim = input.ndim modes = _util._fix_sequence_arg(mode, ndim, 'mode', _util._check_mode) output = _util._get_output(output, input) if ndim == 0: output[...] = input return output derivative(input, 0, output, modes[0], cval, *extra_arguments, **extra_keywords) output *= output if ndim > 1: tmp = _util._get_output(output.dtype, input) for i in range(1, ndim): derivative(input, i, tmp, modes[i], cval, *extra_arguments, **extra_keywords) tmp *= tmp output += tmp return cupy.sqrt(output, output, casting='unsafe')
MIT License
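A small, self-contained example of the filter described above; the array contents are random and purely illustrative:

import cupy
from cupyx.scipy.ndimage import gaussian_gradient_magnitude

# A random 2-D "image" on the GPU; a scalar sigma applies to both axes.
image = cupy.random.random((128, 128)).astype(cupy.float32)
edges = gaussian_gradient_magnitude(image, sigma=2, mode="nearest")
print(edges.shape)  # (128, 128)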
googlecloudplatform/python-docs-samples
composer/tools/composer_db_transfer.py
DatabaseUtils.blob
python
def blob(column: str) -> str:
    big_enough_length_to_hold_hex_representation_of_blob = 150000
    return (
        f'CASE WHEN {column} IS NULL THEN "{DatabaseUtils.null_string}" '
        fr'ELSE CONCAT("\\\x", CAST(HEX({column}) '
        f"as char({big_enough_length_to_hold_hex_representation_of_blob}))) "
        "END"
    )
Returns a SQL expression that processes a blob column for export.
https://github.com/googlecloudplatform/python-docs-samples/blob/35873ab69eaf0f2a6dd4a7007a12917265e2b848/composer/tools/composer_db_transfer.py#L181-L189
import argparse import json import logging import os import re import subprocess import time import typing import uuid SCRIPT_VERSION = "1.2" USAGE = r"""This script handles database transfer for Cloud Composer (Airflow 1.10.14/15 -> Airflow 2.0.1+). EXPORT python3 composer_db_transfer.py export \ --project [PROJECT NAME] \ --environment [ENVIRONMENT NAME] \ --location [REGION] \ --fernet-key-file [PATH TO FERNET KEY FILE - TO BE CREATED] CSV files with exported database are stored as /export/tables/[TABLE NAME].csv in the environment's bucket. dags, plugins and data directories are stored in /export/dirs in the environment's bucket. File with a fernet key is going to be created on the machine executing the script. Copying the files between the environments Exported files can be copied to the target environment, e.g. with: gsutil -m cp -r \ gs://[SOURCE ENV BUCKET NAME]/export \ gs://[TARGET ENV BUCKET NAME]/import IMPORT python3 composer_db_transfer.py import \ --project [PROJECT NAME] \ --environment [ENVIRONMENT NAME] \ --location [REGION] \ --fernet-key-file [PATH TO FERNET KEY FILE FROM SOURCE ENVIRONMENT] CSV files that should be imported are expected to be stored as /import/tables/[TABLE NAME].csv in the environment's bucket. dags, plugins and data directories that should be imported are expected to be stored in /import/dirs in the environment's bucket. `fernet-key-file` parameter specifies path to the fernet key file of the source environment on the machine executing the script. It is created during export phase. Additional --use-private-gke-endpoint option can be used to access environment's GKE cluster through internal endpoint. It might be useful for private IP environments. TROUBLESHOOTING Check "Troubleshooting" section of the script manual for troubleshooting guidance. Temporary kubeconfig file is created in the current directory. """ logging.basicConfig(level=logging.INFO, format="%(asctime)s - %(message)s") logger = logging.getLogger(__name__) class Command: class CommandExecutionError(Exception): pass @staticmethod def run_shell_command( command: typing.List[str], use_shell: bool = False, retry_number: int = 3, delay: float = 4.0, command_input: str = None, log_command: bool = True, log_error: bool = True, ) -> str: for i in range(retry_number + 1): try: return Command._run_shell_command( command, use_shell, command_input=command_input, log_command=log_command, ) except Command.CommandExecutionError as e: if log_error: logger.error(e) if i == retry_number: raise time.sleep(delay) if log_command: logger.info("Retrying...") @staticmethod def _run_shell_command( command: typing.List[str], use_shell: bool = False, command_input: str = None, log_command: bool = True, ) -> str: p = subprocess.Popen( command, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, shell=use_shell ) if log_command: logger.info("Executing shell command: %s", command) (res, _) = p.communicate(input=command_input) if p.returncode: logged_command = f' "{command}"' if log_command else "" error_message = ( f"Failed to run shell command{logged_command}, " f"details: {res}" ) raise Command.CommandExecutionError(error_message) return str(res.decode().strip("\n")) class DatabaseUtils: null_string = "a8fc0a-b77fe61-a9cf0b-9a0abf" @staticmethod def nullable(column: str) -> str: return ( f'CASE WHEN {column} IS NULL THEN "{DatabaseUtils.null_string}" ' f"ELSE {column} END" ) @staticmethod
Apache License 2.0
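The helper above emits a CASE expression that hex-encodes a blob column (or substitutes the sentinel null string when the value is NULL). A sketch of how it might be dropped into an export SELECT; the table and column names are invented for illustration:

column_expr = DatabaseUtils.blob("serialized_value")

# Hypothetical export query for a table with an `id` column and a blob column.
query = f"SELECT id, {column_expr} FROM some_airflow_table;"
print(query)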
cober2019/network-automation
Restconf/ClassMaps.py
is_discard_class
python
def is_discard_class(outter_key, inner_key) -> None:
    values = {}
    if outter_key == "discard-class":
        values['discard_class'] = ', '.join(is_instance(inner_key.get('discard-class', {})))

    return values
Collects discard-class values and returns them as a dict.
https://github.com/cober2019/network-automation/blob/796b7760ca1f1e496a841c613eaff05ddba71b16/Restconf/ClassMaps.py#L97-L104
import requests import warnings import json warnings.filterwarnings('ignore', message='Unverified HTTPS request') headers = {"Content-Type": 'application/yang-data+json', 'Accept': 'application/yang-data+json'} def is_instance(list_or_dict) -> list: if isinstance(list_or_dict, list): make_list = list_or_dict else: make_list = [list_or_dict] return make_list def is_mpls(outter_key, inner_key) -> None: values = {} if outter_key == "mpls": values['mpls'] = is_instance(inner_key.get('experimental', {}).get('topmost', {})) return values def is_vlan(outter_key, inner_key) -> None: values = {} if outter_key == "vlan": if inner_key.get('inner') is not None: values['inner'] = ', '.join(is_instance(inner_key.get('inner', {}))) elif inner_key.get('value') is not None: values['value'] = ', '.join(is_instance(inner_key.get('value', {}))) return values def is_protocol(outter_key, inner_key) -> None: values = {} if outter_key == "protocol": if len(inner_key.get('protocols-list')) == 1: values['protocols'] = inner_key.get('protocols-list').get('protocols') else: values['protocols'] = ', '.join([i.get('protocols') for i in inner_key.get('protocols-list')]) return values def is_access_group(outter_key, inner_key) -> None: values = {} if outter_key == "access-group": if inner_key.get('index') is not None: values['index'] = ', '.join(is_instance(inner_key.get('index', {}))) elif inner_key.get('name') is not None: values['name'] = ', '.join(is_instance(inner_key.get('name', {}))) return values def is_security_group(outter_key, inner_key) -> None: values = {} if outter_key == "security-group": if inner_key.get('source') is not None: values['secgroup_src'] = inner_key.get('source', {}).get('tag', {}) elif inner_key.get('destination') is not None: values['secgroup_dest'] = inner_key.get('destination', {}).get('tag', {}) return values def is_atm(outter_key, inner_key) -> None: values = {} if outter_key == "atm": if inner_key.get('clp') is not None: values['clp'] = ', '.join(is_instance(inner_key.get('clp', {}))) elif inner_key.get('atm-vci ') is not None: values['atm-vci'] = ', '.join(is_instance(inner_key.get('atm-vci ', {}))) return values
MIT License
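A tiny sketch of feeding the helper a parsed class-map fragment; the input dict mimics the RESTCONF JSON shape the module expects, with made-up values:

inner = {"discard-class": ["1", "2"]}

print(is_discard_class("discard-class", inner))  # {'discard_class': '1, 2'}
print(is_discard_class("mpls", inner))           # {} (outer key does not match)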
berendkleinhaneveld/registrationshop
ui/transformations/ParameterModel.py
ParameterModel.rowCount
python
def rowCount(self, index):
    if index.isValid():
        return 0
    return len(self.parameters)
:type index: QModelIndex
:rtype: int
https://github.com/berendkleinhaneveld/registrationshop/blob/0d6f3ee5324865cdcb419369139f37c39dfe9a1c/ui/transformations/ParameterModel.py#L88-L96
from PySide.QtCore import QAbstractItemModel from PySide.QtCore import QModelIndex from PySide.QtCore import Qt from PySide.QtCore import Slot from core.elastix import Parameter from core.elastix import ParameterList class ParameterModel(QAbstractItemModel): def __init__(self): super(ParameterModel, self).__init__() self.parameters = [] self.headers = ["Parameter", "Value"] def setParameters(self, parameters): self.parameters = parameters self.layoutChanged.emit() def addParameter(self): standardParameter = Parameter("ParameterName", "Value") self.parameters.append(standardParameter) self.insertRows(len(self.parameters), 1, QModelIndex()) def removeParameterAtIndex(self, index): del self.parameters[index] self.removeRow(index, QModelIndex()) @Slot(ParameterList) def setTransformation(self, transformation): self.setParameters(transformation.parameters) def index(self, row, column, parent): if not self.hasIndex(row, column, parent): return self.invalidIndex() if column == 0: return self.createIndex(row, column, str(self.parameters[row].key())) if column == 1: return self.createIndex(row, column, str(self.parameters[row].value())) return None def parent(self, index): return self.invalidIndex()
MIT License
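A short sketch of the row-count behaviour, assuming a PySide environment and that ParameterModel and Parameter are importable as in the module above; the parameter name and value are invented:

from PySide.QtCore import QModelIndex

model = ParameterModel()
model.setParameters([Parameter("MaximumNumberOfIterations", "256")])

# The root (invalid) index reports one row per parameter; child indices report 0.
assert model.rowCount(QModelIndex()) == 1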
cc1-cloud/cc1
src/wi/forms/storage_image.py
AddDiskForm.clean_filesystem
python
def clean_filesystem(self):
    return int(self.cleaned_data['filesystem'])
Cast 'filesystem' to int.
https://github.com/cc1-cloud/cc1/blob/8113673fa13b6fe195cea99dedab9616aeca3ae8/src/wi/forms/storage_image.py#L79-L83
from django import forms from django.utils.translation import ugettext_lazy as _ from wi.utils import parsing from wi.utils.forms import attrs_dict, BetterForm from wi.utils.states import image_platforms_reversed from wi.utils.widgets import SelectWithDisabled class EditDiskForm(forms.Form): name = forms.CharField(widget=forms.TextInput(attrs=dict(attrs_dict, maxlength=45)), label=_('Disk name')) description = forms.CharField(required=False, widget=forms.Textarea(attrs=dict(attrs_dict, maxlength=512, rows=3, cols=20)), label=_('Disk description')) def __init__(self, *args, **kwargs): rest_data = kwargs.pop('rest_data') super(EditDiskForm, self).__init__(*args, **kwargs) self.fields['disk_controller'] = forms.ChoiceField(choices=parsing.parse_generic_enabled(rest_data, 'disk_controllers'), widget=SelectWithDisabled(attrs=dict()), label=_("Bus")) self.fields['disk_controller'].widget.attrs['class'] = 'medium' def clean_disk_controller(self): return int(self.cleaned_data['disk_controller']) class AddDiskForm(EditDiskForm): size = forms.IntegerField(max_value=2000000, min_value=2, label=_('Disk size [MB]')) def __init__(self, *args, **kwargs): super(AddDiskForm, self).__init__(*args, **kwargs) rest_data = kwargs.pop('rest_data') self.fields['filesystem'] = forms.ChoiceField(choices=parsing.parse_generic(rest_data, 'supported_filesystems'), widget=SelectWithDisabled(attrs=dict()), label=_('File system')) self.fields['filesystem'].widget.attrs['class'] = 'medium'
Apache License 2.0
aklajnert/pytest-subprocess
pytest_subprocess/core.py
FakeProcess.context
python
def context(cls) -> "FakeProcess":
    return cls()
Return a new FakeProcess instance to use it as a context manager.
https://github.com/aklajnert/pytest-subprocess/blob/b820ca5a9cf856dc671ca7e758d648b5a224aee9/pytest_subprocess/core.py#L578-L580
import asyncio import io import os import signal import subprocess import sys import time from collections import defaultdict from collections import deque from copy import deepcopy from typing import Any as AnyType from typing import Awaitable from typing import Callable from typing import DefaultDict from typing import Deque from typing import Dict from typing import List from typing import Optional from typing import Sequence from typing import Tuple from typing import Type from typing import Union from . import asyncio_subprocess from .utils import Any from .utils import Command from .utils import Thread OPTIONAL_TEXT = Union[str, bytes, None] OPTIONAL_TEXT_OR_ITERABLE = Union[ str, bytes, None, Sequence[Union[str, bytes]], ] BUFFER = Union[None, io.BytesIO, io.StringIO] ARGUMENT = Union[str, Any] COMMAND = Union[Sequence[ARGUMENT], str, Command] class PluginInternalError(Exception): class FakePopen: stdout: BUFFER = None stderr: BUFFER = None returncode: Optional[int] = None text_mode: bool = False pid: int = 0 def __init__( self, command: Union[ Union[bytes, str], Sequence[Union[str, bytes, "os.PathLike[str]", "os.PathLike[bytes]"]], ], stdout: OPTIONAL_TEXT_OR_ITERABLE = None, stderr: OPTIONAL_TEXT_OR_ITERABLE = None, returncode: int = 0, wait: Optional[float] = None, callback: Optional[Callable] = None, callback_kwargs: Optional[Dict[str, AnyType]] = None, signal_callback: Optional[Callable] = None, stdin_callable: Optional[Callable] = None, **_: Dict[str, AnyType] ) -> None: self.args = command self.__stdout: OPTIONAL_TEXT_OR_ITERABLE = stdout self.__stderr: OPTIONAL_TEXT_OR_ITERABLE = stderr self.__returncode: Optional[int] = returncode self.__wait: Optional[float] = wait self.__thread: Optional[Thread] = None self.__callback: Optional[Optional[Callable]] = callback self.__callback_kwargs: Optional[Dict[str, AnyType]] = callback_kwargs self.__signal_callback: Optional[Callable] = signal_callback self.__stdin_callable: Optional[Optional[Callable]] = stdin_callable self._signals: List[int] = [] def __enter__(self) -> "FakePopen": return self def __exit__(self, *args: List, **kwargs: Dict) -> None: if self.__thread and self.__thread.exception: raise self.__thread.exception def communicate( self, input: OPTIONAL_TEXT = None, timeout: Optional[float] = None ) -> Tuple[AnyType, AnyType]: if input and self.__stdin_callable: callable_output = self.__stdin_callable(input) if isinstance(callable_output, dict): self.stdout = self._extend_stream_from_dict( callable_output, "stdout", self.stdout ) self.stderr = self._extend_stream_from_dict( callable_output, "stderr", self.stderr ) if self.__thread is not None: self.__thread.join(timeout) return ( self.stdout.getvalue() if self.stdout else None, self.stderr.getvalue() if self.stderr else None, ) def _extend_stream_from_dict( self, dictionary: Dict[str, AnyType], key: str, stream: BUFFER ) -> BUFFER: data = dictionary.get(key) if data: return self._prepare_buffer(input=data, io_base=stream) return None def poll(self) -> Optional[int]: return self.returncode def wait(self, timeout: Optional[float] = None) -> int: if timeout and self.__wait and timeout < self.__wait: self.__wait -= timeout raise subprocess.TimeoutExpired(self.args, timeout) if self.__thread is not None: self.__thread.join() if self.returncode is None and self.__returncode is not None: self.returncode = self.__returncode if self.__thread.exception: raise self.__thread.exception if self.returncode is None: raise PluginInternalError return self.returncode def send_signal(self, sig: 
int) -> None: self._signals.append(sig) if self.__signal_callback: self.__signal_callback(self, sig) def terminate(self) -> None: self.send_signal(signal.SIGTERM) def kill(self) -> None: if sys.platform == "win32": self.terminate() else: self.send_signal(signal.SIGKILL) def configure(self, **kwargs: Optional[Dict]) -> None: self.__universal_newlines = kwargs.get("universal_newlines", None) text = kwargs.get("text", None) encoding = kwargs.get("encoding", None) errors = kwargs.get("errors", None) if text and sys.version_info < (3, 7): raise TypeError("__init__() got an unexpected keyword argument 'text'") self.text_mode = bool(text or self.__universal_newlines or encoding or errors) if ( text is not None and self.__universal_newlines is not None and bool(self.__universal_newlines) != bool(text) ): raise subprocess.SubprocessError( "Cannot disambiguate when both text " "and universal_newlines are supplied but " "different. Pass one or the other." ) if kwargs.get("stdout") == subprocess.PIPE: self.stdout = self._prepare_buffer(self.__stdout) stderr = kwargs.get("stderr") if stderr == subprocess.STDOUT and self.__stderr: self.stdout = self._prepare_buffer(self.__stderr, self.stdout) elif stderr == subprocess.PIPE: self.stderr = self._prepare_buffer(self.__stderr) def _prepare_buffer( self, input: OPTIONAL_TEXT_OR_ITERABLE, io_base: BUFFER = None, ) -> Union[io.BytesIO, io.StringIO]: linesep = self._convert(os.linesep) if isinstance(input, (list, tuple)): input = linesep.join(map(self._convert, input)) if input: input += linesep if isinstance(input, str) and not self.text_mode: input = input.encode() if isinstance(input, bytes) and self.text_mode: input = input.decode() if input and self.__universal_newlines and isinstance(input, str): input = input.replace("\r\n", "\n") if io_base is not None: input = io_base.getvalue() + (input) io_base = io.StringIO() if self.text_mode else io.BytesIO() if input is None: return io_base io_base.write(input) return io_base def _convert(self, input: Union[str, bytes]) -> Union[str, bytes]: if isinstance(input, bytes) and self.text_mode: return input.decode() if isinstance(input, str) and not self.text_mode: return input.encode() return input def _wait(self, wait_period: float) -> None: time.sleep(wait_period) if self.returncode is None: self._finish_process() def run_thread(self) -> None: if self.__wait is None and self.__callback is None: self._finish_process() else: if self.__callback: self.__thread = Thread( target=self.__callback, args=(self,), kwargs=self.__callback_kwargs or {}, ) else: self.__thread = Thread(target=self._wait, args=(self.__wait,)) self.__thread.start() def _finish_process(self) -> None: self.returncode = self.__returncode if self.stdout: self.stdout.seek(0) if self.stderr: self.stderr.seek(0) def received_signals(self) -> Tuple[int, ...]: return tuple(self._signals) class AsyncFakePopen(FakePopen): async def communicate( self, input: OPTIONAL_TEXT = None, timeout: Optional[float] = None ) -> Tuple[AnyType, AnyType]: return super().communicate(input, timeout) async def wait(self, timeout: Optional[float] = None) -> int: return super().wait(timeout) class ProcessNotRegisteredError(Exception): class ProcessDispatcher: process_list: List["FakeProcess"] = [] built_in_popen: Optional[Callable] = None built_in_async_subprocess: Optional[AnyType] = None _allow_unregistered: bool = False _cache: Dict["FakeProcess", Dict["FakeProcess", AnyType]] = dict() _keep_last_process: bool = False _pid: int = 0 @classmethod def register(cls, process: 
"FakeProcess") -> None: if not cls.process_list: cls.built_in_popen = subprocess.Popen subprocess.Popen = cls.dispatch cls.built_in_async_subprocess = asyncio.subprocess asyncio.create_subprocess_shell = cls.async_shell asyncio.create_subprocess_exec = cls.async_shell asyncio.subprocess = asyncio_subprocess cls._cache[process] = { proc: deepcopy(proc.definitions) for proc in cls.process_list } cls.process_list.append(process) @classmethod def deregister(cls, process: "FakeProcess") -> None: cls.process_list.remove(process) cache = cls._cache.pop(process) for proc, processes in cache.items(): proc.definitions = processes if not cls.process_list: subprocess.Popen = cls.built_in_popen cls.built_in_popen = None if cls.built_in_async_subprocess is None: raise PluginInternalError asyncio.subprocess = cls.built_in_async_subprocess asyncio.create_subprocess_shell = ( cls.built_in_async_subprocess.create_subprocess_shell ) asyncio.create_subprocess_exec = ( cls.built_in_async_subprocess.create_subprocess_exec ) cls.built_in_async_subprocess = None @classmethod def dispatch( cls, command: COMMAND, **kwargs: Optional[Dict] ) -> Union[FakePopen, subprocess.Popen]: process = cls.__dispatch(command) if process is None: if cls.built_in_popen is None: raise PluginInternalError popen: subprocess.Popen = cls.built_in_popen(command, **kwargs) return popen result = cls._prepare_instance(FakePopen, command, kwargs, process) if not isinstance(result, FakePopen): raise PluginInternalError result.run_thread() return result @classmethod async def async_shell( cls, command: COMMAND, **kwargs: Optional[Dict] ) -> Union[AsyncFakePopen, asyncio.subprocess.Process]: process = cls.__dispatch(command) if process is None: if cls.built_in_async_subprocess is None: raise PluginInternalError async_shell: Awaitable[ asyncio.subprocess.Process ] = cls.built_in_async_subprocess.create_subprocess_shell(command, **kwargs) return await async_shell if not isinstance(command, str): raise ValueError("cmd must be a string") if sys.platform == "win32" and isinstance( asyncio.get_event_loop_policy().get_event_loop(), asyncio.SelectorEventLoop ): raise NotImplementedError( "The SelectorEventLoop doesn't support subprocess" ) result = cls._prepare_instance(AsyncFakePopen, command, kwargs, process) if not isinstance(result, AsyncFakePopen): raise PluginInternalError result.run_thread() return result @classmethod def _prepare_instance( cls, klass: Union[Type[FakePopen], Type[AsyncFakePopen]], command: COMMAND, kwargs: dict, process: dict, ) -> Union[FakePopen, AsyncFakePopen]: fake_popen_kwargs = process.copy() fake_popen_kwargs["command"] = command result = klass(**fake_popen_kwargs) result.pid = cls._pid result.configure(**kwargs) return result @classmethod def __dispatch(cls, command: COMMAND) -> Optional[dict]: command_instance, processes, process_instance = cls._get_process(command) if process_instance: process_instance.calls.append(command) if not processes: if not cls.process_list[-1]._allow_unregistered: raise ProcessNotRegisteredError( "The process '%s' was not registered." 
% ( command if isinstance(command, str) else " ".join(str(item) for item in command), ) ) else: return None process = processes.popleft() if not processes and process_instance is not None: if cls.process_list[-1]._keep_last_process: processes.append(process) elif command_instance: del process_instance.definitions[command_instance] cls._pid += 1 if isinstance(process, bool): return None return process @classmethod def _get_process( cls, command: COMMAND ) -> Tuple[ Optional[Command], Optional[Deque[Union[dict, bool]]], Optional["FakeProcess"] ]: for proc in reversed(cls.process_list): command_instance, processes = next( ( (key, value) for key, value in proc.definitions.items() if key == command ), (None, None), ) process_instance = proc if processes and isinstance(processes, deque): return command_instance, processes, process_instance return None, None, None class IncorrectProcessDefinition(Exception): class FakeProcess: any: Type[Any] = Any def __init__(self) -> None: self.definitions: DefaultDict[Command, Deque[Union[Dict, bool]]] = defaultdict( deque ) self.calls: Deque[COMMAND] = deque() self._allow_unregistered: bool = False self._keep_last_process: bool = False def register_subprocess( self, command: COMMAND, stdout: OPTIONAL_TEXT_OR_ITERABLE = None, stderr: OPTIONAL_TEXT_OR_ITERABLE = None, returncode: int = 0, wait: Optional[float] = None, callback: Optional[Callable] = None, callback_kwargs: Optional[Dict[str, AnyType]] = None, signal_callback: Optional[Callable] = None, occurrences: int = 1, stdin_callable: Optional[Callable] = None, ) -> None: if wait is not None and callback is not None: raise IncorrectProcessDefinition( "The 'callback' and 'wait' arguments cannot be used " "together. Add sleep() to your callback instead." ) if not isinstance(command, Command): command = Command(command) self.definitions[command].extend( [ { "command": command, "stdout": stdout, "stderr": stderr, "returncode": returncode, "wait": wait, "callback": callback, "callback_kwargs": callback_kwargs, "signal_callback": signal_callback, "stdin_callable": stdin_callable, } ] * occurrences ) def pass_command(self, command: COMMAND, occurrences: int = 1,) -> None: if not isinstance(command, Command): command = Command(command) self.definitions[command].extend([True] * occurrences) def __enter__(self) -> "FakeProcess": ProcessDispatcher.register(self) return self def __exit__(self, *args: List, **kwargs: Dict) -> None: ProcessDispatcher.deregister(self) def allow_unregistered(self, allow: bool) -> None: self._allow_unregistered = allow def call_count(self, command: COMMAND) -> int: if not isinstance(command, Command): command_instance = Command(command) return len(tuple(filter(lambda elem: elem == command_instance, self.calls))) def keep_last_process(self, keep: bool) -> None: self._keep_last_process = keep @classmethod
MIT License
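A sketch of the context-manager usage the docstring refers to: registrations made inside the with block are scoped to it. The fake command and output are arbitrary placeholders:

import subprocess

from pytest_subprocess import FakeProcess

with FakeProcess.context() as fake:
    fake.register_subprocess(["ls", "-l"], stdout="fake output")
    completed = subprocess.run(["ls", "-l"], stdout=subprocess.PIPE)
    # The registered stdout is returned (with a platform line separator appended).
    assert completed.stdout.startswith(b"fake output")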
sphinx-toolbox/sphinx-toolbox
sphinx_toolbox/formatting.py
depart_iabbr_node
python
def depart_iabbr_node(translator: HTMLTranslator, node: ItalicAbbreviationNode):
    translator.body.append("</i></abbr>")
Depart an :class:`~.ItalicAbbreviationNode`.

:param translator: The HTML translator writing the output.
:param node: The node being visited.
https://github.com/sphinx-toolbox/sphinx-toolbox/blob/cee88c6bceac20a9ae0e381ada2fb2453ca3fc0b/sphinx_toolbox/formatting.py#L168-L176
from typing import List, Tuple from docutils import nodes from docutils.nodes import Node, system_message from docutils.parsers.rst import roles from sphinx.application import Sphinx from sphinx.roles import Abbreviation from sphinx.util.docutils import SphinxRole from sphinx.writers.html import HTMLTranslator from sphinx.writers.latex import LaTeXTranslator from sphinx_toolbox.utils import SphinxExtMetadata, metadata_add_version __all__ = [ "ItalicAbbreviationNode", "ItalicAbbreviation", "visit_iabbr_node", "depart_iabbr_node", "latex_visit_iabbr_node", "latex_depart_iabbr_node", "setup" ] class ItalicAbbreviationNode(nodes.abbreviation): class ItalicAbbreviation(Abbreviation): def run(self) -> Tuple[List[Node], List[system_message]]: options = self.options.copy() matched = self.abbr_re.search(self.text) if matched: text = self.text[:matched.start()].strip() options["explanation"] = matched.group(1) else: text = self.text return [ItalicAbbreviationNode(self.rawtext, text, **options)], [] class BoldTitle(SphinxRole): def run(self) -> Tuple[List[nodes.Node], List[nodes.system_message]]: assert self.text is not None node_list: List[nodes.Node] = [ nodes.raw('', r"\vspace{10px}", format="latex"), nodes.strong(f"**{self.text}**", self.text), ] return node_list, [] def visit_iabbr_node(translator: HTMLTranslator, node: ItalicAbbreviationNode): translator.body.append('<i class="abbreviation">') attrs = {} if node.hasattr("explanation"): attrs["title"] = node["explanation"] translator.body.append(translator.starttag(node, "abbr", '', **attrs))
MIT License
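A sketch of how the visit/depart pair above is typically wired into Sphinx. This mirrors the usual add_node registration pattern rather than quoting the module's actual setup(), and the "iabbr" role name is an assumption for this sketch:

from sphinx.application import Sphinx

def setup(app: Sphinx):
    # Register the role that produces ItalicAbbreviationNode instances.
    app.add_role("iabbr", ItalicAbbreviation())
    # Tell the HTML and LaTeX builders how to render the node.
    app.add_node(
        ItalicAbbreviationNode,
        html=(visit_iabbr_node, depart_iabbr_node),
        latex=(latex_visit_iabbr_node, latex_depart_iabbr_node),
    )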
miguelgrinberg/alchemical
examples/flaskr/flaskr/blog.py
get_post
python
def get_post(id, check_author=True):
    post = db.session.get(Post, id)

    if post is None:
        abort(404, f"Post id {id} doesn't exist.")

    if check_author and post.author != current_user:
        abort(403)

    return post
Get a post and its author by id.

Checks that the id exists and optionally that the current user is the author.

:param id: id of post to get
:param check_author: require the current user to be the author
:return: the post with author information
:raise 404: if a post with the given id doesn't exist
:raise 403: if the current user isn't the author
https://github.com/miguelgrinberg/alchemical/blob/9ade0914da50c9dafbb6595ac271b248af93d660/examples/flaskr/flaskr/blog.py#L24-L42
from flask import Blueprint from flask import flash from flask import redirect from flask import render_template from flask import request from flask import url_for from werkzeug.exceptions import abort from flask_login import current_user from flask_login import login_required from flaskr import db from flaskr.models import Post bp = Blueprint("blog", __name__) @bp.route("/") def index(): posts = db.session.scalars(Post.select()).all() return render_template("blog/index.html", posts=posts)
MIT License
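A sketch of a view that relies on get_post for the 404/403 handling; the route, form fields, Post attributes and template name follow the usual flaskr tutorial layout but are assumptions here:

@bp.route("/<int:id>/update", methods=("GET", "POST"))
@login_required
def update(id):
    post = get_post(id)  # aborts with 404 or 403 before reaching this line

    if request.method == "POST":
        post.title = request.form["title"]
        post.body = request.form["body"]
        db.session.commit()
        return redirect(url_for("blog.index"))

    return render_template("blog/update.html", post=post)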
azure/autorest.python
test/vanilla/legacy/Expected/AcceptanceTests/BodyString/bodystring/operations/_string_operations.py
StringOperations.put_whitespace
python
def put_whitespace(
    self,
    **kwargs
):
    cls = kwargs.pop("cls", None)
    error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError}
    error_map.update(kwargs.pop("error_map", {}))

    content_type = kwargs.pop("content_type", "application/json")

    request = build_put_whitespace_request(
        content_type=content_type,
        template_url=self.put_whitespace.metadata["url"],
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)

    pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
    response = pipeline_response.http_response

    if response.status_code not in [200]:
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response)
        raise HttpResponseError(response=response, model=error)

    if cls:
        return cls(pipeline_response, None, {})
Set String value with leading and trailing whitespace
':code:`<tab>`:code:`<space>`:code:`<space>`Now is the time for all good men to come to the aid of their country:code:`<tab>`:code:`<space>`:code:`<space>`'.

:keyword callable cls: A custom type or function that will be passed the direct response
:return: None, or the result of cls(response)
:rtype: None
:raises: ~azure.core.exceptions.HttpResponseError
https://github.com/azure/autorest.python/blob/90d60a965788e3b4c0809e6686bdc3525acac89c/test/vanilla/legacy/Expected/AcceptanceTests/BodyString/bodystring/operations/_string_operations.py#L629-L664
import functools from typing import TYPE_CHECKING import warnings from azure.core.exceptions import ( ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error, ) from azure.core.pipeline import PipelineResponse from azure.core.pipeline.transport import HttpResponse from azure.core.rest import HttpRequest from azure.core.tracing.decorator import distributed_trace from msrest import Serializer from .. import models as _models from .._vendor import _convert_request if TYPE_CHECKING: from typing import Any, Callable, Dict, Generic, Optional, TypeVar T = TypeVar("T") ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]] _SERIALIZER = Serializer() def build_get_null_request( **kwargs ): accept = "application/json" url = kwargs.pop("template_url", '/string/null') header_parameters = kwargs.pop("headers", {}) header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) def build_put_null_request( **kwargs ): content_type = kwargs.pop('content_type', None) accept = "application/json" url = kwargs.pop("template_url", '/string/null') header_parameters = kwargs.pop("headers", {}) if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="PUT", url=url, headers=header_parameters, **kwargs ) def build_get_empty_request( **kwargs ): accept = "application/json" url = kwargs.pop("template_url", '/string/empty') header_parameters = kwargs.pop("headers", {}) header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) def build_put_empty_request( **kwargs ): content_type = kwargs.pop('content_type', None) json = "" accept = "application/json" url = kwargs.pop("template_url", '/string/empty') header_parameters = kwargs.pop("headers", {}) if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="PUT", url=url, headers=header_parameters, json=json, **kwargs ) def build_get_mbcs_request( **kwargs ): accept = "application/json" url = kwargs.pop("template_url", '/string/mbcs') header_parameters = kwargs.pop("headers", {}) header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) def build_put_mbcs_request( **kwargs ): content_type = kwargs.pop('content_type', None) json = "啊齄丂狛狜隣郎隣兀﨩ˊ〞〡¦℡㈱‐ー﹡﹢﹫、〓ⅰⅹ⒈€㈠㈩ⅠⅫ! 
̄ぁんァヶΑ︴АЯаяāɡㄅㄩ─╋︵﹄︻︱︳︴ⅰⅹɑɡ〇〾⿻⺁䜣€" accept = "application/json" url = kwargs.pop("template_url", '/string/mbcs') header_parameters = kwargs.pop("headers", {}) if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="PUT", url=url, headers=header_parameters, json=json, **kwargs ) def build_get_whitespace_request( **kwargs ): accept = "application/json" url = kwargs.pop("template_url", '/string/whitespace') header_parameters = kwargs.pop("headers", {}) header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) def build_put_whitespace_request( **kwargs ): content_type = kwargs.pop('content_type', None) json = " Now is the time for all good men to come to the aid of their country " accept = "application/json" url = kwargs.pop("template_url", '/string/whitespace') header_parameters = kwargs.pop("headers", {}) if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="PUT", url=url, headers=header_parameters, json=json, **kwargs ) def build_get_not_provided_request( **kwargs ): accept = "application/json" url = kwargs.pop("template_url", '/string/notProvided') header_parameters = kwargs.pop("headers", {}) header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) def build_get_base64_encoded_request( **kwargs ): accept = "application/json" url = kwargs.pop("template_url", '/string/base64Encoding') header_parameters = kwargs.pop("headers", {}) header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) def build_get_base64_url_encoded_request( **kwargs ): accept = "application/json" url = kwargs.pop("template_url", '/string/base64UrlEncoding') header_parameters = kwargs.pop("headers", {}) header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) def build_put_base64_url_encoded_request( **kwargs ): content_type = kwargs.pop('content_type', None) accept = "application/json" url = kwargs.pop("template_url", '/string/base64UrlEncoding') header_parameters = kwargs.pop("headers", {}) if content_type is not None: header_parameters['Content-Type'] = _SERIALIZER.header("content_type", content_type, 'str') header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="PUT", url=url, headers=header_parameters, **kwargs ) def build_get_null_base64_url_encoded_request( **kwargs ): accept = "application/json" url = kwargs.pop("template_url", '/string/nullBase64UrlEncoding') header_parameters = kwargs.pop("headers", {}) header_parameters['Accept'] = _SERIALIZER.header("accept", accept, 'str') return HttpRequest( method="GET", url=url, headers=header_parameters, **kwargs ) class StringOperations(object): models = _models def __init__(self, client, config, serializer, deserializer): self._client = client self._serialize = serializer self._deserialize = deserializer self._config = config @distributed_trace def get_null( self, **kwargs ): cls = kwargs.pop("cls", None) 
error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop("error_map", {})) request = build_get_null_request( template_url=self.get_null.metadata["url"], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize("str", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_null.metadata = {"url": "/string/null"} @distributed_trace def put_null( self, string_body=None, **kwargs ): cls = kwargs.pop("cls", None) error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop("error_map", {})) content_type = kwargs.pop("content_type", "application/json") if string_body is not None: json = self._serialize.body(string_body, "str") else: json = None request = build_put_null_request( content_type=content_type, json=json, template_url=self.put_null.metadata["url"], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error) if cls: return cls(pipeline_response, None, {}) put_null.metadata = {"url": "/string/null"} @distributed_trace def get_empty( self, **kwargs ): cls = kwargs.pop("cls", None) error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop("error_map", {})) request = build_get_empty_request( template_url=self.get_empty.metadata["url"], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize("str", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_empty.metadata = {"url": "/string/empty"} @distributed_trace def put_empty( self, **kwargs ): cls = kwargs.pop("cls", None) error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop("error_map", {})) content_type = kwargs.pop("content_type", "application/json") request = build_put_empty_request( content_type=content_type, template_url=self.put_empty.metadata["url"], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in 
[200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error) if cls: return cls(pipeline_response, None, {}) put_empty.metadata = {"url": "/string/empty"} @distributed_trace def get_mbcs( self, **kwargs ): cls = kwargs.pop("cls", None) error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop("error_map", {})) request = build_get_mbcs_request( template_url=self.get_mbcs.metadata["url"], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize("str", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_mbcs.metadata = {"url": "/string/mbcs"} @distributed_trace def put_mbcs( self, **kwargs ): cls = kwargs.pop("cls", None) error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop("error_map", {})) content_type = kwargs.pop("content_type", "application/json") request = build_put_mbcs_request( content_type=content_type, template_url=self.put_mbcs.metadata["url"], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error) if cls: return cls(pipeline_response, None, {}) put_mbcs.metadata = {"url": "/string/mbcs"} @distributed_trace def get_whitespace( self, **kwargs ): cls = kwargs.pop("cls", None) error_map = {401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError} error_map.update(kwargs.pop("error_map", {})) request = build_get_whitespace_request( template_url=self.get_whitespace.metadata["url"], ) request = _convert_request(request) request.url = self._client.format_url(request.url) pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs) response = pipeline_response.http_response if response.status_code not in [200]: map_error(status_code=response.status_code, response=response, error_map=error_map) error = self._deserialize.failsafe_deserialize(_models.Error, pipeline_response) raise HttpResponseError(response=response, model=error) deserialized = self._deserialize("str", pipeline_response) if cls: return cls(pipeline_response, deserialized, {}) return deserialized get_whitespace.metadata = {"url": "/string/whitespace"} @distributed_trace
MIT License
dropbox/securitybot
securitybot/auth/auth.py
Auth.can_auth
python
def can_auth(self): pass
Returns: (bool) Whether 2FA is available.
https://github.com/dropbox/securitybot/blob/8cc4846602011db396df621d2e84e5808a6f3441/securitybot/auth/auth.py#L29-L35
__author__ = 'Alex Bertsch' __email__ = '[email protected]' from securitybot.util import enum from datetime import timedelta from abc import ABCMeta, abstractmethod AUTH_STATES = enum('NONE', 'PENDING', 'AUTHORIZED', 'DENIED', ) AUTH_TIME = timedelta(hours=2) class Auth(object): __metaclass__ = ABCMeta @abstractmethod
Apache License 2.0
burnysc2/python-sc2
sc2/bot_ai.py
BotAI.expansion_locations_dict
python
def expansion_locations_dict(self) -> Dict[Point2, Units]: assert ( self._expansion_positions_list ), f"self._find_expansion_locations() has not been run yet, so accessing the list of expansion locations is pointless." expansion_locations: Dict[Point2, Units] = {pos: Units([], self) for pos in self._expansion_positions_list} for resource in self.resources: exp_position: Point2 = self._resource_location_to_expansion_position_dict.get(resource.position, None) if exp_position: assert exp_position in expansion_locations expansion_locations[exp_position].append(resource) return expansion_locations
Returns dict with the correct expansion position Point2 object as key, resources as Units (mineral fields and vespene geysers) as value. Caution: This function is slow. If you only need the expansion locations, use the property above.
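A hypothetical usage sketch (not taken from the repository): inside a python-sc2 bot's on_step callback the property can be iterated to see how many resources remain around each base. The bot class name is an invented assumption, and actually running it requires an installed StarCraft II client and a launched match.

from sc2.bot_ai import BotAI

class ResourceCountingBot(BotAI):
    async def on_step(self, iteration: int):
        # One entry per expansion position; the value is a Units collection
        # of the mineral fields and vespene geysers around that position.
        for position, resources in self.expansion_locations_dict.items():
            print(position, resources.amount)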
https://github.com/burnysc2/python-sc2/blob/a0b90b4447f23fc352a9bd931ae95ee5f4911032/sc2/bot_ai.py#L271-L288
from __future__ import annotations import itertools import math import random import time import warnings from collections import Counter from contextlib import suppress from typing import TYPE_CHECKING, Dict, List, Optional, Set, Tuple, Union from loguru import logger from s2clientprotocol import sc2api_pb2 as sc_pb from sc2.cache import property_cache_once_per_frame, property_cache_once_per_frame_no_copy from sc2.constants import ( ALL_GAS, EQUIVALENTS_FOR_TECH_PROGRESS, IS_PLACEHOLDER, PROTOSS_TECH_REQUIREMENT, TERRAN_STRUCTURES_REQUIRE_SCV, TERRAN_TECH_REQUIREMENT, ZERG_TECH_REQUIREMENT, FakeEffectID, abilityid_to_unittypeid, geyser_ids, mineral_ids, ) from sc2.data import ActionResult, Alert, Race, Result, Target, race_townhalls from sc2.dicts.unit_research_abilities import RESEARCH_INFO from sc2.dicts.unit_train_build_abilities import TRAIN_INFO from sc2.dicts.unit_trained_from import UNIT_TRAINED_FROM from sc2.dicts.upgrade_researched_from import UPGRADE_RESEARCHED_FROM from sc2.distances import DistanceCalculation from sc2.game_data import AbilityData, Cost, GameData from sc2.game_state import Blip, EffectData, GameState from sc2.ids.ability_id import AbilityId from sc2.ids.unit_typeid import UnitTypeId from sc2.ids.upgrade_id import UpgradeId from sc2.pixel_map import PixelMap from sc2.position import Point2 from sc2.unit import Unit from sc2.unit_command import UnitCommand from sc2.units import Units if TYPE_CHECKING: from sc2.client import Client from sc2.game_info import GameInfo, Ramp class BotAI(DistanceCalculation): EXPANSION_GAP_THRESHOLD = 15 def _initialize_variables(self): DistanceCalculation.__init__(self) if not hasattr(self, "opponent_id"): self.opponent_id: str = None if not hasattr(self, "distance_calculation_method"): self.distance_calculation_method: int = 2 if not hasattr(self, "unit_command_uses_self_do"): self.unit_command_uses_self_do: bool = False self.realtime: bool = False self.base_build: int = -1 self.all_units: Units = Units([], self) self.units: Units = Units([], self) self.workers: Units = Units([], self) self.larva: Units = Units([], self) self.structures: Units = Units([], self) self.townhalls: Units = Units([], self) self.gas_buildings: Units = Units([], self) self.all_own_units: Units = Units([], self) self.enemy_units: Units = Units([], self) self.enemy_structures: Units = Units([], self) self.all_enemy_units: Units = Units([], self) self.resources: Units = Units([], self) self.destructables: Units = Units([], self) self.watchtowers: Units = Units([], self) self.mineral_field: Units = Units([], self) self.vespene_geyser: Units = Units([], self) self.placeholders: Units = Units([], self) self.techlab_tags: Set[int] = set() self.reactor_tags: Set[int] = set() self.minerals: int = 50 self.vespene: int = 0 self.supply_army: float = 0 self.supply_workers: float = 12 self.supply_cap: float = 15 self.supply_used: float = 12 self.supply_left: float = 3 self.idle_worker_count: int = 0 self.army_count: int = 0 self.warp_gate_count: int = 0 self.actions: List[UnitCommand] = [] self.blips: Set[Blip] = set() self.race: Race = None self.enemy_race: Race = None self._units_created: Counter = Counter() self._unit_tags_seen_this_game: Set[int] = set() self._units_previous_map: Dict[int, Unit] = {} self._structures_previous_map: Dict[int, Unit] = {} self._enemy_units_previous_map: Dict[int, Unit] = {} self._enemy_structures_previous_map: Dict[int, Unit] = {} self._all_units_previous_map: Dict[int, Unit] = {} self._previous_upgrades: Set[UpgradeId] = set() 
self._expansion_positions_list: List[Point2] = [] self._resource_location_to_expansion_position_dict: Dict[Point2, Point2] = {} self._time_before_step: float = None self._time_after_step: float = None self._min_step_time: float = math.inf self._max_step_time: float = 0 self._last_step_step_time: float = 0 self._total_time_in_on_step: float = 0 self._total_steps_iterations: int = 0 self.unit_tags_received_action: Set[int] = set() @property def time(self) -> float: return self.state.game_loop / 22.4 @property def time_formatted(self) -> str: t = self.time return f"{int(t // 60):02}:{int(t % 60):02}" @property def step_time(self) -> Tuple[float, float, float, float]: avg_step_duration = ( (self._total_time_in_on_step / self._total_steps_iterations) if self._total_steps_iterations else 0 ) return ( self._min_step_time * 1000, avg_step_duration * 1000, self._max_step_time * 1000, self._last_step_step_time * 1000, ) @property def game_info(self) -> GameInfo: return self._game_info @property def game_data(self) -> GameData: return self._game_data @property def client(self) -> Client: return self._client @property def larva_count(self): warnings.warn( "self.larva_count will be removed soon, please use len(self.larva) or self.larva.amount instead", DeprecationWarning, stacklevel=2, ) return len(self.larva) def alert(self, alert_code: Alert) -> bool: assert isinstance(alert_code, Alert), f"alert_code {alert_code} is no Alert" return alert_code.value in self.state.alerts @property def start_location(self) -> Point2: return self._game_info.player_start_location @property def enemy_start_locations(self) -> List[Point2]: return self._game_info.start_locations @property def main_base_ramp(self) -> Ramp: if hasattr(self, "cached_main_base_ramp"): return self.cached_main_base_ramp try: self.cached_main_base_ramp = min( (ramp for ramp in self.game_info.map_ramps if len(ramp.upper) in {2, 5}), key=lambda r: self.start_location.distance_to(r.top_center), ) except ValueError: self.cached_main_base_ramp = min( (ramp for ramp in self.game_info.map_ramps if len(ramp.upper) in {4, 9}), key=lambda r: self.start_location.distance_to(r.top_center), ) return self.cached_main_base_ramp @property_cache_once_per_frame def expansion_locations_list(self) -> List[Point2]: assert ( self._expansion_positions_list ), f"self._find_expansion_locations() has not been run yet, so accessing the list of expansion locations is pointless." return self._expansion_positions_list @property_cache_once_per_frame
MIT License
kuri65536/python-for-android
python-modules/twisted/twisted/trial/itrial.py
IReporter.addExpectedFailure
python
def addExpectedFailure(test, failure, todo):
Record that the given test failed, and was expected to do so. @type test: L{pyunit.TestCase} @param test: The test which this is about. @type failure: L{failure.Failure} @param failure: The error which this test failed with. @type todo: L{unittest.Todo} @param todo: The reason for the test's TODO status.
https://github.com/kuri65536/python-for-android/blob/26402a08fc46b09ef94e8d7a6bbc3a54ff9d0891/python-modules/twisted/twisted/trial/itrial.py#L170-L180
import zope.interface as zi from zope.interface import Attribute class ITestCase(zi.Interface): failureException = zi.Attribute( "The exception class that is raised by failed assertions") def __call__(result): def countTestCases(): def id(): def run(result): def shortDescription(): class IReporter(zi.Interface): stream = zi.Attribute( "Deprecated in Twisted 8.0. " "The io-stream that this reporter will write to") tbformat = zi.Attribute("Either 'default', 'brief', or 'verbose'") args = zi.Attribute( "Additional string argument passed from the command line") shouldStop = zi.Attribute( """ A boolean indicating that this reporter would like the test run to stop. """) separator = Attribute( "Deprecated in Twisted 8.0. " "A value which will occasionally be passed to the L{write} method.") testsRun = Attribute( """ The number of tests that seem to have been run according to this reporter. """) def startTest(method): def stopTest(method): def startSuite(name): def endSuite(name): def cleanupErrors(errs): def upDownError(userMeth, warn=True, printStatus=True): def addSuccess(test): def addError(test, error): def addFailure(test, failure):
Apache License 2.0
eifinger/homeassistant-config
custom_components/dwd_weather/weather.py
DWDWeather.device_state_attributes
python
def device_state_attributes(self): return self._connector.infos
Return data validity infos.
https://github.com/eifinger/homeassistant-config/blob/fb26cb795ea407710e30fb679d2ca51cfad8cecf/custom_components/dwd_weather/weather.py#L111-L113
import logging from homeassistant.components.weather import WeatherEntity from homeassistant.const import TEMP_CELSIUS from homeassistant.helpers.typing import ConfigType, HomeAssistantType from .const import ( ATTRIBUTION, DEFAULT_NAME, DOMAIN, DWDWEATHER_COORDINATOR, DWDWEATHER_DATA, DWDWEATHER_NAME, ) _LOGGER = logging.getLogger(__name__) async def async_setup_entry( hass: HomeAssistantType, entry: ConfigType, async_add_entities ) -> None: hass_data = hass.data[DOMAIN][entry.entry_id] async_add_entities([DWDWeather(entry.data, hass_data)], False) class DWDWeather(WeatherEntity): def __init__(self, entry_data, hass_data): self._connector = hass_data[DWDWEATHER_DATA] self._coordinator = hass_data[DWDWEATHER_COORDINATOR] self._name = f"{DEFAULT_NAME} {hass_data[DWDWEATHER_NAME]}" self._unique_id = f"{hass_data[DWDWEATHER_NAME]}" async def async_added_to_hass(self): self.async_on_remove( self._coordinator.async_add_listener(self.async_write_ha_state) ) @property def should_poll(self): return False @property def unique_id(self): return self._unique_id @property def name(self): self._name @property def condition(self): return self._connector.get_condition() @property def temperature(self): return self._connector.get_temperature() @property def temperature_unit(self): return TEMP_CELSIUS @property def pressure(self): return self._connector.get_pressure() @property def wind_speed(self): return self._connector.get_wind_speed() @property def wind_bearing(self): return self._connector.get_wind_direction() @property def visibility(self): return self._connector.get_visibility() @property def humidity(self): return self._connector.get_humidity() @property def attribution(self): return ATTRIBUTION @property def forecast(self): return self._connector.forecast @property
MIT License
oversurge/ps1-brender-reverse
ps1_argonaut/files/WADFile.py
WADFile.export_experimental_models
python
def export_experimental_models(self, folder_path: Path, wad_filename: str): n_models = self.n_models n_animations = self.n_animations def guess_compatible_animation(position: int, n_vertices_groups: int): position = int((position / n_models) * n_animations) a = int(position) b = int(math.ceil(position)) while a > -1 or b < n_animations: if a > -1: if self.animations[a].n_vertices_groups == n_vertices_groups: return a a -= 1 if b < n_animations: if self.animations[b].n_vertices_groups == n_vertices_groups: return b b += 1 return None if not folder_path.exists(): folder_path.mkdir() elif folder_path.is_file(): raise FileExistsError self._prepare_obj_export(folder_path, wad_filename) for i, model_3d in enumerate(self.dpsx.models_3d): obj_filename = f"{wad_filename}_{i}" with (folder_path / (obj_filename + '.OBJ')).open('w', encoding='ASCII') as obj_file: if model_3d.n_vertices_groups == 1: model_3d.to_single_obj(obj_file, obj_filename, self.textures, wad_filename) else: animation_id = guess_compatible_animation(i, self.models_3d[i].n_vertices_groups) if animation_id is None: model_3d.to_single_obj(obj_file, obj_filename, self.textures, wad_filename) else: model_3d.animate(self.animations[animation_id]).to_single_obj(obj_file, obj_filename, self.textures, wad_filename)
Tries to find one compatible animation for each model in the WAD, animates it to make it clean (see doc about 3D models) and exports them into Wavefront OBJ files at the given location.
https://github.com/oversurge/ps1-brender-reverse/blob/9e0f4bed801d94eef5542be9b202505a92aead06/ps1_argonaut/files/WADFile.py#L148-L187
import math from io import BytesIO, SEEK_CUR, StringIO, BufferedIOBase from pathlib import Path from typing import Dict, Union, Optional, List from ps1_argonaut.BaseDataClasses import BaseWADSection from ps1_argonaut.configuration import Configuration, wavefront_header, G from ps1_argonaut.errors_warnings import SectionNameError from ps1_argonaut.files.DATFile import DATFile from ps1_argonaut.wad_sections.DPSX.ChunkClasses import ChunkHolder from ps1_argonaut.wad_sections.DPSX.DPSXSection import DPSXSection from ps1_argonaut.wad_sections.DPSX.Model3DData import Model3DData from ps1_argonaut.wad_sections.ENDSection import ENDSection from ps1_argonaut.wad_sections.PORTSection import PORTSection from ps1_argonaut.wad_sections.SPSX.SPSXSection import SPSXSection from ps1_argonaut.wad_sections.SPSX.Sounds import DialoguesBGMsSoundFlags from ps1_argonaut.wad_sections.TPSX.TPSXSection import TPSXSection class WADFile(Dict[bytes, BaseWADSection], DATFile): suffix = 'WAD' sections_conf: Dict[bytes, BaseWADSection] = { TPSXSection.codename_bytes: TPSXSection, SPSXSection.codename_bytes: SPSXSection, DPSXSection.codename_bytes: DPSXSection, PORTSection.codename_bytes: PORTSection, ENDSection.codename_bytes: ENDSection} def __init__(self, stem: str, sections: Dict[bytes, BaseWADSection] = None, data: bytes = None): dict.__init__(self, sections if sections is not None else {}) DATFile.__init__(self, stem, data=data) def __str__(self): titles = ' ({})'.format(', '.join(title.strip(' ') for title in self.titles)) if self.titles else '' res = f"Game level{titles}" if self: res += '\n' if self.tpsx: res += f" {self.n_textures:>4} texture(s)" if isinstance(self.spsx, SPSXSection): res += f" {self.n_sounds:>4} audio file(s)" if self.dpsx: res += f" {self.n_models:>4} model(s) {self.n_animations:>4} animation(s)" f" {self.n_filled_chunks:>4} chunk(s)" return res @property def tpsx(self) -> Optional[TPSXSection]: return self[TPSXSection.codename_bytes] if TPSXSection.codename_bytes in self else None @property def spsx(self) -> Optional[SPSXSection]: return self[SPSXSection.codename_bytes] if SPSXSection.codename_bytes in self else None @property def dpsx(self) -> Optional[DPSXSection]: return self[DPSXSection.codename_bytes] if DPSXSection.codename_bytes in self else None @property def port(self) -> Optional[PORTSection]: return self[PORTSection.codename_bytes] if PORTSection.codename_bytes in self else None @property def end(self) -> Optional[ENDSection]: return self[ENDSection.codename_bytes] if ENDSection.codename_bytes in self else None @property def titles(self) -> List[str]: return () if self.tpsx is None else self.tpsx.titles @property def textures(self): return None if (self.tpsx is None) else self.tpsx.texture_file.textures @property def n_textures(self): return 0 if self.tpsx is None else len(self.tpsx.texture_file.textures) @property def common_sound_effects(self): return None if (self.spsx is None) else self.spsx.common_sfx @property def ambient_tracks(self): return None if (self.spsx is None) else self.spsx.ambient_tracks @property def flattened_level_sfx(self): return None if (self.end is None) else self.spsx.level_sfx_groups.sounds @property def level_sfx(self): return None if (self.end is None) else self.spsx.level_sfx_groups @property def dialogues_bgms(self): return None if (self.end is None) else self.spsx.dialogues_bgms @property def n_sounds(self): return 0 if (self.spsx is None or not isinstance(self.spsx, SPSXSection)) else self.spsx.n_sounds @property def models_3d(self): return None if 
self.dpsx is None else self.dpsx.models_3d @property def n_models(self): return 0 if self.dpsx is None else len(self.dpsx.models_3d) @property def animations(self): return None if self.dpsx is None else self.dpsx.animations @property def n_animations(self): return 0 if self.dpsx is None else len(self.dpsx.animations) @property def scripts(self): return None if self.dpsx is None else self.dpsx.scripts @property def n_scripts(self): return 0 if self.dpsx is None else len(self.dpsx.scripts) @property def chunks_matrix(self): return None if (self.dpsx is None) else self.dpsx.level_file.chunks_matrix @property def n_filled_chunks(self): return 0 if self.dpsx is None else self.dpsx.level_file.chunks_matrix.n_filled_chunks def _prepare_obj_export(self, folder_path: Path, wad_filename: str): with (folder_path / (wad_filename + '.MTL')).open('w', encoding='ASCII') as mtl_file: mtl_file.write(wavefront_header + f"newmtl mtl1\nmap_Kd {wad_filename}.PNG") self.tpsx.texture_file.to_colorized_texture().save(folder_path / (wad_filename + '.PNG'))
MIT License
cos-archives/modular-odm
modularodm/ext/concurrency.py
with_proxies
python
def with_proxies(proxy_map, get_key): def wrapper(cls): for label, ProxiedClass in six.iteritems(proxy_map): proxy = proxy_factory(cls, label, ProxiedClass, get_key) setattr(cls, label, proxy) return cls return wrapper
Class decorator factory; adds proxy class variables to target class. :param dict proxy_map: Mapping between class variable labels and proxied classes :param function get_key: Extension-specific key function; may return e.g. the current Flask request
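A minimal self-contained sketch of the same idea, deliberately avoiding werkzeug's LocalProxy: a class decorator that lazily creates one proxied instance per key returned by get_key, so each thread (or request) sees its own cache. Everything below is illustrative and simplified; special methods such as __getitem__ are not forwarded by this toy proxy.

import collections
import threading

def simple_proxy_factory(ProxiedClass, get_key):
    instances = collections.defaultdict(ProxiedClass)   # one instance per key

    class _Proxy(object):
        def __getattr__(self, name):
            # Forward attribute access to the instance owned by the current key.
            return getattr(instances[get_key()], name)

    return _Proxy()

def simple_with_proxies(proxy_map, get_key):
    def wrapper(cls):
        for label, ProxiedClass in proxy_map.items():
            setattr(cls, label, simple_proxy_factory(ProxiedClass, get_key))
        return cls
    return wrapper

@simple_with_proxies({'_cache': dict}, threading.get_ident)
class StoredObject(object):
    pass

StoredObject._cache.update({'hits': 1})   # touches this thread's dict only
print(StoredObject._cache.get('hits'))    # 1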
https://github.com/cos-archives/modular-odm/blob/8a34891892b8af69b21fdc46701c91763a5c1cf9/modularodm/ext/concurrency.py#L48-L62
import weakref import collections import six from werkzeug.local import LocalProxy from modularodm.cache import Cache from modularodm.writequeue import WriteQueue proxies = collections.defaultdict( lambda: collections.defaultdict(weakref.WeakKeyDictionary) ) proxied_members = { '_cache': Cache, '_object_cache': Cache, 'queue': WriteQueue, } def proxy_factory(BaseSchema, label, ProxiedClass, get_key): def local(): key = get_key() try: return proxies[BaseSchema][label][key] except KeyError: proxies[BaseSchema][label][key] = ProxiedClass() return proxies[BaseSchema][label][key] return LocalProxy(local)
Apache License 2.0
nervanasystems/ngraph-python
ngraph/op_graph/tensorboard/summary.py
scalar
python
def scalar(name, scalar): name = _clean_tag(name) if not isinstance(scalar, float): scalar = float(scalar) return Summary(value=[Summary.Value(tag=name, simple_value=scalar)])
Outputs a `Summary` protocol buffer containing a single scalar value. The generated Summary has a Tensor.proto containing the input Tensor. Args: name: A name for the generated node. Will also serve as the series name in TensorBoard. scalar: A real numeric Tensor containing a single value. collections: Optional list of graph collections keys. The new summary op is added to these collections. Defaults to `[GraphKeys.SUMMARIES]`. Returns: A scalar `Tensor` of type `string`. Which contains a `Summary` protobuf. Raises: ValueError: If tensor has the wrong shape or type.
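A hypothetical usage sketch, assuming TensorFlow (which provides the Summary protobuf imported by this module) and ngraph are installed; the tag name 'train/loss' and the value are arbitrary, and writing the serialized bytes into a TensorBoard event file is left out.

from ngraph.op_graph.tensorboard.summary import scalar

summ = scalar('train/loss', 0.5)
print(summ.value[0].tag, summ.value[0].simple_value)   # train/loss 0.5
serialized = summ.SerializeToString()                  # bytes for an event-file writer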
https://github.com/nervanasystems/ngraph-python/blob/ac032c83c7152b615a9ad129d54d350f9d6a2986/ngraph/op_graph/tensorboard/summary.py#L69-L87
from __future__ import absolute_import from __future__ import division from __future__ import print_function import logging import re as _re import bisect from six import StringIO from six.moves import range from PIL import Image import wave import numpy as np from tensorflow.core.framework.summary_pb2 import Summary, HistogramProto logger = logging.getLogger(__name__) _INVALID_TAG_CHARACTERS = _re.compile(r'[^-/\w\.]') def _clean_tag(name): if name is not None: new_name = _INVALID_TAG_CHARACTERS.sub('_', name) new_name = new_name.lstrip('/') if new_name != name: logger.debug('Summary name {} is illegal; using {} instead.'.format(name, new_name)) name = new_name return name
Apache License 2.0
dhermes/project-euler
python/functions.py
recurrence_next
python
def recurrence_next(relation, values): if len(relation) != len(values): raise ValueError("Poorly specified recurrence") recurrence_order = len(relation) next_val = sum(relation[i] * values[i] for i in range(recurrence_order)) return values[1:] + [next_val]
Gets next term in a recurrence based on relation. Assumes recurrence of length k satisfies f(n+k) = relation[0]*f(n) + relation[1]*f(n+1) + ... Values are also expected to be ordered [f(n),f(n+1),...]
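A self-contained usage sketch; the helper below restates the record's logic (using zip instead of explicit indexing) so the snippet runs on its own, then iterates the Fibonacci recurrence f(n+2) = 1*f(n) + 1*f(n+1).

def recurrence_next(relation, values):
    # Next term is the dot product of the relation with the current window of values.
    if len(relation) != len(values):
        raise ValueError("Poorly specified recurrence")
    next_val = sum(r * v for r, v in zip(relation, values))
    return values[1:] + [next_val]

values = [1, 1]                      # f(1), f(2)
for _ in range(8):
    values = recurrence_next([1, 1], values)
print(values)                        # [34, 55], i.e. f(9) and f(10)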
https://github.com/dhermes/project-euler/blob/2aabf89b28f033a3e59b7411a9c9c261117818cf/python/functions.py#L55-L67
import operator import sys from fractions import gcd from math import factorial from math import log from math import sqrt from path import DATA_PATH def lcm(n, m): return n * m / (gcd(n, m)) def choose(n, k): return factorial(n) / (factorial(k) * factorial(n - k)) def get_data(problem_number): filename = 'no%s.txt' % str(problem_number).zfill(3) absolute_path = '%s/%s' % (DATA_PATH, filename) with open(absolute_path) as fh: result = fh.read() return result def robust_divide(n, quotient, include_count=False): if quotient in (-1, 1): raise ValueError("Please don't use %s as a quotient." % quotient) result = n count = 0 while result % quotient == 0: count += 1 result = result / quotient if include_count: return result, count else: return result
Apache License 2.0
fish-quant/big-fish
bigfish/detection/spot_modeling.py
_build_reference_spot_2d
python
def _build_reference_spot_2d(image, spots, radius, alpha): radius_yx = np.ceil(radius[-1]).astype(np.int64) yx_shape = radius_yx * 2 + 1 indices = [i for i in range(spots.shape[0])] np.random.shuffle(indices) indices = indices[:min(2000, spots.shape[0])] candidate_spots = spots[indices, :] l_reference_spot = [] for i_spot in range(candidate_spots.shape[0]): spot_y, spot_x = candidate_spots[i_spot, :] image_spot, _ = _get_spot_surface(image, spot_y, spot_x, radius_yx) if image_spot.shape == (yx_shape, yx_shape): l_reference_spot.append(image_spot) if len(l_reference_spot) <= 30: warnings.warn("Problem occurs during the computation of a reference " "spot. Not enough (uncropped) spots have been detected.", UserWarning) if len(l_reference_spot) == 0: reference_spot = np.zeros((yx_shape, yx_shape), dtype=image.dtype) return reference_spot l_reference_spot = np.stack(l_reference_spot, axis=0) alpha_ = alpha * 100 reference_spot = np.percentile(l_reference_spot, alpha_, axis=0) reference_spot = reference_spot.astype(image.dtype) return reference_spot
Build a median or mean spot in 2 dimensions as reference. Reference spot is computed from a sample of uncropped detected spots. If such sample is not possible, an empty frame is returned. Parameters ---------- image : np.ndarray Image with shape (y, x). spots : np.ndarray, np.int64 Coordinate of the spots with shape (nb_spots, 2) for 2-d images. radius : Tuple[float] Radius in pixels of the detected spots, one element per dimension. alpha : int or float Intensity score of the reference spot, between 0 and 1. If 0, reference spot approximates the spot with the lowest intensity. If 1, reference spot approximates the brightest spot. Returns ------- reference_spot : np.ndarray Reference spot in 2-d.
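A self-contained numpy sketch of the core idea rather than a call into the library: crop a fixed-size patch around each detected spot, keep only the patches that were not clipped by the image border, and take a per-pixel percentile across them. The synthetic image and spot coordinates are invented for illustration.

import numpy as np

rng = np.random.default_rng(0)
image = rng.integers(0, 500, size=(128, 128)).astype(np.uint16)
spots = rng.integers(10, 118, size=(50, 2))               # (y, x) coordinates
radius_yx, alpha = 3, 0.5

patches = []
for y, x in spots:
    patch = image[y - radius_yx:y + radius_yx + 1, x - radius_yx:x + radius_yx + 1]
    if patch.shape == (2 * radius_yx + 1, 2 * radius_yx + 1):   # skip cropped spots
        patches.append(patch)

reference_spot = np.percentile(np.stack(patches), alpha * 100, axis=0)
reference_spot = reference_spot.astype(image.dtype)
print(reference_spot.shape)                                # (7, 7)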
https://github.com/fish-quant/big-fish/blob/5512b6e3274872793ef4365a6dc423c72add91f9/bigfish/detection/spot_modeling.py#L211-L275
import warnings import numpy as np import bigfish.stack as stack from scipy.special import erf from scipy.optimize import curve_fit def build_reference_spot(image, spots, voxel_size_z=None, voxel_size_yx=100, psf_z=None, psf_yx=200, alpha=0.5): stack.check_array(image, ndim=[2, 3], dtype=[np.uint8, np.uint16, np.float32, np.float64]) stack.check_array(spots, ndim=2, dtype=np.int64) stack.check_parameter(voxel_size_z=(int, float, type(None)), voxel_size_yx=(int, float), psf_z=(int, float, type(None)), psf_yx=(int, float), alpha=(int, float)) if alpha < 0 or alpha > 1: raise ValueError("'alpha' should be a value between 0 and 1, not {0}" .format(alpha)) ndim = image.ndim if ndim == 3 and voxel_size_z is None: raise ValueError("Provided image has {0} dimensions but " "'voxel_size_z' parameter is missing.".format(ndim)) if ndim == 3 and psf_z is None: raise ValueError("Provided image has {0} dimensions but " "'psf_z' parameter is missing.".format(ndim)) if ndim != spots.shape[1]: raise ValueError("Provided image has {0} dimensions but spots are " "detected in {1} dimensions." .format(ndim, spots.shape[1])) if ndim == 2: voxel_size_z, psf_z = None, None radius = stack.get_radius(voxel_size_z, voxel_size_yx, psf_z, psf_yx) if image.ndim == 3: reference_spot = _build_reference_spot_3d(image, spots, radius, alpha) else: reference_spot = _build_reference_spot_2d(image, spots, radius, alpha) return reference_spot def _build_reference_spot_3d(image, spots, radius, alpha): radius_z = np.ceil(radius[0]).astype(np.int64) z_shape = radius_z * 2 + 1 radius_yx = np.ceil(radius[-1]).astype(np.int64) yx_shape = radius_yx * 2 + 1 indices = [i for i in range(spots.shape[0])] np.random.shuffle(indices) indices = indices[:min(2000, spots.shape[0])] candidate_spots = spots[indices, :] l_reference_spot = [] for i_spot in range(candidate_spots.shape[0]): spot_z, spot_y, spot_x = candidate_spots[i_spot, :] image_spot, _, = _get_spot_volume(image, spot_z, spot_y, spot_x, radius_z, radius_yx) if image_spot.shape == (z_shape, yx_shape, yx_shape): l_reference_spot.append(image_spot) if len(l_reference_spot) <= 30: warnings.warn("Problem occurs during the computation of a reference " "spot. Not enough (uncropped) spots have been detected.", UserWarning) if len(l_reference_spot) == 0: reference_spot = np.zeros((z_shape, yx_shape, yx_shape), dtype=image.dtype) return reference_spot l_reference_spot = np.stack(l_reference_spot, axis=0) alpha_ = alpha * 100 reference_spot = np.percentile(l_reference_spot, alpha_, axis=0) reference_spot = reference_spot.astype(image.dtype) return reference_spot def _get_spot_volume(image, spot_z, spot_y, spot_x, radius_z, radius_yx): z_spot_min = max(0, int(spot_z - radius_z)) z_spot_max = min(image.shape[0], int(spot_z + radius_z)) y_spot_min = max(0, int(spot_y - radius_yx)) y_spot_max = min(image.shape[1], int(spot_y + radius_yx)) x_spot_min = max(0, int(spot_x - radius_yx)) x_spot_max = min(image.shape[2], int(spot_x + radius_yx)) image_spot = image[z_spot_min:z_spot_max + 1, y_spot_min:y_spot_max + 1, x_spot_min:x_spot_max + 1] return image_spot, (z_spot_min, y_spot_min, x_spot_min)
BSD 3-Clause New or Revised License
nuagenetworks/vspk-python
vspk/v5_0/nunetconfprofile.py
NUNetconfProfile.port
python
def port(self, value): self._port = value
Set port value. Notes: Netconf session port
https://github.com/nuagenetworks/vspk-python/blob/375cce10ae144ad6017104e57fcd3630898cc2a6/vspk/v5_0/nunetconfprofile.py#L244-L252
from .fetchers import NUMetadatasFetcher from .fetchers import NUGlobalMetadatasFetcher from bambou import NURESTObject class NUNetconfProfile(NURESTObject): __rest_name__ = "netconfprofile" __resource_name__ = "netconfprofiles" CONST_ENTITY_SCOPE_GLOBAL = "GLOBAL" CONST_ENTITY_SCOPE_ENTERPRISE = "ENTERPRISE" def __init__(self, **kwargs): super(NUNetconfProfile, self).__init__() self._name = None self._password = None self._last_updated_by = None self._description = None self._entity_scope = None self._port = None self._user_name = None self._assoc_entity_type = None self._external_id = None self.expose_attribute(local_name="name", remote_name="name", attribute_type=str, is_required=True, is_unique=False) self.expose_attribute(local_name="password", remote_name="password", attribute_type=str, is_required=True, is_unique=False) self.expose_attribute(local_name="last_updated_by", remote_name="lastUpdatedBy", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="description", remote_name="description", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="entity_scope", remote_name="entityScope", attribute_type=str, is_required=False, is_unique=False, choices=[u'ENTERPRISE', u'GLOBAL']) self.expose_attribute(local_name="port", remote_name="port", attribute_type=int, is_required=False, is_unique=False) self.expose_attribute(local_name="user_name", remote_name="userName", attribute_type=str, is_required=True, is_unique=False) self.expose_attribute(local_name="assoc_entity_type", remote_name="assocEntityType", attribute_type=str, is_required=False, is_unique=False) self.expose_attribute(local_name="external_id", remote_name="externalID", attribute_type=str, is_required=False, is_unique=True) self.metadatas = NUMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child") self.global_metadatas = NUGlobalMetadatasFetcher.fetcher_with_object(parent_object=self, relationship="child") self._compute_args(**kwargs) @property def name(self): return self._name @name.setter def name(self, value): self._name = value @property def password(self): return self._password @password.setter def password(self, value): self._password = value @property def last_updated_by(self): return self._last_updated_by @last_updated_by.setter def last_updated_by(self, value): self._last_updated_by = value @property def description(self): return self._description @description.setter def description(self, value): self._description = value @property def entity_scope(self): return self._entity_scope @entity_scope.setter def entity_scope(self, value): self._entity_scope = value @property def port(self): return self._port @port.setter
BSD 3-Clause New or Revised License
sjml/simulatorgenerator
lib/twitter.py
Status.__init__
python
def __init__(self, **kwargs): param_defaults = { 'coordinates': None, 'contributors': None, 'created_at': None, 'current_user_retweet': None, 'favorited': None, 'favorite_count': None, 'geo': None, 'id': None, 'in_reply_to_screen_name': None, 'in_reply_to_user_id': None, 'in_reply_to_status_id': None, 'lang': None, 'location': None, 'now': None, 'place': None, 'possibly_sensitive': None, 'retweeted': None, 'retweeted_status': None, 'retweet_count': None, 'scopes': None, 'source': None, 'text': None, 'truncated': None, 'urls': None, 'user': None, 'user_mentions': None, 'hashtags': None, 'media': None, 'withheld_copyright': None, 'withheld_in_countries': None, 'withheld_scope': None} for (param, default) in param_defaults.iteritems(): setattr(self, param, kwargs.get(param, default))
An object to hold a Twitter status message. This class is normally instantiated by the twitter.Api class and returned in a sequence. Note: Dates are posted in the form "Sat Jan 27 04:17:38 +0000 2007" Args: created_at: The time this status message was posted. [Optional] favorited: Whether this is a favorite of the authenticated user. [Optional] favorite_count: Number of times this status message has been favorited. [Optional] id: The unique id of this status message. [Optional] text: The text of this status message. [Optional] location: the geolocation string associated with this message. [Optional] relative_created_at: A human readable string representing the posting time. [Optional] user: A twitter.User instance representing the person posting the message. [Optional] now: The current time, if the client chooses to set it. Defaults to the wall clock time. [Optional] urls: user_mentions: hashtags: geo: place: coordinates: contributors: retweeted: retweeted_status: current_user_retweet: retweet_count: possibly_sensitive: scopes: withheld_copyright: withheld_in_countries: withheld_scope:
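A hypothetical construction sketch; the field values below are invented and only a few of the supported keyword arguments are shown (note the class targets the Python 2-era python-twitter API).

status = Status(
    id=123456789,
    text='Just setting up my twttr',
    created_at='Sat Jan 27 04:17:38 +0000 2007',
    retweet_count=0,
)
print(status.text)
print(status.favorited)    # keyword arguments that were not passed default to None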
https://github.com/sjml/simulatorgenerator/blob/f9f2127fc63c18facd0625361514ebc217b47bff/lib/twitter.py#L118-L198
__author__ = '[email protected]' __version__ = '1.1' import base64 import os import rfc822 import sys import tempfile import textwrap import time import urllib import urllib2 import urlparse import gzip import StringIO import re import requests from requests_oauthlib import OAuth1 from calendar import timegm from datetime import datetime try: import json as simplejson except ImportError: try: import simplejson except ImportError: try: from django.utils import simplejson except ImportError: raise ImportError, "Unable to load a json library" try: from urlparse import parse_qsl, parse_qs except ImportError: from cgi import parse_qsl, parse_qs try: from hashlib import md5 except ImportError: from md5 import md5 CHARACTER_LIMIT = 140 DEFAULT_CACHE = object() REQUEST_TOKEN_URL = 'https://api.twitter.com/oauth/request_token' ACCESS_TOKEN_URL = 'https://api.twitter.com/oauth/access_token' AUTHORIZATION_URL = 'https://api.twitter.com/oauth/authorize' SIGNIN_URL = 'https://api.twitter.com/oauth/authenticate' class TwitterError(Exception): @property def message(self): return self.args[0] class Status(object):
MIT License
martinomensio/spacy-universal-sentence-encoder
spacy_universal_sentence_encoder/__init__.py
create_from
python
def create_from(nlp, use_model_code): if use_model_code not in util.configs: raise ValueError(f'Model "{use_model_code}" not available') config = util.configs[use_model_code] return language.create_nlp(config, nlp)
From an existing `nlp` object, adds the vectors from the specific `use_model_code` by adding pipeline stages
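A hypothetical usage sketch: the base pipeline 'en_core_web_sm' and the model code 'en_use_md' are assumptions (any installed spaCy pipeline and any code present in util.configs should work), and the first call downloads the underlying TensorFlow Hub model.

import spacy
import spacy_universal_sentence_encoder

nlp = spacy.load('en_core_web_sm')
nlp = spacy_universal_sentence_encoder.create_from(nlp, 'en_use_md')

doc = nlp('This pipeline now carries Universal Sentence Encoder vectors.')
print(doc.vector.shape)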
https://github.com/martinomensio/spacy-universal-sentence-encoder/blob/bbc6efe924b8abc71bd4c347878822fed0bd03f7/spacy_universal_sentence_encoder/__init__.py#L21-L26
from __future__ import unicode_literals from spacy.util import load_model_from_init_py from spacy.tokens import Doc import warnings from . import util from .util import create_lang as load_model __version__ = util.pkg_meta["version"] from . import language warnings.filterwarnings('ignore', message=r"\[W007\]", category=UserWarning) def load(**overrides): return load_model_from_init_py(__file__, **overrides)
MIT License
bobotig/ebook-reader-dict
wikidict/utils.py
format_description
python
def format_description(locale: str, output_dir: Path) -> str: words_count = (output_dir / "words.count").read_text().strip() thousands_sep = thousands_separator[locale] words_count = f"{int(words_count):,}".replace(",", thousands_sep) dump_date = (output_dir / "words.snapshot").read_text().strip() dump_date = f"{dump_date[:4]}-{dump_date[4:6]}-{dump_date[6:8]}" url_dictfile = DOWNLOAD_URL_DICTFILE.format(locale) url_kobo = DOWNLOAD_URL_KOBO.format(locale) url_stardict = DOWNLOAD_URL_STARDICT.format(locale) creation_date = NOW.isoformat() return release_description[locale].format(**locals())
Generate the release description.
https://github.com/bobotig/ebook-reader-dict/blob/1f3c27b12c36091641ae47bdd3bf2f9f72153368/wikidict/utils.py#L115-L135
import re from contextlib import suppress from datetime import datetime from functools import partial from pathlib import Path from typing import List, Match, Tuple, Union from warnings import warn from cachetools import cached from cachetools.keys import hashkey import regex import requests import wikitextparser from .constants import ( DOWNLOAD_URL_DICTFILE, DOWNLOAD_URL_KOBO, DOWNLOAD_URL_STARDICT, IMG_CSS, ) from .lang import ( last_template_handler, pattern_file, release_description, templates_ignored, templates_italic, templates_multi, templates_other, thousands_separator, ) from .user_functions import * from .hiero_utils import render_hiero NOW = datetime.utcnow() MAGIC_WORDS = { "CURRENTYEAR": str(NOW.year), "CURRENTMONTH": NOW.strftime("%m"), "CURRENTMONTH1": str(NOW.month), "CURRENTDAY": str(NOW.day), "CURRENTDAY2": NOW.strftime("%d"), "CURRENTDOW": NOW.strftime("%w"), "CURRENTTIME": NOW.strftime("%H:%M"), "CURRENTHOUR": NOW.strftime("%H"), "CURRENTWEEK": NOW.strftime("%V"), "CURRENTTIMESTAMP": NOW.strftime("%Y%m%d%H%M%S"), } def convert_gender(gender: str) -> str: return f" <i>{gender}.</i>" if gender else "" def convert_pronunciation(pronunciations: List[str]) -> str: if not pronunciations: return "" return " " + ", ".join(f"\\{p}\\" for p in pronunciations) def get_word_of_the_day(locale: str) -> str: months = { "en": [ "January", "February", "March", "April", "May", "June", "July", "August", "September", "October", "November", "December", ], } word_of_the_day = { "ca": ("", ""), "es": ( f"Plantilla:palabra_de_la_semana/{NOW.strftime('%-V')}", r" palabra= ([^\|]+)", ), "en": ( f"Wiktionary:Word_of_the_day/{NOW.strftime('%Y')}/{months['en'][int(NOW.strftime('%-m')) - 1]}_{NOW.strftime('%d')}", r"{{WOTD\|([^\|]+)\|", ), "fr": ( f"Mod%C3%A8le:Entr%C3%A9e_du_jour/{NOW.strftime('%Y/%m/%d')}", r"<span style=\"font-size:120%;\">'''\[\[([^\]]+)\]\]'''</span>", ), "pt": ("", ""), "sv": ( "Mall:högkvalitativt", r"<big>\[\[([^\]]+)\]\]</big>", ), } special_word, pattern = word_of_the_day[locale] url = f"https://{locale}.wiktionary.org/wiki/{special_word}?action=raw" with requests.get(url) as req: matches = re.findall(pattern, req.text) return str(matches[0].strip()) if matches else ""
MIT License
angr/angr
angr/state_plugins/heap/heap_base.py
SimHeapBase._conc_alloc_size
python
def _conc_alloc_size(self, sim_size): if self.state.solver.symbolic(sim_size): size = self.state.solver.max_int(sim_size) if size > self.state.libc.max_variable_size: l.warning("Allocation request of %d bytes exceeded maximum of %d bytes; allocating %d bytes", size, self.state.libc.max_variable_size, self.state.libc.max_variable_size) size = self.state.libc.max_variable_size else: size = self.state.solver.eval(sim_size) return size
Concretizes a size argument, if necessary, to something that makes sense when allocating space. Here we just maximize its potential size up to the maximum variable size specified in the libc plugin. TODO: Further consideration of the tradeoffs of this approach is probably warranted. SimHeapPTMalloc especially makes a lot of different concretization strategy assumptions, but this function handles one of the more important problems that any heap implementation will face: how to decide the amount of space to allocate upon request for a symbolic size. Either we do as we do here and silently constrain the amount returned to a default max value, or we could add a path constraint to the state to prevent exploration of any paths that would have legitimately occurred given a larger allocation size. The first approach (the silent maximum) has its benefit in that the explored state space will not be constrained. Sometimes this could work out, as when an allocation is returned that is smaller than requested but which the program doesn't end up making full use of anyways. Alternatively, this lack of fidelity could cause the program to overwrite other allocations made, since it should be able to assume the allocation is as large as it requested it be. The second approach (the path constraint) has its benefit in that no paths will be explored that *could* fail when an allocation is made too small. On the other hand, as stated above, some of these paths might not have failed anyways, and doing this causes us to lose the opportunity to explore those paths. Perhaps these behaviors could be parameterized in the future?
https://github.com/angr/angr/blob/94de0f468df0c0d27428301dae93d94f935ade9b/angr/state_plugins/heap/heap_base.py#L40-L73
from ..plugin import SimStatePlugin from ...errors import SimMemoryError from .. import sim_options as opts import logging l = logging.getLogger("angr.state_plugins.heap.heap_base") DEFAULT_HEAP_LOCATION = 0xc0000000 DEFAULT_HEAP_SIZE = 64 * 4096 class SimHeapBase(SimStatePlugin): def __init__(self, heap_base=None, heap_size=None): super().__init__() self.heap_base = heap_base if heap_base is not None else DEFAULT_HEAP_LOCATION self.heap_size = heap_size if heap_size is not None else DEFAULT_HEAP_SIZE self.mmap_base = self.heap_base + self.heap_size * 2 def copy(self, memo): o = super().copy(memo) o.heap_base = self.heap_base o.heap_size = self.heap_size o.mmap_base = self.mmap_base return o
BSD 2-Clause Simplified License
qutech-delft/quantuminspire
src/quantuminspire/qiskit/circuit_parser.py
CircuitToString.parse
python
def parse(self, stream: StringIO, instruction: QasmQobjInstruction) -> None: if instruction.name == 'bfunc': self.bfunc_instructions.append(instruction) elif hasattr(instruction, 'conditional'): self._parse_bin_ctrl_gate(stream, instruction) else: gate_name = f'_{instruction.name.lower()}' gate_function = getattr(self, gate_name, getattr(self, "_gate_not_supported")) gate_function(stream, instruction)
Parses a gate. For each type of gate a separate (private) parsing method is defined and called. The resulting cQASM code is written to the stream. When the gate is a binary controlled gate, Qiskit uses two instructions to handle it. The first instruction is a so-called bfunc with the conditional information (mask, value to check etc.) which is stored for later use. The next instruction is the actual gate which must be executed conditionally. The parsing is forwarded to method _parse_bin_ctrl_gate which reads the earlier stored bfunc. When a gate is not supported _gate_not_supported is called which raises an exception. :param stream: The string-io stream to where the resulting cQASM is written. :param instruction: The Qiskit instruction to translate to cQASM.
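A hypothetical usage sketch, assuming a Qiskit version whose qiskit.qobj module still exposes QasmQobjInstruction (the import used by this module); the Hadamard instruction below is invented for illustration.

from io import StringIO
from qiskit.qobj import QasmQobjInstruction
from quantuminspire.qiskit.circuit_parser import CircuitToString

parser = CircuitToString(full_state_projection=True)
stream = StringIO()
parser.parse(stream, QasmQobjInstruction(name='h', qubits=[0]))
print(stream.getvalue())     # expected output: H q[0]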
https://github.com/qutech-delft/quantuminspire/blob/ebff819ae2938459c32bf86355fc567a2195925b/src/quantuminspire/qiskit/circuit_parser.py#L682-L703
import copy from io import StringIO from typing import Optional, Tuple, List import numpy as np from qiskit.qobj import QasmQobjInstruction from quantuminspire.exceptions import ApiError class CircuitToString: def __init__(self, full_state_projection: bool = True) -> None: self.bfunc_instructions: List[QasmQobjInstruction] = [] self.full_state_projection = full_state_projection @staticmethod def _gate_not_supported(_stream: StringIO, instruction: QasmQobjInstruction, _binary_control: Optional[str] = None) -> None: if hasattr(instruction, 'conditional'): raise ApiError(f'Conditional gate c-{instruction.name.lower()} not supported') raise ApiError(f'Gate {instruction.name.lower()} not supported') @staticmethod def _cz(stream: StringIO, instruction: QasmQobjInstruction) -> None: stream.write('CZ q[{0}], q[{1}]\n'.format(*instruction.qubits)) @staticmethod def _c_cz(stream: StringIO, instruction: QasmQobjInstruction, binary_control: str) -> None: stream.write('C-CZ {0}q[{1}], q[{2}]\n'.format(binary_control, *instruction.qubits)) @staticmethod def _cx(stream: StringIO, instruction: QasmQobjInstruction) -> None: stream.write('CNOT q[{0}], q[{1}]\n'.format(*instruction.qubits)) @staticmethod def _c_cx(stream: StringIO, instruction: QasmQobjInstruction, binary_control: str) -> None: stream.write('C-CNOT {0}q[{1}], q[{2}]\n'.format(binary_control, *instruction.qubits)) @staticmethod def _ccx(stream: StringIO, instruction: QasmQobjInstruction) -> None: stream.write('Toffoli q[{0}], q[{1}], q[{2}]\n'.format(*instruction.qubits)) @staticmethod def _c_ccx(stream: StringIO, instruction: QasmQobjInstruction, binary_control: str) -> None: stream.write('C-Toffoli {0}q[{1}], q[{2}], q[{3}]\n'.format(binary_control, *instruction.qubits)) @staticmethod def _h(stream: StringIO, instruction: QasmQobjInstruction) -> None: stream.write('H q[{0}]\n'.format(*instruction.qubits)) @staticmethod def _c_h(stream: StringIO, instruction: QasmQobjInstruction, binary_control: str) -> None: stream.write('C-H {0}q[{1}]\n'.format(binary_control, *instruction.qubits)) @staticmethod def _id(stream: StringIO, instruction: QasmQobjInstruction) -> None: stream.write('I q[{0}]\n'.format(*instruction.qubits)) @staticmethod def _c_id(stream: StringIO, instruction: QasmQobjInstruction, binary_control: str) -> None: stream.write('C-I {0}q[{1}]\n'.format(binary_control, *instruction.qubits)) @staticmethod def _s(stream: StringIO, instruction: QasmQobjInstruction) -> None: stream.write('S q[{0}]\n'.format(*instruction.qubits)) @staticmethod def _c_s(stream: StringIO, instruction: QasmQobjInstruction, binary_control: str) -> None: stream.write('C-S {0}q[{1}]\n'.format(binary_control, *instruction.qubits)) @staticmethod def _sdg(stream: StringIO, instruction: QasmQobjInstruction) -> None: stream.write('Sdag q[{0}]\n'.format(*instruction.qubits)) @staticmethod def _c_sdg(stream: StringIO, instruction: QasmQobjInstruction, binary_control: str) -> None: stream.write('C-Sdag {0}q[{1}]\n'.format(binary_control, *instruction.qubits)) @staticmethod def _swap(stream: StringIO, instruction: QasmQobjInstruction) -> None: stream.write('SWAP q[{0}], q[{1}]\n'.format(*instruction.qubits)) @staticmethod def _c_swap(stream: StringIO, instruction: QasmQobjInstruction, binary_control: str) -> None: stream.write('C-SWAP {0}q[{1}], q[{2}]\n'.format(binary_control, *instruction.qubits)) @staticmethod def _t(stream: StringIO, instruction: QasmQobjInstruction) -> None: stream.write('T q[{0}]\n'.format(*instruction.qubits)) @staticmethod def _c_t(stream: 
StringIO, instruction: QasmQobjInstruction, binary_control: str) -> None: stream.write('C-T {0}q[{1}]\n'.format(binary_control, *instruction.qubits)) @staticmethod def _tdg(stream: StringIO, instruction: QasmQobjInstruction) -> None: stream.write('Tdag q[{0}]\n'.format(*instruction.qubits)) @staticmethod def _c_tdg(stream: StringIO, instruction: QasmQobjInstruction, binary_control: str) -> None: stream.write('C-Tdag {0}q[{1}]\n'.format(binary_control, *instruction.qubits)) @staticmethod def _x(stream: StringIO, instruction: QasmQobjInstruction) -> None: stream.write('X q[{0}]\n'.format(*instruction.qubits)) @staticmethod def _c_x(stream: StringIO, instruction: QasmQobjInstruction, binary_control: str) -> None: stream.write('C-X {0}q[{1}]\n'.format(binary_control, *instruction.qubits)) @staticmethod def _y(stream: StringIO, instruction: QasmQobjInstruction) -> None: stream.write('Y q[{0}]\n'.format(*instruction.qubits)) @staticmethod def _c_y(stream: StringIO, instruction: QasmQobjInstruction, binary_control: str) -> None: stream.write('C-Y {0}q[{1}]\n'.format(binary_control, *instruction.qubits)) @staticmethod def _z(stream: StringIO, instruction: QasmQobjInstruction) -> None: stream.write('Z q[{0}]\n'.format(*instruction.qubits)) @staticmethod def _c_z(stream: StringIO, instruction: QasmQobjInstruction, binary_control: str) -> None: stream.write('C-Z {0}q[{1}]\n'.format(binary_control, *instruction.qubits)) @staticmethod def _r(stream: StringIO, instruction: QasmQobjInstruction, axis: str) -> None: angle_q0 = float(instruction.params[0]) stream.write('R{0} q[{1}], {2:.6f}\n'.format(axis, *instruction.qubits, angle_q0)) @staticmethod def _c_r(stream: StringIO, instruction: QasmQobjInstruction, axis: str, binary_control: str) -> None: angle_q0 = float(instruction.params[0]) stream.write('C-R{0} {1}q[{2}], {3:.6f}\n'.format(axis, binary_control, *instruction.qubits, angle_q0)) @staticmethod def _rx(stream: StringIO, instruction: QasmQobjInstruction) -> None: CircuitToString._r(stream, instruction, 'x') @staticmethod def _c_rx(stream: StringIO, instruction: QasmQobjInstruction, binary_control: str) -> None: CircuitToString._c_r(stream, instruction, 'x', binary_control) @staticmethod def _ry(stream: StringIO, instruction: QasmQobjInstruction) -> None: CircuitToString._r(stream, instruction, 'y') @staticmethod def _c_ry(stream: StringIO, instruction: QasmQobjInstruction, binary_control: str) -> None: CircuitToString._c_r(stream, instruction, 'y', binary_control) @staticmethod def _rz(stream: StringIO, instruction: QasmQobjInstruction) -> None: CircuitToString._r(stream, instruction, 'z') @staticmethod def _c_rz(stream: StringIO, instruction: QasmQobjInstruction, binary_control: str) -> None: CircuitToString._c_r(stream, instruction, 'z', binary_control) @staticmethod def _u(stream: StringIO, instruction: QasmQobjInstruction) -> None: CircuitToString._u3(stream, instruction) @staticmethod def _c_u(stream: StringIO, instruction: QasmQobjInstruction, binary_control: str) -> None: CircuitToString._c_u3(stream, instruction, binary_control) @staticmethod def _u1(stream: StringIO, instruction: QasmQobjInstruction) -> None: temp_instruction = copy.deepcopy(instruction) temp_instruction.params[0:0] = (0, 0) CircuitToString._u3(stream, temp_instruction) @staticmethod def _c_u1(stream: StringIO, instruction: QasmQobjInstruction, binary_control: str) -> None: temp_instruction = copy.deepcopy(instruction) temp_instruction.params[0:0] = (0, 0) CircuitToString._c_u3(stream, temp_instruction, binary_control) 
@staticmethod def _u2(stream: StringIO, instruction: QasmQobjInstruction) -> None: temp_instruction = copy.deepcopy(instruction) temp_instruction.params.insert(0, np.pi/2) CircuitToString._u3(stream, temp_instruction) @staticmethod def _c_u2(stream: StringIO, instruction: QasmQobjInstruction, binary_control: str) -> None: temp_instruction = copy.deepcopy(instruction) temp_instruction.params.insert(0, np.pi/2) CircuitToString._c_u3(stream, temp_instruction, binary_control) @staticmethod def _u3(stream: StringIO, instruction: QasmQobjInstruction) -> None: gates = ['Rz', 'Ry', 'Rz'] angles = list(float(instruction.params[i]) for i in [2, 0, 1]) index_q0 = [instruction.qubits[0]] * 3 for triplet in zip(gates, index_q0, angles): if triplet[2] != 0: stream.write('{0} q[{1}], {2:.6f}\n'.format(*triplet)) @staticmethod def _c_u3(stream: StringIO, instruction: QasmQobjInstruction, binary_control: str) -> None: gates = ['C-Rz', 'C-Ry', 'C-Rz'] binary_controls = [binary_control] * 3 angles = list(float(instruction.params[i]) for i in [2, 0, 1]) index_q0 = [instruction.qubits[0]] * 3 for quadruplets in zip(gates, binary_controls, index_q0, angles): if quadruplets[3] != 0: stream.write('{0} {1}q[{2}], {3:.6f}\n'.format(*quadruplets)) @staticmethod def _barrier(stream: StringIO, instruction: QasmQobjInstruction) -> None: @staticmethod def _c_barrier(stream: StringIO, instruction: QasmQobjInstruction, binary_control: str) -> None: def _measure(self, stream: StringIO, instruction: QasmQobjInstruction) -> None: if not self.full_state_projection: stream.write('measure q[{0}]\n'.format(*instruction.qubits)) @staticmethod def get_mask_data(mask: int) -> Tuple[int, int]: if mask == 0: return -1, 0 mask_length = 0 bit_value = 1 bit_nr = 0 while not mask & bit_value: bit_value <<= 1 bit_nr += 1 lowest_mask_bit = bit_nr while mask & bit_value: mask_length += 1 bit_value <<= 1 return lowest_mask_bit, mask_length def _parse_bin_ctrl_gate(self, stream: StringIO, instruction: QasmQobjInstruction) -> None: conditional_reg_idx = instruction.conditional conditional = next((x for x in self.bfunc_instructions if x.register == conditional_reg_idx), None) if conditional is None: raise ApiError(f'Conditional not found: reg_idx = {conditional_reg_idx}') self.bfunc_instructions.remove(conditional) conditional_type = conditional.relation if conditional_type != '==': raise ApiError(f'Conditional statement with relation {conditional_type} not supported') mask = int(conditional.mask, 16) if mask == 0: raise ApiError(f'Conditional statement {instruction.name.lower()} without a mask') lowest_mask_bit, mask_length = self.get_mask_data(mask) val = int(conditional.val, 16) masked_val = mask & val negate_zeroes_line = '' if masked_val != mask: negate_zeroes_line = 'not b[' + ','.join( str(i) for i in range(lowest_mask_bit, lowest_mask_bit + mask_length) if not (masked_val & (1 << i))) + ']\n' if mask_length == 1: binary_control = f'b[{lowest_mask_bit}], ' else: binary_control = f'b[{lowest_mask_bit}:{lowest_mask_bit + mask_length - 1}], ' with StringIO() as gate_stream: gate_name = f'_c_{instruction.name.lower()}' gate_function = getattr(self, gate_name, getattr(self, "_gate_not_supported")) gate_function(gate_stream, instruction, binary_control) line = gate_stream.getvalue() if len(line) != 0: stream.write(negate_zeroes_line) stream.write(line) stream.write(negate_zeroes_line)
Apache License 2.0
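The context above translates Qiskit QasmQobjInstruction objects into cQASM lines, and conditional (binary-controlled) gates rely on get_mask_data to locate which classical-register bits a bfunc mask selects. The snippet below is a minimal, self-contained sketch of that mask-decoding step, lifted from the logic shown above and runnable on its own; the example mask values are made up for illustration and are not part of the original source.

def get_mask_data(mask: int):
    # Return the index of the lowest set bit and the length of the
    # contiguous run of set bits starting there; (-1, 0) for an empty mask.
    if mask == 0:
        return -1, 0
    lowest_mask_bit = 0
    bit_value = 1
    while not mask & bit_value:
        bit_value <<= 1
        lowest_mask_bit += 1
    mask_length = 0
    while mask & bit_value:
        mask_length += 1
        bit_value <<= 1
    return lowest_mask_bit, mask_length

print(get_mask_data(0x0))   # (-1, 0): no bits selected
print(get_mask_data(0x6))   # (1, 2): bits b[1] and b[2] form the control
print(get_mask_data(0x8))   # (3, 1): a single control bit, b[3]

With lowest bit 1 and length 2, _parse_bin_ctrl_gate above would emit the control prefix 'b[1:2], ' in front of the gate line.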
jackgoffinet/autoencoded-vocal-analysis
ava/plotting/mmd_plots.py
estimate_median_sigma
python
def estimate_median_sigma(latent, n=10000, seed=42):
    np.random.seed(seed)
    arr = np.zeros(n)
    for i in range(n):
        i1, i2 = np.random.randint(len(latent)), np.random.randint(len(latent))
        arr[i] = np.sum(np.power(latent[i1]-latent[i2],2))
    np.random.seed(None)
    return np.sqrt(np.median(arr) + EPSILON)
Estimate the median pairwise distance for use as a kernel bandwidth.

Parameters
----------
latent : numpy.ndarray
    Latent means.
n : int, optional
    Number of random pairs to draw. Defaults to ``10000``.
seed : {None, int}
    Random seed. Defaults to ``42``.

Returns
-------
sigma : float
    Median pairwise Euclidean distance between sampled latent means.
https://github.com/jackgoffinet/autoencoded-vocal-analysis/blob/f10257a834efa9bc2bb9cb0e7e350e7a7798c6e7/ava/plotting/mmd_plots.py#L448-L472
__date__ = "August 2019 - July 2020" from itertools import repeat from joblib import Parallel, delayed from matplotlib.collections import PolyCollection from matplotlib.colors import cnames from matplotlib.colors import to_rgba import matplotlib.pyplot as plt plt.switch_backend('agg') import numpy as np import os from scipy.cluster.hierarchy import linkage, leaves_list from scipy.spatial.distance import squareform from sklearn.manifold import TSNE, MDS EPSILON = 1e-8 NEAR_WHITE_COLORS = ['silver', 'whitesmoke', 'floralwhite', 'aliceblue', 'lightgoldenrodyellow', 'lightgray', 'w', 'seashell', 'ivory', 'lemonchiffon','ghostwhite', 'white', 'beige', 'honeydew', 'azure', 'lavender', 'snow', 'linen', 'antiquewhite', 'papayawhip', 'oldlace', 'cornsilk', 'lightyellow', 'mintcream', 'lightcyan', 'lavenderblush', 'blanchedalmond', 'lightcoral'] COLOR_LIST = [] for name, hex in cnames.items(): if name not in NEAR_WHITE_COLORS: COLOR_LIST.append(name) COLOR_LIST = np.array(COLOR_LIST) np.random.seed(42) np.random.shuffle(COLOR_LIST) np.random.seed(None) def mmd_matrix_plot_DC(dc, condition_from_fn, mmd2_fn, condition_fn, parallel=False, load_data=False, cluster=True, alg='quadratic', max_n=None, sigma=None, cmap='Greys', colorbar=True, cax=None, ticks=[0.0,0.3], filename='mmd_matrix.pdf', ax=None, save_and_close=True): assert mmd2_fn is not None loaded = False if load_data: try: mmd2 = np.load(mmd2_fn) loaded = True except: print("Unable to load data!") if not loaded: mmd2, _ = _calculate_mmd2(dc, condition_from_fn, mmd2_fn=mmd2_fn, condition_fn=condition_fn, parallel=parallel, alg=alg, max_n=max_n, sigma=sigma) filename = os.path.join(dc.plots_dir, filename) mmd_matrix_plot(mmd2, ax=ax, save_and_close=save_and_close, cluster=cluster, cmap=cmap, filename=filename, colorbar=colorbar, cax=cax, ticks=ticks) def mmd_matrix_plot(mmd2, cluster=True, cmap='viridis', ax=None, colorbar=True, cax=None, ticks=[0.0,0.3], filename='mmd_matrix.pdf', save_and_close=True): mmd = _mmd2_to_mmd(mmd2) if cluster: mmd = _cluster_matrix(mmd) if ax is None: ax = plt.gca() im = ax.imshow(mmd, cmap=cmap) ax.axis('off') if colorbar: fig = plt.gcf() cbar = fig.colorbar(im, cax=cax, fraction=0.046, orientation="horizontal", ticks=ticks) cbar.solids.set_edgecolor("face") cbar.solids.set_rasterized(True) labels = ["{0:.1f}".format(round(tick,1)) for tick in ticks] cbar.ax.set_xticklabels(labels) if save_and_close: plt.savefig(filename) plt.close('all') def mmd_tsne_plot_DC(dc, mmd2_fn=None, condition_fn=None, mmd2=None, conditions=None, perplexity=30.0, s=4.0, alpha=0.8, label_func=None, ax=None, save_and_close=True, filename='mmd_tsne.pdf', load_data=False): if load_data: assert mmd2_fn is not None and condition_fn is not None try: mmd2 = np.load(mmd2_fn) conditions = np.load(condition_fn) except: print("Unable to load data!") return else: assert mmd2 is not None and conditions is not None mmd2 = np.clip(mmd2, 0, None) all_conditions = list(np.unique(conditions)) colors = [COLOR_LIST[i%len(COLOR_LIST)] for i in conditions] all_colors = [COLOR_LIST[i%len(COLOR_LIST)] for i in all_conditions] transform = TSNE(n_components=2, random_state=42, metric='precomputed', method='exact', perplexity=perplexity) embed = transform.fit_transform(mmd2) if ax is None: ax = plt.gca() poly_colors = [] poly_vals = [] for i in range(len(conditions)-1): for j in range(i+1, len(conditions)): if conditions[i] == conditions[j]: color = to_rgba(colors[i], alpha=0.7) ax.plot([embed[i,0],embed[j,0]], [embed[i,1],embed[j,1]], c=color, lw=0.5) for k in 
range(j+1, len(conditions)): if conditions[k] == conditions[j]: arr = np.stack([embed[i], embed[j], embed[k]]) poly_colors.append(to_rgba(colors[i], alpha=0.2)) poly_vals.append(arr) pc = PolyCollection(poly_vals, color=poly_colors) ax.add_collection(pc) ax.scatter(embed[:,0], embed[:,1], color=colors, s=s, alpha=alpha) if label_func is not None: for i in range(len(embed)): ax.annotate(label_func(conditions[i]), embed[i]) plt.axis('off') if save_and_close: plt.savefig(os.path.join(dc.plots_dir, filename)) plt.close('all') def _estimate_mmd2(latent, i1, i2, sigma=None, max_n=None, seed=None): if sigma is None: sigma = estimate_median_sigma(latent) A = -0.5 / (sigma**2) n1, n2 = len(i1), len(i2) if max_n is not None: np.random.seed(seed) n1, n2 = min(max_n,n1), min(max_n,n2) if n1 < len(i1): np.random.shuffle(i1) i1 = i1[:n1] if n2 < len(i2): np.random.shuffle(i2) i2 = i2[:n2] np.random.seed(None) term_1 = 0.0 for i in range(n1-1): for j in range(i+1,n1): dist = np.sum(np.power(latent[i1[i]] - latent[i1[j]], 2)) term_1 += np.exp(A * dist) term_1 *= 2/(n1*(n1-1)) term_2 = 0.0 for i in range(n2-1): for j in range(i+1,n2): dist = np.sum(np.power(latent[i2[i]] - latent[i2[j]], 2)) term_2 += np.exp(A * dist) term_2 *= 2/(n2*(n2-1)) term_3 = 0.0 for i in range(n1): for j in range(n2): dist = np.sum(np.power(latent[i1[i]] - latent[i2[j]], 2)) term_3 += np.exp(A * dist) term_3 *= 2/(n1*n2) return term_1 + term_2 - term_3 def _estimate_mmd2_linear_time(latent, i1, i2, sigma=None): if sigma is None: sigma = estimate_median_sigma(latent) A = -0.5 / (sigma**2) n = min(len(i1), len(i2)) m = n // 2 assert m > 0 k = lambda x,y: np.exp(A * np.sum(np.power(x-y,2))) h = lambda x1,y1,x2,y2: k(x1,x2)+k(y1,y2)-k(x1,y2)-k(x2,y1) term = 0.0 for i in range(m): term += h(latent[i1[2*i]], latent[i2[2*i]], latent[i1[2*i+1]], latent[i2[2*i+1]]) return term / m def _cluster_matrix(matrix, index=None): if index is None: index = len(matrix) // 2 flat_dist1 = squareform(matrix[:index,:index]) Z1 = linkage(flat_dist1, optimal_ordering=True) leaves1 = leaves_list(Z1) flat_dist2 = squareform(matrix[index:,index:]) Z2 = linkage(flat_dist2, optimal_ordering=True) leaves2 = leaves_list(Z2) + index leaves = np.concatenate([leaves1, leaves2]) new_matrix = np.zeros_like(matrix) for i in range(len(matrix)-1): for j in range(i,len(matrix)): temp = matrix[leaves[i],leaves[j]] new_matrix[i,j] = temp new_matrix[j,i] = temp return new_matrix def _calculate_mmd2(dc, condition_from_fn, mmd2_fn=None, condition_fn=None, parallel=False, alg='quadratic', max_n=None, sigma=None, verbose=True): assert alg in ['linear', 'quadratic'] assert mmd2_fn is not None if verbose: print("Estimating an MMD matrix...") print("\talg:", alg) print("\tparallel:", parallel) print("\tmax_n:", max_n) latent = dc.request('latent_means') audio_fns = dc.request('audio_filenames') condition = np.array([condition_from_fn(str(i)) for i in audio_fns], dtype='int') all_conditions = np.unique(condition) n = len(all_conditions) result = np.zeros((n,n)) if sigma is None: sigma = estimate_median_sigma(latent) if verbose: print("\tconditions found:", n) print("\tsigma:", sigma) if parallel: i_vals, j_vals = [], [] for i in range(n-1): for j in range(i+1,n): i_vals.append(i) j_vals.append(j) gen = zip(i_vals, j_vals, repeat(condition), repeat(all_conditions), repeat(alg), repeat(latent), repeat(sigma), repeat(max_n)) n_jobs = os.cpu_count() temp_results = Parallel(n_jobs=n_jobs)(delayed(_mmd2_helper)(*args) for args in gen) for i, j, mmd2 in temp_results: result[i,j] = mmd2 
result[j,i] = mmd2 else: for i in range(n-1): for j in range(i+1, n): i1 = np.argwhere(condition == all_conditions[i]).flatten() i2 = np.argwhere(condition == all_conditions[j]).flatten() if alg == 'linear': temp = _estimate_mmd2_linear_time(latent, i1, i2, sigma=sigma) elif alg == 'quadratic': temp = _estimate_mmd2(latent, i1, i2, sigma=sigma, max_n=max_n) else: raise NotImplementedError result[i,j] = temp result[j,i] = temp if mmd2_fn is not None: if verbose: print("\tSaving MMD^2 to:", mmd2_fn) np.save(mmd2_fn, result) if condition_fn is not None: if verbose: print("\tSaving conditions to:", condition_fn) np.save(condition_fn, all_conditions) if verbose: print("\tDone.") return result, all_conditions def _mmd2_helper(i, j, condition, all_conditions, alg, latent, sigma, max_n): i1 = np.argwhere(condition == all_conditions[i]).flatten() i2 = np.argwhere(condition == all_conditions[j]).flatten() if alg == 'linear': mmd2 = _estimate_mmd2_linear_time(latent, i1, i2, sigma=sigma) else: mmd2 = _estimate_mmd2(latent, i1, i2, sigma=sigma, max_n=max_n) print(i, j, mmd2, flush=True) return i, j, mmd2
MIT License
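As a usage sketch for estimate_median_sigma above: the returned value is the median-heuristic bandwidth that _estimate_mmd2 and _estimate_mmd2_linear_time in the context plug into their Gaussian kernel as A = -0.5 / sigma**2. The latent array below is random stand-in data rather than real autoencoder output, and the snippet assumes estimate_median_sigma (with its EPSILON constant) is already in scope.

import numpy as np

# Stand-in latent means: 500 syllables embedded in 32 dimensions (illustrative only).
latent = np.random.randn(500, 32)

sigma = estimate_median_sigma(latent, n=5000)
A = -0.5 / (sigma ** 2)

# Gaussian kernel between two latent means, as used inside _estimate_mmd2.
k = np.exp(A * np.sum(np.power(latent[0] - latent[1], 2)))
print(sigma, k)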
makgyver/rectorch
rectorch/nets.py
CDAE_net.init_weights
python
def init_weights(self):
    xavier_init(self.enc_layer.weight)
    normal_init(self.enc_layer.bias)
    xavier_init(self.dec_layer.weight)
    normal_init(self.dec_layer.bias)
Initialize the weights of the network.

Weights are initialized with the :py:func:`torch.nn.init.xavier_uniform_`
initializer, while biases are initialized with the :py:func:`torch.nn.init.normal_`
initializer.
https://github.com/makgyver/rectorch/blob/f4292d6baf381a8a356260ad87b616fcf38dbf51/rectorch/nets.py#L163-L172
import logging

import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.nn.init import normal_ as normal_init
from torch.nn.init import xavier_uniform_ as xavier_init

__all__ = ['AE_net', 'MultiDAE_net', 'VAE_net', 'MultiVAE_net', 'CMultiVAE_net',
           'CFGAN_G_net', 'CFGAN_D_net', 'SVAE_net']

logger = logging.getLogger(__name__)


class AE_net(nn.Module):
    def __init__(self, dec_dims, enc_dims=None):
        super(AE_net, self).__init__()
        if enc_dims:
            self.enc_dims = enc_dims
        else:
            self.enc_dims = dec_dims[::-1]
        self.dec_dims = dec_dims

    def encode(self, x):
        raise NotImplementedError()

    def decode(self, z):
        raise NotImplementedError()

    def forward(self, x):
        z = self.encode(x)
        return self.decode(z)

    def init_weights(self):
        raise NotImplementedError()


class CDAE_net(AE_net):
    def __init__(self, n_items, n_users, latent_size=50, dropout=0.5):
        super(CDAE_net, self).__init__([latent_size, n_items], [n_items+n_users, latent_size])
        self.dropout = nn.Dropout(dropout)
        self.n_items = n_items
        self.enc_layer = nn.Linear(self.enc_dims[0], self.enc_dims[1])
        self.dec_layer = nn.Linear(self.dec_dims[0], self.dec_dims[1])
        self.init_weights()

    def encode(self, x):
        if self.training:
            x[:self.n_items] *= 1. / (1.-self.dropout.p)
            x[:self.n_items] = self.dropout(x[:self.n_items])
        x = torch.sigmoid(self.enc_layer(x))
        return x

    def decode(self, z):
        return torch.sigmoid(self.dec_layer(z))
MIT License
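A small usage sketch for the CDAE_net shown in this record's context. The module path rectorch.nets follows the record's function_path; the item/user counts and the interaction vector are hypothetical, and the forward pass is run in eval mode so the training-time input dropout is skipped.

import torch
from rectorch.nets import CDAE_net  # module path assumed from rectorch/nets.py

# Hypothetical catalogue: 1000 items, 200 users, 50-dimensional bottleneck.
net = CDAE_net(n_items=1000, n_users=200, latent_size=50, dropout=0.5)
net.init_weights()  # Xavier-uniform weights, normally distributed biases

# Input = item-interaction vector concatenated with a one-hot user vector.
x = torch.zeros(1000 + 200)
x[:10] = 1.0        # items this user interacted with
x[1000 + 3] = 1.0   # one-hot encoding of user id 3

net.eval()                      # disable the training-time input dropout
scores = net(x)                 # sigmoid reconstruction over the 1000 items
print(scores.shape)             # torch.Size([1000])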
windelbouwman/ppci
ppci/binutils/dbg/cli.py
DebugCli.do_stepi
python
def do_stepi(self, _):
    self.debugger.step()
Single instruction step the debugger
https://github.com/windelbouwman/ppci/blob/915c069e0667042c085ec42c78e9e3c9a5295324/ppci/binutils/dbg/cli.py#L166-L168
import cmd import binascii from threading import Lock from ... import __version__ as ppci_version from ...common import str2int, CompilerError from .debug_driver import DebugState import logging import os import sys if os.name == "nt": from colorama import init def pos(stdout, y, x): stdout.write("\x1b[%d;%dH" % (y, x)) def savepos(stdout): stdout.write("\x1b[s") def restorepos(stdout): stdout.write("\x1b[u") CMDLINE = 12 screenlock = Lock() def clearscreen(stdout): stdout.write("\033[2J\033[1;1H") def cleartocursor(stdout): stdout.write("\033[1J") def clearaftercursor(stdout): stdout.write("\033[J") def print_file_line(stdout, filename, lineno): lines = open(filename).read().splitlines() s = "\033[37m\033[1mFile:{}\n".format(filename) stdout.write(s) s = "Line: [{} of {}]\n".format(lineno, len(lines)) stdout.write(s) s = "\033[0m\n" stdout.write(s) s = "\033[0m\n" stdout.write(s) for i in range(lineno - 3, lineno + 3): if i < 1: stdout.write("\n") elif i > len(lines): stdout.write("\n") elif i == lineno: s = "\033[33m\033[1m{}\033[32m->{}\033[0m\033[39m\n".format( str(i).rjust(4), lines[i - 1] ) stdout.write(s) else: s = "\033[33m\033[1m{}\033[0m\033[39m {}\n".format( str(i).rjust(4), lines[i - 1] ) stdout.write(s) class Proxy(object): logger = logging.getLogger("dbg") def __init__(self, debugger): self.buffer = [] self.debugger = debugger self.stdout = sys.stdout def write(self, data): if len(data) > 0: if data[-1] != "\n": self.buffer.append(data) else: if len(self.buffer): self.flush() with screenlock: self.stdout.write(data) self.logger.debug( "stdout writing: %s, [%s]", data, data.encode("utf-8").hex(), ) def flush(self): text = "".join(self.buffer) self.buffer = [] with screenlock: self.logger.debug( "stdout flushing: %s, [%s] ", text, text.encode("utf-8").hex() ) self.stdout.write(text) self.stdout.flush() class DebugCli(cmd.Cmd): prompt = "DBG>" intro = "ppci interactive debugger" def __init__(self, debugger, showsource=False): self.Proxy = Proxy(debugger) self.stdout_ori = sys.stdout sys.stdout = self.Proxy super().__init__(stdout=self.Proxy) self.debugger = debugger self.use_rawinput = False self.showsource = showsource if self.showsource is True: if os.name == "nt": init() clearscreen(sys.stdout) pos(sys.stdout, 1, 1) if self.debugger.is_running: print("\033[37m\033[1mTarget State: RUNNING") else: print("\033[37m\033[1mTarget State: STOPPED") pos(sys.stdout, CMDLINE, 1) self.debugger.events.on_stop += self.updatesourceview self.debugger.events.on_start += self.updatestatus def do_quit(self, _): sys.stdout = self.stdout_ori raise SystemExit return True do_q = do_quit def do_info(self, _): print("Architecture: ", self.debugger.arch) print("Debugger: ", self.debugger) print("Debug driver: ", self.debugger.driver) print("ppci version: ", ppci_version) text_status = { DebugState.STOPPED: "Stopped", DebugState.RUNNING: "Running", } print("Status: ", text_status[self.debugger.status]) def do_run(self, _): self.debugger.run() def do_step(self, _): self.debugger.step() do_s = do_step
BSD 2-Clause Simplified License
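For the do_stepi record above: DebugCli inherits from cmd.Cmd, so the method name alone is what binds it to the interactive "stepi" command. The stand-alone sketch below mimics that wiring with a fake debugger object; FakeDebugger and MiniCli are illustrative names, not part of ppci.

import cmd

class FakeDebugger:
    """Minimal stand-in for ppci's debugger (illustrative only)."""
    def step(self):
        print("stepped one instruction")

class MiniCli(cmd.Cmd):
    prompt = "DBG>"

    def __init__(self, debugger):
        super().__init__()
        self.debugger = debugger

    def do_stepi(self, _):
        """Single instruction step the debugger"""
        self.debugger.step()

cli = MiniCli(FakeDebugger())
cli.onecmd("stepi")   # cmd.Cmd routes the "stepi" command to do_stepi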