query
stringlengths 5
1.23k
| positive
stringlengths 53
15.2k
| id_
int64 0
252k
| task_name
stringlengths 87
242
| negative
sequencelengths 20
553
|
---|---|---|---|---|
Gets the InternalLinkSets API client . | def internal_link_sets ( self ) : if not self . __internal_link_sets : self . __internal_link_sets = InternalLinkSets ( self . __connection ) return self . __internal_link_sets | 251,600 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L693-L702 | [
"def",
"fromgroups",
"(",
"args",
")",
":",
"from",
"jcvi",
".",
"formats",
".",
"bed",
"import",
"Bed",
"p",
"=",
"OptionParser",
"(",
"fromgroups",
".",
"__doc__",
")",
"opts",
",",
"args",
"=",
"p",
".",
"parse_args",
"(",
"args",
")",
"if",
"len",
"(",
"args",
")",
"<",
"2",
":",
"sys",
".",
"exit",
"(",
"not",
"p",
".",
"print_help",
"(",
")",
")",
"groupsfile",
"=",
"args",
"[",
"0",
"]",
"bedfiles",
"=",
"args",
"[",
"1",
":",
"]",
"beds",
"=",
"[",
"Bed",
"(",
"x",
")",
"for",
"x",
"in",
"bedfiles",
"]",
"fp",
"=",
"open",
"(",
"groupsfile",
")",
"groups",
"=",
"[",
"row",
".",
"strip",
"(",
")",
".",
"split",
"(",
"\",\"",
")",
"for",
"row",
"in",
"fp",
"]",
"for",
"b1",
",",
"b2",
"in",
"product",
"(",
"beds",
",",
"repeat",
"=",
"2",
")",
":",
"extract_pairs",
"(",
"b1",
",",
"b2",
",",
"groups",
")"
] |
Gets the LogicalInterconnectGroups API client . | def logical_interconnect_groups ( self ) : if not self . __logical_interconnect_groups : self . __logical_interconnect_groups = LogicalInterconnectGroups ( self . __connection ) return self . __logical_interconnect_groups | 251,601 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L705-L715 | [
"def",
"get_needful_files",
"(",
"self",
")",
":",
"manifest",
"=",
"self",
".",
"storage",
".",
"load_manifest",
"(",
")",
"if",
"self",
".",
"keep_unhashed_files",
":",
"if",
"PY3",
":",
"needful_files",
"=",
"set",
"(",
"manifest",
".",
"keys",
"(",
")",
"|",
"manifest",
".",
"values",
"(",
")",
")",
"else",
":",
"needful_files",
"=",
"set",
"(",
"manifest",
".",
"keys",
"(",
")",
"+",
"manifest",
".",
"values",
"(",
")",
")",
"needful_files",
"=",
"{",
"self",
".",
"storage",
".",
"clean_name",
"(",
"file",
")",
"for",
"file",
"in",
"needful_files",
"}",
"else",
":",
"needful_files",
"=",
"set",
"(",
"manifest",
".",
"values",
"(",
")",
")",
"return",
"{",
"self",
".",
"process_file",
"(",
"file",
")",
"for",
"file",
"in",
"needful_files",
"}"
] |
Gets the SasLogicalInterconnects API client . | def sas_logical_interconnects ( self ) : if not self . __sas_logical_interconnects : self . __sas_logical_interconnects = SasLogicalInterconnects ( self . __connection ) return self . __sas_logical_interconnects | 251,602 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L728-L737 | [
"def",
"replace_emoticons",
"(",
"content",
",",
"excluded_markups",
")",
":",
"if",
"not",
"excluded_markups",
":",
"return",
"regexp_replace_emoticons",
"(",
"content",
")",
"excluded_markups",
"=",
"excluded_markups",
".",
"split",
"(",
"','",
")",
"+",
"[",
"'[document]'",
"]",
"soup",
"=",
"BeautifulSoup",
"(",
"content",
",",
"'html.parser'",
")",
"for",
"content_string",
"in",
"list",
"(",
"soup",
".",
"strings",
")",
":",
"if",
"content_string",
".",
"parent",
".",
"name",
"not",
"in",
"excluded_markups",
":",
"replaced_content_string",
"=",
"regexp_replace_emoticons",
"(",
"content_string",
")",
"if",
"content_string",
"!=",
"replaced_content_string",
":",
"content_string",
".",
"replace_with",
"(",
"BeautifulSoup",
"(",
"replaced_content_string",
",",
"'html.parser'",
")",
")",
"return",
"str",
"(",
"soup",
")"
] |
Gets the LogicalDownlinks API client . | def logical_downlinks ( self ) : if not self . __logical_downlinks : self . __logical_downlinks = LogicalDownlinks ( self . __connection ) return self . __logical_downlinks | 251,603 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L740-L750 | [
"def",
"parse_wrap_facets",
"(",
"facets",
")",
":",
"valid_forms",
"=",
"[",
"'~ var1'",
",",
"'~ var1 + var2'",
"]",
"error_msg",
"=",
"(",
"\"Valid formula for 'facet_wrap' look like\"",
"\" {}\"",
".",
"format",
"(",
"valid_forms",
")",
")",
"if",
"isinstance",
"(",
"facets",
",",
"(",
"list",
",",
"tuple",
")",
")",
":",
"return",
"facets",
"if",
"not",
"isinstance",
"(",
"facets",
",",
"str",
")",
":",
"raise",
"PlotnineError",
"(",
"error_msg",
")",
"if",
"'~'",
"in",
"facets",
":",
"variables_pattern",
"=",
"r'(\\w+(?:\\s*\\+\\s*\\w+)*|\\.)'",
"pattern",
"=",
"r'\\s*~\\s*{0}\\s*'",
".",
"format",
"(",
"variables_pattern",
")",
"match",
"=",
"re",
".",
"match",
"(",
"pattern",
",",
"facets",
")",
"if",
"not",
"match",
":",
"raise",
"PlotnineError",
"(",
"error_msg",
")",
"facets",
"=",
"[",
"var",
".",
"strip",
"(",
")",
"for",
"var",
"in",
"match",
".",
"group",
"(",
"1",
")",
".",
"split",
"(",
"'+'",
")",
"]",
"elif",
"re",
".",
"match",
"(",
"r'\\w+'",
",",
"facets",
")",
":",
"# allow plain string as the variable name",
"facets",
"=",
"[",
"facets",
"]",
"else",
":",
"raise",
"PlotnineError",
"(",
"error_msg",
")",
"return",
"facets"
] |
Gets the PowerDevices API client . | def power_devices ( self ) : if not self . __power_devices : self . __power_devices = PowerDevices ( self . __connection ) return self . __power_devices | 251,604 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L753-L762 | [
"def",
"_return_container_objects",
"(",
"self",
")",
":",
"container_objects",
"=",
"self",
".",
"job_args",
".",
"get",
"(",
"'object'",
")",
"if",
"container_objects",
":",
"return",
"True",
",",
"[",
"{",
"'container_object'",
":",
"i",
"}",
"for",
"i",
"in",
"container_objects",
"]",
"container_objects",
"=",
"self",
".",
"job_args",
".",
"get",
"(",
"'objects_file'",
")",
"if",
"container_objects",
":",
"container_objects",
"=",
"os",
".",
"path",
".",
"expanduser",
"(",
"container_objects",
")",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"container_objects",
")",
":",
"with",
"open",
"(",
"container_objects",
")",
"as",
"f",
":",
"return",
"True",
",",
"[",
"{",
"'container_object'",
":",
"i",
".",
"rstrip",
"(",
"'\\n'",
")",
"}",
"for",
"i",
"in",
"f",
".",
"readlines",
"(",
")",
"]",
"container_objects",
"=",
"self",
".",
"_list_contents",
"(",
")",
"pattern_match",
"=",
"self",
".",
"job_args",
".",
"get",
"(",
"'pattern_match'",
")",
"if",
"pattern_match",
":",
"container_objects",
"=",
"self",
".",
"match_filter",
"(",
"idx_list",
"=",
"container_objects",
",",
"pattern",
"=",
"pattern_match",
",",
"dict_type",
"=",
"True",
",",
"dict_key",
"=",
"'name'",
")",
"# Reformat list for processing",
"if",
"container_objects",
"and",
"isinstance",
"(",
"container_objects",
"[",
"0",
"]",
",",
"dict",
")",
":",
"return",
"False",
",",
"self",
".",
"_return_deque",
"(",
"[",
"{",
"'container_object'",
":",
"i",
"[",
"'name'",
"]",
"}",
"for",
"i",
"in",
"container_objects",
"]",
")",
"else",
":",
"return",
"False",
",",
"self",
".",
"_return_deque",
"(",
")"
] |
Gets the Unmanaged Devices API client . | def unmanaged_devices ( self ) : if not self . __unmanaged_devices : self . __unmanaged_devices = UnmanagedDevices ( self . __connection ) return self . __unmanaged_devices | 251,605 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L765-L774 | [
"def",
"get_stack_var",
"(",
"name",
",",
"depth",
"=",
"0",
")",
":",
"base_frame",
"=",
"_get_base_frame",
"(",
"depth",
")",
"if",
"not",
"base_frame",
":",
"# Frame not found",
"raise",
"RuntimeError",
"(",
"\"Base frame not found\"",
")",
"# Lookup up the frame stack starting at the base frame for the fiber state",
"level",
"=",
"0",
"frame",
"=",
"base_frame",
"while",
"frame",
":",
"locals",
"=",
"frame",
".",
"f_locals",
"value",
"=",
"locals",
".",
"get",
"(",
"name",
")",
"if",
"value",
"is",
"not",
"None",
":",
"if",
"level",
">",
"0",
":",
"# Copy a reference of the fiber state in the base frame",
"base_frame",
".",
"f_locals",
"[",
"name",
"]",
"=",
"value",
"return",
"value",
"if",
"locals",
".",
"get",
"(",
"SECTION_BOUNDARY_TAG",
")",
":",
"return",
"None",
"frame",
"=",
"frame",
".",
"f_back",
"level",
"+=",
"1",
"return",
"None"
] |
Gets the Racks API client . | def racks ( self ) : if not self . __racks : self . __racks = Racks ( self . __connection ) return self . __racks | 251,606 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L777-L786 | [
"def",
"get_from_cache",
"(",
"url",
":",
"str",
",",
"cache_dir",
":",
"str",
"=",
"None",
")",
"->",
"str",
":",
"if",
"cache_dir",
"is",
"None",
":",
"cache_dir",
"=",
"CACHE_DIRECTORY",
"os",
".",
"makedirs",
"(",
"cache_dir",
",",
"exist_ok",
"=",
"True",
")",
"# Get eTag to add to filename, if it exists.",
"if",
"url",
".",
"startswith",
"(",
"\"s3://\"",
")",
":",
"etag",
"=",
"s3_etag",
"(",
"url",
")",
"else",
":",
"response",
"=",
"requests",
".",
"head",
"(",
"url",
",",
"allow_redirects",
"=",
"True",
")",
"if",
"response",
".",
"status_code",
"!=",
"200",
":",
"raise",
"IOError",
"(",
"\"HEAD request failed for url {} with status code {}\"",
".",
"format",
"(",
"url",
",",
"response",
".",
"status_code",
")",
")",
"etag",
"=",
"response",
".",
"headers",
".",
"get",
"(",
"\"ETag\"",
")",
"filename",
"=",
"url_to_filename",
"(",
"url",
",",
"etag",
")",
"# get cache path to put the file",
"cache_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"cache_dir",
",",
"filename",
")",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"cache_path",
")",
":",
"# Download to temporary file, then copy to cache dir once finished.",
"# Otherwise you get corrupt cache entries if the download gets interrupted.",
"with",
"tempfile",
".",
"NamedTemporaryFile",
"(",
")",
"as",
"temp_file",
":",
"logger",
".",
"info",
"(",
"\"%s not found in cache, downloading to %s\"",
",",
"url",
",",
"temp_file",
".",
"name",
")",
"# GET file object",
"if",
"url",
".",
"startswith",
"(",
"\"s3://\"",
")",
":",
"s3_get",
"(",
"url",
",",
"temp_file",
")",
"else",
":",
"http_get",
"(",
"url",
",",
"temp_file",
")",
"# we are copying the file before closing it, so flush to avoid truncation",
"temp_file",
".",
"flush",
"(",
")",
"# shutil.copyfileobj() starts at the current position, so go to the start",
"temp_file",
".",
"seek",
"(",
"0",
")",
"logger",
".",
"info",
"(",
"\"copying %s to cache at %s\"",
",",
"temp_file",
".",
"name",
",",
"cache_path",
")",
"with",
"open",
"(",
"cache_path",
",",
"'wb'",
")",
"as",
"cache_file",
":",
"shutil",
".",
"copyfileobj",
"(",
"temp_file",
",",
"cache_file",
")",
"logger",
".",
"info",
"(",
"\"creating metadata file for %s\"",
",",
"cache_path",
")",
"meta",
"=",
"{",
"'url'",
":",
"url",
",",
"'etag'",
":",
"etag",
"}",
"meta_path",
"=",
"cache_path",
"+",
"'.json'",
"with",
"open",
"(",
"meta_path",
",",
"'w'",
")",
"as",
"meta_file",
":",
"json",
".",
"dump",
"(",
"meta",
",",
"meta_file",
")",
"logger",
".",
"info",
"(",
"\"removing temp file %s\"",
",",
"temp_file",
".",
"name",
")",
"return",
"cache_path"
] |
Gets the SanManagers API client . | def san_managers ( self ) : if not self . __san_managers : self . __san_managers = SanManagers ( self . __connection ) return self . __san_managers | 251,607 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L789-L798 | [
"def",
"future_set_exception_unless_cancelled",
"(",
"future",
":",
"\"Union[futures.Future[_T], Future[_T]]\"",
",",
"exc",
":",
"BaseException",
")",
"->",
"None",
":",
"if",
"not",
"future",
".",
"cancelled",
"(",
")",
":",
"future",
".",
"set_exception",
"(",
"exc",
")",
"else",
":",
"app_log",
".",
"error",
"(",
"\"Exception after Future was cancelled\"",
",",
"exc_info",
"=",
"exc",
")"
] |
Gets the Endpoints API client . | def endpoints ( self ) : if not self . __endpoints : self . __endpoints = Endpoints ( self . __connection ) return self . __endpoints | 251,608 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L801-L810 | [
"def",
"query",
"(",
"self",
",",
"q",
",",
"data",
"=",
"None",
",",
"union",
"=",
"True",
",",
"limit",
"=",
"None",
")",
":",
"if",
"data",
":",
"q",
"=",
"self",
".",
"_apply_handlebars",
"(",
"q",
",",
"data",
",",
"union",
")",
"if",
"limit",
":",
"q",
"=",
"self",
".",
"_assign_limit",
"(",
"q",
",",
"limit",
")",
"return",
"pd",
".",
"read_sql",
"(",
"q",
",",
"self",
".",
"con",
")"
] |
Gets the StorageSystems API client . | def storage_systems ( self ) : if not self . __storage_systems : self . __storage_systems = StorageSystems ( self . __connection ) return self . __storage_systems | 251,609 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L833-L842 | [
"def",
"fromgroups",
"(",
"args",
")",
":",
"from",
"jcvi",
".",
"formats",
".",
"bed",
"import",
"Bed",
"p",
"=",
"OptionParser",
"(",
"fromgroups",
".",
"__doc__",
")",
"opts",
",",
"args",
"=",
"p",
".",
"parse_args",
"(",
"args",
")",
"if",
"len",
"(",
"args",
")",
"<",
"2",
":",
"sys",
".",
"exit",
"(",
"not",
"p",
".",
"print_help",
"(",
")",
")",
"groupsfile",
"=",
"args",
"[",
"0",
"]",
"bedfiles",
"=",
"args",
"[",
"1",
":",
"]",
"beds",
"=",
"[",
"Bed",
"(",
"x",
")",
"for",
"x",
"in",
"bedfiles",
"]",
"fp",
"=",
"open",
"(",
"groupsfile",
")",
"groups",
"=",
"[",
"row",
".",
"strip",
"(",
")",
".",
"split",
"(",
"\",\"",
")",
"for",
"row",
"in",
"fp",
"]",
"for",
"b1",
",",
"b2",
"in",
"product",
"(",
"beds",
",",
"repeat",
"=",
"2",
")",
":",
"extract_pairs",
"(",
"b1",
",",
"b2",
",",
"groups",
")"
] |
Gets the StoragePools API client . | def storage_pools ( self ) : if not self . __storage_pools : self . __storage_pools = StoragePools ( self . __connection ) return self . __storage_pools | 251,610 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L845-L854 | [
"def",
"_export_corpus",
"(",
"self",
")",
":",
"# bin/mallet import-file --input /Users/erickpeirson/mycorpus_docs.txt",
"# --output mytopic-input.mallet --keep-sequence --remove-stopwords",
"if",
"not",
"os",
".",
"path",
".",
"exists",
"(",
"self",
".",
"mallet_bin",
")",
":",
"raise",
"IOError",
"(",
"\"MALLET path invalid or non-existent.\"",
")",
"self",
".",
"input_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"temp",
",",
"\"input.mallet\"",
")",
"exit",
"=",
"subprocess",
".",
"call",
"(",
"[",
"self",
".",
"mallet_bin",
",",
"'import-file'",
",",
"'--input'",
",",
"self",
".",
"corpus_path",
",",
"'--output'",
",",
"self",
".",
"input_path",
",",
"'--keep-sequence'",
",",
"# Required for LDA.",
"'--remove-stopwords'",
"]",
")",
"# Probably redundant.",
"if",
"exit",
"!=",
"0",
":",
"msg",
"=",
"\"MALLET import-file failed with exit code {0}.\"",
".",
"format",
"(",
"exit",
")",
"raise",
"RuntimeError",
"(",
"msg",
")"
] |
Gets the StorageVolumeTemplates API client . | def storage_volume_templates ( self ) : if not self . __storage_volume_templates : self . __storage_volume_templates = StorageVolumeTemplates ( self . __connection ) return self . __storage_volume_templates | 251,611 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L857-L866 | [
"def",
"get",
"(",
"self",
",",
"id",
"=",
"None",
",",
"name",
"=",
"None",
")",
":",
"# Validate arguments - use an xor",
"if",
"not",
"(",
"id",
"is",
"None",
")",
"^",
"(",
"name",
"is",
"None",
")",
":",
"raise",
"ValueError",
"(",
"\"Either id or name must be set (but not both!)\"",
")",
"# If it's just ID provided, call the parent function",
"if",
"id",
"is",
"not",
"None",
":",
"return",
"super",
"(",
"TaskQueueManager",
",",
"self",
")",
".",
"get",
"(",
"id",
"=",
"id",
")",
"# Try getting the task queue by name",
"return",
"self",
".",
"list",
"(",
"filters",
"=",
"{",
"\"name\"",
":",
"name",
"}",
")",
"[",
"0",
"]"
] |
Gets the StorageVolumeAttachments API client . | def storage_volume_attachments ( self ) : if not self . __storage_volume_attachments : self . __storage_volume_attachments = StorageVolumeAttachments ( self . __connection ) return self . __storage_volume_attachments | 251,612 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L869-L878 | [
"def",
"set_forbidden_uptodate",
"(",
"self",
",",
"uptodate",
")",
":",
"if",
"self",
".",
"_forbidden_uptodate",
"==",
"uptodate",
":",
"return",
"self",
".",
"_forbidden_uptodate",
"=",
"uptodate",
"self",
".",
"invalidateFilter",
"(",
")"
] |
Gets the FirmwareDrivers API client . | def firmware_drivers ( self ) : if not self . __firmware_drivers : self . __firmware_drivers = FirmwareDrivers ( self . __connection ) return self . __firmware_drivers | 251,613 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L881-L890 | [
"def",
"check_value",
"(",
"config",
",",
"section",
",",
"option",
",",
"jinja_pattern",
"=",
"JINJA_PATTERN",
",",
")",
":",
"value",
"=",
"config",
"[",
"section",
"]",
"[",
"option",
"]",
"if",
"re",
".",
"match",
"(",
"jinja_pattern",
",",
"value",
")",
":",
"return",
"None",
"return",
"value"
] |
Gets the FirmwareBundles API client . | def firmware_bundles ( self ) : if not self . __firmware_bundles : self . __firmware_bundles = FirmwareBundles ( self . __connection ) return self . __firmware_bundles | 251,614 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L893-L902 | [
"def",
"check_value",
"(",
"config",
",",
"section",
",",
"option",
",",
"jinja_pattern",
"=",
"JINJA_PATTERN",
",",
")",
":",
"value",
"=",
"config",
"[",
"section",
"]",
"[",
"option",
"]",
"if",
"re",
".",
"match",
"(",
"jinja_pattern",
",",
"value",
")",
":",
"return",
"None",
"return",
"value"
] |
Gets the Volumes API client . | def volumes ( self ) : if not self . __volumes : self . __volumes = Volumes ( self . __connection ) return self . __volumes | 251,615 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L915-L924 | [
"def",
"get_data_files",
"(",
"top",
")",
":",
"data_files",
"=",
"[",
"]",
"ntrim",
"=",
"len",
"(",
"here",
"+",
"os",
".",
"path",
".",
"sep",
")",
"for",
"(",
"d",
",",
"_",
",",
"filenames",
")",
"in",
"os",
".",
"walk",
"(",
"top",
")",
":",
"data_files",
".",
"append",
"(",
"(",
"d",
"[",
"ntrim",
":",
"]",
",",
"[",
"os",
".",
"path",
".",
"join",
"(",
"d",
",",
"f",
")",
"for",
"f",
"in",
"filenames",
"]",
")",
")",
"return",
"data_files"
] |
Gets the SAS Logical JBOD Attachments client . | def sas_logical_jbod_attachments ( self ) : if not self . __sas_logical_jbod_attachments : self . __sas_logical_jbod_attachments = SasLogicalJbodAttachments ( self . __connection ) return self . __sas_logical_jbod_attachments | 251,616 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L927-L936 | [
"def",
"_extract",
"(",
"self",
",",
"path",
",",
"outdir",
",",
"filter_func",
"=",
"None",
")",
":",
"with",
"open_zip",
"(",
"path",
")",
"as",
"archive_file",
":",
"for",
"name",
"in",
"archive_file",
".",
"namelist",
"(",
")",
":",
"# While we're at it, we also perform this safety test.",
"if",
"name",
".",
"startswith",
"(",
"'/'",
")",
"or",
"name",
".",
"startswith",
"(",
"'..'",
")",
":",
"raise",
"ValueError",
"(",
"'Zip file contains unsafe path: {}'",
".",
"format",
"(",
"name",
")",
")",
"if",
"(",
"not",
"filter_func",
"or",
"filter_func",
"(",
"name",
")",
")",
":",
"archive_file",
".",
"extract",
"(",
"name",
",",
"outdir",
")"
] |
Gets the Managed SANs API client . | def managed_sans ( self ) : if not self . __managed_sans : self . __managed_sans = ManagedSANs ( self . __connection ) return self . __managed_sans | 251,617 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L939-L948 | [
"def",
"_read_console_output",
"(",
"self",
",",
"ws",
",",
"out",
")",
":",
"while",
"True",
":",
"msg",
"=",
"yield",
"from",
"ws",
".",
"receive",
"(",
")",
"if",
"msg",
".",
"tp",
"==",
"aiohttp",
".",
"WSMsgType",
".",
"text",
":",
"out",
".",
"feed_data",
"(",
"msg",
".",
"data",
".",
"encode",
"(",
")",
")",
"elif",
"msg",
".",
"tp",
"==",
"aiohttp",
".",
"WSMsgType",
".",
"BINARY",
":",
"out",
".",
"feed_data",
"(",
"msg",
".",
"data",
")",
"elif",
"msg",
".",
"tp",
"==",
"aiohttp",
".",
"WSMsgType",
".",
"ERROR",
":",
"log",
".",
"critical",
"(",
"\"Docker WebSocket Error: {}\"",
".",
"format",
"(",
"msg",
".",
"data",
")",
")",
"else",
":",
"out",
".",
"feed_eof",
"(",
")",
"ws",
".",
"close",
"(",
")",
"break",
"yield",
"from",
"self",
".",
"stop",
"(",
")"
] |
Gets the VC Migration Manager API client . | def migratable_vc_domains ( self ) : if not self . __migratable_vc_domains : self . __migratable_vc_domains = MigratableVcDomains ( self . __connection ) return self . __migratable_vc_domains | 251,618 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L951-L960 | [
"def",
"setbit",
"(",
"self",
",",
"name",
",",
"offset",
",",
"value",
")",
":",
"with",
"self",
".",
"pipe",
"as",
"pipe",
":",
"return",
"pipe",
".",
"setbit",
"(",
"self",
".",
"redis_key",
"(",
"name",
")",
",",
"offset",
",",
"value",
")"
] |
Gets the SAS Interconnects API client . | def sas_interconnects ( self ) : if not self . __sas_interconnects : self . __sas_interconnects = SasInterconnects ( self . __connection ) return self . __sas_interconnects | 251,619 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L963-L972 | [
"def",
"dump",
"(",
"self",
")",
":",
"assert",
"self",
".",
"database",
"is",
"not",
"None",
"cmd",
"=",
"\"SELECT count from {} WHERE rowid={}\"",
"self",
".",
"_execute",
"(",
"cmd",
".",
"format",
"(",
"self",
".",
"STATE_INFO_TABLE",
",",
"self",
".",
"STATE_INFO_ROW",
")",
")",
"ret",
"=",
"self",
".",
"_fetchall",
"(",
")",
"assert",
"len",
"(",
"ret",
")",
"==",
"1",
"assert",
"len",
"(",
"ret",
"[",
"0",
"]",
")",
"==",
"1",
"count",
"=",
"self",
".",
"_from_sqlite",
"(",
"ret",
"[",
"0",
"]",
"[",
"0",
"]",
")",
"+",
"self",
".",
"inserts",
"if",
"count",
">",
"self",
".",
"row_limit",
":",
"msg",
"=",
"\"cleaning up state, this might take a while.\"",
"logger",
".",
"warning",
"(",
"msg",
")",
"delete",
"=",
"count",
"-",
"self",
".",
"row_limit",
"delete",
"+=",
"int",
"(",
"self",
".",
"row_limit",
"*",
"(",
"self",
".",
"row_cleanup_quota",
"/",
"100.0",
")",
")",
"cmd",
"=",
"(",
"\"DELETE FROM {} WHERE timestamp IN (\"",
"\"SELECT timestamp FROM {} ORDER BY timestamp ASC LIMIT {});\"",
")",
"self",
".",
"_execute",
"(",
"cmd",
".",
"format",
"(",
"self",
".",
"STATE_TABLE",
",",
"self",
".",
"STATE_TABLE",
",",
"delete",
")",
")",
"self",
".",
"_vacuum",
"(",
")",
"cmd",
"=",
"\"SELECT COUNT(*) FROM {}\"",
"self",
".",
"_execute",
"(",
"cmd",
".",
"format",
"(",
"self",
".",
"STATE_TABLE",
")",
")",
"ret",
"=",
"self",
".",
"_fetchall",
"(",
")",
"assert",
"len",
"(",
"ret",
")",
"==",
"1",
"assert",
"len",
"(",
"ret",
"[",
"0",
"]",
")",
"==",
"1",
"count",
"=",
"ret",
"[",
"0",
"]",
"[",
"0",
"]",
"cmd",
"=",
"\"UPDATE {} SET count = {} WHERE rowid = {}\"",
"self",
".",
"_execute",
"(",
"cmd",
".",
"format",
"(",
"self",
".",
"STATE_INFO_TABLE",
",",
"self",
".",
"_to_sqlite",
"(",
"count",
")",
",",
"self",
".",
"STATE_INFO_ROW",
",",
")",
")",
"self",
".",
"_update_cache_directory_state",
"(",
")",
"self",
".",
"database",
".",
"commit",
"(",
")",
"self",
".",
"cursor",
".",
"close",
"(",
")",
"self",
".",
"database",
".",
"close",
"(",
")",
"self",
".",
"database",
"=",
"None",
"self",
".",
"cursor",
"=",
"None",
"self",
".",
"inserts",
"=",
"0"
] |
Gets the SasLogicalInterconnectGroups API client . | def sas_logical_interconnect_groups ( self ) : if not self . __sas_logical_interconnect_groups : self . __sas_logical_interconnect_groups = SasLogicalInterconnectGroups ( self . __connection ) return self . __sas_logical_interconnect_groups | 251,620 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L975-L984 | [
"def",
"extractFromHTML",
"(",
"html",
",",
"blur",
"=",
"5",
")",
":",
"#html = html.encode('utf-8', errors='ignore')",
"try",
":",
"html",
"=",
"unicode",
"(",
"html",
",",
"errors",
"=",
"'ignore'",
")",
"except",
"TypeError",
":",
"pass",
"assert",
"isinstance",
"(",
"html",
",",
"unicode",
")",
"# Create memory file.",
"_file",
"=",
"StringIO",
"(",
")",
"# Convert html to text.",
"f",
"=",
"formatter",
".",
"AbstractFormatter",
"(",
"formatter",
".",
"DumbWriter",
"(",
"_file",
")",
")",
"p",
"=",
"TextExtractor",
"(",
")",
"p",
".",
"pathBlur",
"=",
"blur",
"p",
".",
"feed",
"(",
"html",
")",
"p",
".",
"close",
"(",
")",
"text",
"=",
"p",
".",
"get_plaintext",
"(",
")",
"# Remove stand-alone punctuation.",
"text",
"=",
"re",
".",
"sub",
"(",
"\"\\s[\\(\\),;\\.\\?\\!](?=\\s)\"",
",",
"\" \"",
",",
"text",
")",
".",
"strip",
"(",
")",
"# Compress whitespace.",
"text",
"=",
"re",
".",
"sub",
"(",
"\"[\\n\\s]+\"",
",",
"\" \"",
",",
"text",
")",
".",
"strip",
"(",
")",
"# Remove consequetive dashes.",
"text",
"=",
"re",
".",
"sub",
"(",
"\"\\-{2,}\"",
",",
"\"\"",
",",
"text",
")",
".",
"strip",
"(",
")",
"# Remove consequetive periods.",
"text",
"=",
"re",
".",
"sub",
"(",
"\"\\.{2,}\"",
",",
"\"\"",
",",
"text",
")",
".",
"strip",
"(",
")",
"return",
"text"
] |
Gets the Drive Enclosures API client . | def drive_enclosures ( self ) : if not self . __drive_enclures : self . __drive_enclures = DriveEnclosures ( self . __connection ) return self . __drive_enclures | 251,621 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L987-L996 | [
"def",
"_integerValue_to_int",
"(",
"value_str",
")",
":",
"m",
"=",
"BINARY_VALUE",
".",
"match",
"(",
"value_str",
")",
"if",
"m",
":",
"value",
"=",
"int",
"(",
"m",
".",
"group",
"(",
"1",
")",
",",
"2",
")",
"elif",
"OCTAL_VALUE",
".",
"match",
"(",
"value_str",
")",
":",
"value",
"=",
"int",
"(",
"value_str",
",",
"8",
")",
"elif",
"DECIMAL_VALUE",
".",
"match",
"(",
"value_str",
")",
":",
"value",
"=",
"int",
"(",
"value_str",
")",
"elif",
"HEX_VALUE",
".",
"match",
"(",
"value_str",
")",
":",
"value",
"=",
"int",
"(",
"value_str",
",",
"16",
")",
"else",
":",
"value",
"=",
"None",
"return",
"value"
] |
Gets the SAS Logical JBODs API client . | def sas_logical_jbods ( self ) : if not self . __sas_logical_jbods : self . __sas_logical_jbods = SasLogicalJbods ( self . __connection ) return self . __sas_logical_jbods | 251,622 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L999-L1008 | [
"def",
"update_comment",
"(",
"self",
",",
"comment_id",
",",
"body",
")",
":",
"path",
"=",
"'/msg/update_comment'",
"req",
"=",
"ET",
".",
"Element",
"(",
"'request'",
")",
"ET",
".",
"SubElement",
"(",
"req",
",",
"'comment_id'",
")",
".",
"text",
"=",
"str",
"(",
"int",
"(",
"comment_id",
")",
")",
"comment",
"=",
"ET",
".",
"SubElement",
"(",
"req",
",",
"'comment'",
")",
"ET",
".",
"SubElement",
"(",
"comment",
",",
"'body'",
")",
".",
"text",
"=",
"str",
"(",
"body",
")",
"return",
"self",
".",
"_request",
"(",
"path",
",",
"req",
")"
] |
Gets the Labels API client . | def labels ( self ) : if not self . __labels : self . __labels = Labels ( self . __connection ) return self . __labels | 251,623 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L1011-L1020 | [
"def",
"_build",
"(",
"self",
",",
"name",
",",
"*",
"*",
"params",
")",
":",
"log",
"=",
"self",
".",
"_getparam",
"(",
"'log'",
",",
"self",
".",
"_discard",
",",
"*",
"*",
"params",
")",
"# Find all the modules that no longer need watching",
"#",
"rebuild",
"=",
"False",
"wparams",
"=",
"params",
".",
"copy",
"(",
")",
"wparams",
"[",
"'commit'",
"]",
"=",
"False",
"for",
"path",
"in",
"list",
"(",
"self",
".",
"_watch",
".",
"paths_open",
")",
":",
"if",
"path",
"in",
"self",
".",
"modules",
":",
"continue",
"try",
":",
"self",
".",
"_watch",
".",
"remove",
"(",
"path",
",",
"*",
"*",
"wparams",
")",
"rebuild",
"=",
"True",
"except",
"Exception",
"as",
"e",
":",
"log",
".",
"warning",
"(",
"\"Remove of watched module %r failed -- %s\"",
",",
"path",
",",
"e",
")",
"log",
".",
"debug",
"(",
"\"Removed watch for path %r\"",
",",
"path",
")",
"# Find all the modules that are new and should be watched",
"#",
"for",
"path",
"in",
"list",
"(",
"self",
".",
"modules",
")",
":",
"if",
"path",
"not",
"in",
"self",
".",
"_watch",
".",
"paths_open",
":",
"try",
":",
"self",
".",
"_watch",
".",
"add",
"(",
"path",
",",
"*",
"*",
"wparams",
")",
"rebuild",
"=",
"True",
"except",
"Exception",
"as",
"e",
":",
"log",
".",
"error",
"(",
"\"watch failed on module %r -- %s\"",
",",
"path",
",",
"e",
")",
"continue",
"if",
"rebuild",
":",
"self",
".",
"_watch",
".",
"commit",
"(",
"*",
"*",
"params",
")"
] |
Gets the Index Resources API client . | def index_resources ( self ) : if not self . __index_resources : self . __index_resources = IndexResources ( self . __connection ) return self . __index_resources | 251,624 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L1023-L1032 | [
"def",
"delete",
"(",
"self",
",",
"msg",
",",
"claim_id",
"=",
"None",
")",
":",
"msg_id",
"=",
"utils",
".",
"get_id",
"(",
"msg",
")",
"if",
"claim_id",
":",
"uri",
"=",
"\"/%s/%s?claim_id=%s\"",
"%",
"(",
"self",
".",
"uri_base",
",",
"msg_id",
",",
"claim_id",
")",
"else",
":",
"uri",
"=",
"\"/%s/%s\"",
"%",
"(",
"self",
".",
"uri_base",
",",
"msg_id",
")",
"return",
"self",
".",
"_delete",
"(",
"uri",
")"
] |
Gets the Alerts API client . | def alerts ( self ) : if not self . __alerts : self . __alerts = Alerts ( self . __connection ) return self . __alerts | 251,625 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L1035-L1044 | [
"def",
"parse_from_file",
"(",
"filename",
",",
"nodata",
"=",
"False",
")",
":",
"header",
"=",
"None",
"with",
"open",
"(",
"filename",
",",
"\"rb\"",
")",
"as",
"file",
":",
"header",
"=",
"read_machine_header",
"(",
"file",
")",
"meta_raw",
"=",
"file",
".",
"read",
"(",
"header",
"[",
"'meta_len'",
"]",
")",
"meta",
"=",
"__parse_meta",
"(",
"meta_raw",
",",
"header",
")",
"data",
"=",
"b''",
"if",
"not",
"nodata",
":",
"data",
"=",
"__decompress",
"(",
"meta",
",",
"file",
".",
"read",
"(",
"header",
"[",
"'data_len'",
"]",
")",
")",
"return",
"header",
",",
"meta",
",",
"data"
] |
Gets the Events API client . | def events ( self ) : if not self . __events : self . __events = Events ( self . __connection ) return self . __events | 251,626 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L1047-L1056 | [
"def",
"saveVarsInMat",
"(",
"filename",
",",
"varNamesStr",
",",
"outOf",
"=",
"None",
",",
"*",
"*",
"opts",
")",
":",
"from",
"mlabwrap",
"import",
"mlab",
"filename",
",",
"varnames",
",",
"outOf",
"=",
"__saveVarsHelper",
"(",
"filename",
",",
"varNamesStr",
",",
"outOf",
",",
"'.mat'",
",",
"*",
"*",
"opts",
")",
"try",
":",
"for",
"varname",
"in",
"varnames",
":",
"mlab",
".",
"_set",
"(",
"varname",
",",
"outOf",
"[",
"varname",
"]",
")",
"mlab",
".",
"_do",
"(",
"\"save('%s','%s')\"",
"%",
"(",
"filename",
",",
"\"', '\"",
".",
"join",
"(",
"varnames",
")",
")",
",",
"nout",
"=",
"0",
")",
"finally",
":",
"assert",
"varnames",
"mlab",
".",
"_do",
"(",
"\"clear('%s')\"",
"%",
"\"', '\"",
".",
"join",
"(",
"varnames",
")",
",",
"nout",
"=",
"0",
")"
] |
Gets the Os Deployment Servers API client . | def os_deployment_servers ( self ) : if not self . __os_deployment_servers : self . __os_deployment_servers = OsDeploymentServers ( self . __connection ) return self . __os_deployment_servers | 251,627 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L1069-L1078 | [
"def",
"_get_query",
"(",
"self",
",",
"callback",
",",
"schema",
",",
"query",
"=",
"None",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"query",
":",
"query",
"=",
"Query",
"(",
")",
"ret",
"=",
"None",
"with",
"self",
".",
"connection",
"(",
"*",
"*",
"kwargs",
")",
"as",
"connection",
":",
"kwargs",
"[",
"'connection'",
"]",
"=",
"connection",
"try",
":",
"if",
"connection",
".",
"in_transaction",
"(",
")",
":",
"# we wrap SELECT queries in a transaction if we are in a transaction because",
"# it could cause data loss if it failed by causing the db to discard",
"# anything in the current transaction if the query isn't wrapped,",
"# go ahead, ask me how I know this",
"with",
"self",
".",
"transaction",
"(",
"*",
"*",
"kwargs",
")",
":",
"ret",
"=",
"callback",
"(",
"schema",
",",
"query",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"else",
":",
"ret",
"=",
"callback",
"(",
"schema",
",",
"query",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"except",
"Exception",
"as",
"e",
":",
"exc_info",
"=",
"sys",
".",
"exc_info",
"(",
")",
"if",
"self",
".",
"handle_error",
"(",
"schema",
",",
"e",
",",
"*",
"*",
"kwargs",
")",
":",
"ret",
"=",
"callback",
"(",
"schema",
",",
"query",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"else",
":",
"self",
".",
"raise_error",
"(",
"e",
",",
"exc_info",
")",
"return",
"ret"
] |
Gets the Certificate RabbitMQ API client . | def certificate_rabbitmq ( self ) : if not self . __certificate_rabbitmq : self . __certificate_rabbitmq = CertificateRabbitMQ ( self . __connection ) return self . __certificate_rabbitmq | 251,628 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L1081-L1090 | [
"def",
"_subtractSky",
"(",
"image",
",",
"skyValue",
",",
"memmap",
"=",
"False",
")",
":",
"try",
":",
"np",
".",
"subtract",
"(",
"image",
".",
"data",
",",
"skyValue",
",",
"image",
".",
"data",
")",
"except",
"IOError",
":",
"print",
"(",
"\"Unable to perform sky subtraction on data array\"",
")",
"raise",
"IOError"
] |
Gets the Users API client . | def users ( self ) : if not self . __users : self . __users = Users ( self . __connection ) return self . __users | 251,629 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L1093-L1102 | [
"def",
"_return_container_objects",
"(",
"self",
")",
":",
"container_objects",
"=",
"self",
".",
"job_args",
".",
"get",
"(",
"'object'",
")",
"if",
"container_objects",
":",
"return",
"True",
",",
"[",
"{",
"'container_object'",
":",
"i",
"}",
"for",
"i",
"in",
"container_objects",
"]",
"container_objects",
"=",
"self",
".",
"job_args",
".",
"get",
"(",
"'objects_file'",
")",
"if",
"container_objects",
":",
"container_objects",
"=",
"os",
".",
"path",
".",
"expanduser",
"(",
"container_objects",
")",
"if",
"os",
".",
"path",
".",
"isfile",
"(",
"container_objects",
")",
":",
"with",
"open",
"(",
"container_objects",
")",
"as",
"f",
":",
"return",
"True",
",",
"[",
"{",
"'container_object'",
":",
"i",
".",
"rstrip",
"(",
"'\\n'",
")",
"}",
"for",
"i",
"in",
"f",
".",
"readlines",
"(",
")",
"]",
"container_objects",
"=",
"self",
".",
"_list_contents",
"(",
")",
"pattern_match",
"=",
"self",
".",
"job_args",
".",
"get",
"(",
"'pattern_match'",
")",
"if",
"pattern_match",
":",
"container_objects",
"=",
"self",
".",
"match_filter",
"(",
"idx_list",
"=",
"container_objects",
",",
"pattern",
"=",
"pattern_match",
",",
"dict_type",
"=",
"True",
",",
"dict_key",
"=",
"'name'",
")",
"# Reformat list for processing",
"if",
"container_objects",
"and",
"isinstance",
"(",
"container_objects",
"[",
"0",
"]",
",",
"dict",
")",
":",
"return",
"False",
",",
"self",
".",
"_return_deque",
"(",
"[",
"{",
"'container_object'",
":",
"i",
"[",
"'name'",
"]",
"}",
"for",
"i",
"in",
"container_objects",
"]",
")",
"else",
":",
"return",
"False",
",",
"self",
".",
"_return_deque",
"(",
")"
] |
Gets the ApplianceDeviceReadCommunity API client . | def appliance_device_read_community ( self ) : if not self . __appliance_device_read_community : self . __appliance_device_read_community = ApplianceDeviceReadCommunity ( self . __connection ) return self . __appliance_device_read_community | 251,630 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L1105-L1114 | [
"def",
"refresh",
"(",
"self",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"result",
"=",
"self",
".",
"fetch",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"self",
".",
"store",
"(",
"self",
".",
"key",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
",",
"self",
".",
"expiry",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
",",
"result",
")",
"return",
"result"
] |
Gets the ApplianceDeviceSNMPv1TrapDestinations API client . | def appliance_device_snmp_v1_trap_destinations ( self ) : if not self . __appliance_device_snmp_v1_trap_destinations : self . __appliance_device_snmp_v1_trap_destinations = ApplianceDeviceSNMPv1TrapDestinations ( self . __connection ) return self . __appliance_device_snmp_v1_trap_destinations | 251,631 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L1117-L1126 | [
"def",
"progress",
"(",
"self",
",",
"loaded",
",",
"total",
",",
"msg",
"=",
"''",
")",
":",
"self",
".",
"fire",
"(",
"'progress'",
",",
"{",
"'loaded'",
":",
"loaded",
",",
"'total'",
":",
"total",
",",
"'msg'",
":",
"msg",
"}",
")"
] |
Gets the ApplianceDeviceSNMPv3TrapDestinations API client . | def appliance_device_snmp_v3_trap_destinations ( self ) : if not self . __appliance_device_snmp_v3_trap_destinations : self . __appliance_device_snmp_v3_trap_destinations = ApplianceDeviceSNMPv3TrapDestinations ( self . __connection ) return self . __appliance_device_snmp_v3_trap_destinations | 251,632 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L1129-L1138 | [
"def",
"progress",
"(",
"self",
",",
"loaded",
",",
"total",
",",
"msg",
"=",
"''",
")",
":",
"self",
".",
"fire",
"(",
"'progress'",
",",
"{",
"'loaded'",
":",
"loaded",
",",
"'total'",
":",
"total",
",",
"'msg'",
":",
"msg",
"}",
")"
] |
Gets the ApplianceDeviceSNMPv3Users API client . | def appliance_device_snmp_v3_users ( self ) : if not self . __appliance_device_snmp_v3_users : self . __appliance_device_snmp_v3_users = ApplianceDeviceSNMPv3Users ( self . __connection ) return self . __appliance_device_snmp_v3_users | 251,633 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L1141-L1150 | [
"async",
"def",
"stop",
"(",
"self",
")",
":",
"for",
"task",
"in",
"self",
".",
"__tracks",
".",
"values",
"(",
")",
":",
"if",
"task",
"is",
"not",
"None",
":",
"task",
".",
"cancel",
"(",
")",
"self",
".",
"__tracks",
"=",
"{",
"}"
] |
Gets the ApplianceNodeInformation API client . | def appliance_node_information ( self ) : if not self . __appliance_node_information : self . __appliance_node_information = ApplianceNodeInformation ( self . __connection ) return self . __appliance_node_information | 251,634 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L1153-L1162 | [
"def",
"decompress_messages",
"(",
"self",
",",
"offmsgs",
")",
":",
"for",
"offmsg",
"in",
"offmsgs",
":",
"yield",
"offmsg",
".",
"message",
".",
"key",
",",
"self",
".",
"decompress_fun",
"(",
"offmsg",
".",
"message",
".",
"value",
")"
] |
Gets the ApplianceTimeAndLocaleConfiguration API client . | def appliance_time_and_locale_configuration ( self ) : if not self . __appliance_time_and_locale_configuration : self . __appliance_time_and_locale_configuration = ApplianceTimeAndLocaleConfiguration ( self . __connection ) return self . __appliance_time_and_locale_configuration | 251,635 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L1165-L1174 | [
"def",
"extract_string_pairs_in_directory",
"(",
"directory_path",
",",
"extract_func",
",",
"filter_func",
")",
":",
"result",
"=",
"{",
"}",
"for",
"root",
",",
"dirnames",
",",
"filenames",
"in",
"os",
".",
"walk",
"(",
"directory_path",
")",
":",
"for",
"file_name",
"in",
"filenames",
":",
"if",
"filter_func",
"(",
"file_name",
")",
":",
"file_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"root",
",",
"file_name",
")",
"try",
":",
"extract_func",
"(",
"result",
",",
"file_path",
")",
"except",
"Exception",
"as",
"e",
":",
"print",
"\"Error in file \"",
"+",
"file_name",
"print",
"e",
"return",
"result"
] |
Gets the Version API client . | def versions ( self ) : if not self . __versions : self . __versions = Versions ( self . __connection ) return self . __versions | 251,636 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L1177-L1186 | [
"def",
"load",
"(",
"self",
",",
"filename",
",",
"offset",
")",
":",
"self",
".",
"offset",
"=",
"offset",
"self",
".",
"filename",
"=",
"filename",
"self",
".",
"bootsector",
"=",
"BootSector",
"(",
"filename",
"=",
"filename",
",",
"length",
"=",
"NTFS_BOOTSECTOR_SIZE",
",",
"offset",
"=",
"self",
".",
"offset",
")",
"self",
".",
"mft_table",
"=",
"MftTable",
"(",
"mft_entry_size",
"=",
"self",
".",
"bootsector",
".",
"mft_record_size",
",",
"filename",
"=",
"self",
".",
"filename",
",",
"offset",
"=",
"self",
".",
"mft_table_offset",
")",
"self",
".",
"mft_table",
".",
"preload_entries",
"(",
"NUM_SYSTEM_ENTRIES",
")",
"self",
".",
"_load_volume_information",
"(",
")"
] |
Gets the Backup API client . | def backups ( self ) : if not self . __backups : self . __backups = Backups ( self . __connection ) return self . __backups | 251,637 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L1189-L1198 | [
"def",
"element_value_should_contain",
"(",
"self",
",",
"locator",
",",
"expected",
")",
":",
"self",
".",
"_info",
"(",
"\"Verifying element '%s' value contains '%s'\"",
"%",
"(",
"locator",
",",
"expected",
")",
")",
"element",
"=",
"self",
".",
"_element_find",
"(",
"locator",
",",
"True",
",",
"True",
")",
"value",
"=",
"str",
"(",
"element",
".",
"get_attribute",
"(",
"'value'",
")",
")",
"if",
"expected",
"in",
"value",
":",
"return",
"else",
":",
"raise",
"AssertionError",
"(",
"\"Value '%s' did not appear in element '%s'. It's value was '%s'\"",
"%",
"(",
"expected",
",",
"locator",
",",
"value",
")",
")"
] |
Gets the login details | def login_details ( self ) : if not self . __login_details : self . __login_details = LoginDetails ( self . __connection ) return self . __login_details | 251,638 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/oneview_client.py#L1201-L1210 | [
"def",
"_fuzzdb_get_strings",
"(",
"max_len",
"=",
"0",
")",
":",
"ignored",
"=",
"[",
"'integer-overflow'",
"]",
"for",
"subdir",
"in",
"pkg_resources",
".",
"resource_listdir",
"(",
"'protofuzz'",
",",
"BASE_PATH",
")",
":",
"if",
"subdir",
"in",
"ignored",
":",
"continue",
"path",
"=",
"'{}/{}'",
".",
"format",
"(",
"BASE_PATH",
",",
"subdir",
")",
"listing",
"=",
"pkg_resources",
".",
"resource_listdir",
"(",
"'protofuzz'",
",",
"path",
")",
"for",
"filename",
"in",
"listing",
":",
"if",
"not",
"filename",
".",
"endswith",
"(",
"'.txt'",
")",
":",
"continue",
"path",
"=",
"'{}/{}/{}'",
".",
"format",
"(",
"BASE_PATH",
",",
"subdir",
",",
"filename",
")",
"source",
"=",
"_open_fuzzdb_file",
"(",
"path",
")",
"for",
"line",
"in",
"source",
":",
"string",
"=",
"line",
".",
"decode",
"(",
"'utf-8'",
")",
".",
"strip",
"(",
")",
"if",
"not",
"string",
"or",
"string",
".",
"startswith",
"(",
"'#'",
")",
":",
"continue",
"if",
"max_len",
"!=",
"0",
"and",
"len",
"(",
"line",
")",
">",
"max_len",
":",
"continue",
"yield",
"string"
] |
Retrieves the list of Ethernet networks Fibre Channel networks and network sets that are available to a server profile template along with their respective ports . The scopeUris serverHardwareTypeUri and enclosureGroupUri parameters should be specified to get the available networks for a new server profile template . The serverHardwareTypeUri enclosureGroupUri and profileTemplateUri should be specified to get available networks for an existing server profile template . The scopeUris parameter is ignored when the profileTemplateUri is specified . | def get_available_networks ( self , * * kwargs ) : query_string = '&' . join ( '{}={}' . format ( key , value ) for key , value in kwargs . items ( ) if value ) uri = self . URI + "{}?{}" . format ( "/available-networks" , query_string ) return self . _helper . do_get ( uri ) | 251,639 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/servers/server_profile_templates.py#L181-L212 | [
"def",
"get_rng",
"(",
"obj",
"=",
"None",
")",
":",
"seed",
"=",
"(",
"id",
"(",
"obj",
")",
"+",
"os",
".",
"getpid",
"(",
")",
"+",
"int",
"(",
"datetime",
".",
"now",
"(",
")",
".",
"strftime",
"(",
"\"%Y%m%d%H%M%S%f\"",
")",
")",
")",
"%",
"4294967295",
"if",
"_RNG_SEED",
"is",
"not",
"None",
":",
"seed",
"=",
"_RNG_SEED",
"return",
"np",
".",
"random",
".",
"RandomState",
"(",
"seed",
")"
] |
Gets a paginated collection of logical downlinks without ethernet . The collection is based on optional sorting and filtering and is constrained by start and count parameters . | def get_all_without_ethernet ( self , start = 0 , count = - 1 , filter = '' , sort = '' ) : without_ethernet_client = ResourceClient ( self . _connection , "/rest/logical-downlinks/withoutEthernet" ) return without_ethernet_client . get_all ( start , count , filter = filter , sort = sort ) | 251,640 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/networking/logical_downlinks.py#L99-L124 | [
"def",
"kill",
"(",
"self",
")",
":",
"BaseShellOperator",
".",
"_close_process_input_stdin",
"(",
"self",
".",
"_batcmd",
".",
"batch_to_file_s",
")",
"BaseShellOperator",
".",
"_wait_process",
"(",
"self",
".",
"_process",
",",
"self",
".",
"_batcmd",
".",
"sh_cmd",
",",
"self",
".",
"_success_exitcodes",
")",
"BaseShellOperator",
".",
"_rm_process_input_tmpfiles",
"(",
"self",
".",
"_batcmd",
".",
"batch_to_file_s",
")",
"self",
".",
"_process",
"=",
"None"
] |
Gets the logical downlink with the specified ID without ethernet . | def get_without_ethernet ( self , id_or_uri ) : uri = self . _client . build_uri ( id_or_uri ) + "/withoutEthernet" return self . _client . get ( uri ) | 251,641 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/networking/logical_downlinks.py#L126-L137 | [
"def",
"RunOnce",
"(",
"self",
")",
":",
"# pylint: disable=unused-variable,g-import-not-at-top",
"from",
"grr_response_server",
".",
"gui",
"import",
"gui_plugins",
"# pylint: enable=unused-variable,g-import-not-at-top",
"if",
"config",
".",
"CONFIG",
".",
"Get",
"(",
"\"AdminUI.django_secret_key\"",
",",
"None",
")",
":",
"logging",
".",
"warning",
"(",
"\"The AdminUI.django_secret_key option has been deprecated, \"",
"\"please use AdminUI.csrf_secret_key instead.\"",
")"
] |
Installs firmware to the member interconnects of a SAS Logical Interconnect . | def update_firmware ( self , firmware_information , force = False ) : firmware_uri = "{}/firmware" . format ( self . data [ "uri" ] ) result = self . _helper . update ( firmware_information , firmware_uri , force = force ) self . refresh ( ) return result | 251,642 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/networking/sas_logical_interconnects.py#L82-L97 | [
"def",
"_index_audio_cmu",
"(",
"self",
",",
"basename",
"=",
"None",
",",
"replace_already_indexed",
"=",
"False",
")",
":",
"self",
".",
"_prepare_audio",
"(",
"basename",
"=",
"basename",
",",
"replace_already_indexed",
"=",
"replace_already_indexed",
")",
"for",
"staging_audio_basename",
"in",
"self",
".",
"_list_audio_files",
"(",
"sub_dir",
"=",
"\"staging\"",
")",
":",
"original_audio_name",
"=",
"''",
".",
"join",
"(",
"staging_audio_basename",
".",
"split",
"(",
"'.'",
")",
"[",
":",
"-",
"1",
"]",
")",
"[",
":",
"-",
"3",
"]",
"pocketsphinx_command",
"=",
"''",
".",
"join",
"(",
"[",
"\"pocketsphinx_continuous\"",
",",
"\"-infile\"",
",",
"str",
"(",
"\"{}/staging/{}\"",
".",
"format",
"(",
"self",
".",
"src_dir",
",",
"staging_audio_basename",
")",
")",
",",
"\"-time\"",
",",
"\"yes\"",
",",
"\"-logfn\"",
",",
"\"/dev/null\"",
"]",
")",
"try",
":",
"if",
"self",
".",
"get_verbosity",
"(",
")",
":",
"print",
"(",
"\"Now indexing {}\"",
".",
"format",
"(",
"staging_audio_basename",
")",
")",
"output",
"=",
"subprocess",
".",
"check_output",
"(",
"[",
"\"pocketsphinx_continuous\"",
",",
"\"-infile\"",
",",
"str",
"(",
"\"{}/staging/{}\"",
".",
"format",
"(",
"self",
".",
"src_dir",
",",
"staging_audio_basename",
")",
")",
",",
"\"-time\"",
",",
"\"yes\"",
",",
"\"-logfn\"",
",",
"\"/dev/null\"",
"]",
",",
"universal_newlines",
"=",
"True",
")",
".",
"split",
"(",
"'\\n'",
")",
"str_timestamps_with_sil_conf",
"=",
"list",
"(",
"map",
"(",
"lambda",
"x",
":",
"x",
".",
"split",
"(",
"\" \"",
")",
",",
"filter",
"(",
"None",
",",
"output",
"[",
"1",
":",
"]",
")",
")",
")",
"# Timestamps are putted in a list of a single element. To match",
"# Watson's output.",
"self",
".",
"__timestamps_unregulated",
"[",
"original_audio_name",
"+",
"\".wav\"",
"]",
"=",
"[",
"(",
"self",
".",
"_timestamp_extractor_cmu",
"(",
"staging_audio_basename",
",",
"str_timestamps_with_sil_conf",
")",
")",
"]",
"if",
"self",
".",
"get_verbosity",
"(",
")",
":",
"print",
"(",
"\"Done indexing {}\"",
".",
"format",
"(",
"staging_audio_basename",
")",
")",
"except",
"OSError",
"as",
"e",
":",
"if",
"self",
".",
"get_verbosity",
"(",
")",
":",
"print",
"(",
"e",
",",
"\"The command was: {}\"",
".",
"format",
"(",
"pocketsphinx_command",
")",
")",
"self",
".",
"__errors",
"[",
"(",
"time",
"(",
")",
",",
"staging_audio_basename",
")",
"]",
"=",
"e",
"self",
".",
"_timestamp_regulator",
"(",
")",
"if",
"self",
".",
"get_verbosity",
"(",
")",
":",
"print",
"(",
"\"Finished indexing procedure\"",
")"
] |
Gets baseline firmware information for a SAS Logical Interconnect . | def get_firmware ( self ) : firmware_uri = "{}/firmware" . format ( self . data [ "uri" ] ) return self . _helper . do_get ( firmware_uri ) | 251,643 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/networking/sas_logical_interconnects.py#L100-L108 | [
"def",
"automodsumm_to_autosummary_lines",
"(",
"fn",
",",
"app",
")",
":",
"fullfn",
"=",
"os",
".",
"path",
".",
"join",
"(",
"app",
".",
"builder",
".",
"env",
".",
"srcdir",
",",
"fn",
")",
"with",
"open",
"(",
"fullfn",
")",
"as",
"fr",
":",
"if",
"'astropy_helpers.sphinx.ext.automodapi'",
"in",
"app",
".",
"_extensions",
":",
"from",
"astropy_helpers",
".",
"sphinx",
".",
"ext",
".",
"automodapi",
"import",
"automodapi_replace",
"# Must do the automodapi on the source to get the automodsumm",
"# that might be in there",
"docname",
"=",
"os",
".",
"path",
".",
"splitext",
"(",
"fn",
")",
"[",
"0",
"]",
"filestr",
"=",
"automodapi_replace",
"(",
"fr",
".",
"read",
"(",
")",
",",
"app",
",",
"True",
",",
"docname",
",",
"False",
")",
"else",
":",
"filestr",
"=",
"fr",
".",
"read",
"(",
")",
"spl",
"=",
"_automodsummrex",
".",
"split",
"(",
"filestr",
")",
"#0th entry is the stuff before the first automodsumm line",
"indent1s",
"=",
"spl",
"[",
"1",
":",
":",
"5",
"]",
"mods",
"=",
"spl",
"[",
"2",
":",
":",
"5",
"]",
"opssecs",
"=",
"spl",
"[",
"3",
":",
":",
"5",
"]",
"indent2s",
"=",
"spl",
"[",
"4",
":",
":",
"5",
"]",
"remainders",
"=",
"spl",
"[",
"5",
":",
":",
"5",
"]",
"# only grab automodsumm sections and convert them to autosummary with the",
"# entries for all the public objects",
"newlines",
"=",
"[",
"]",
"#loop over all automodsumms in this document",
"for",
"i",
",",
"(",
"i1",
",",
"i2",
",",
"modnm",
",",
"ops",
",",
"rem",
")",
"in",
"enumerate",
"(",
"zip",
"(",
"indent1s",
",",
"indent2s",
",",
"mods",
",",
"opssecs",
",",
"remainders",
")",
")",
":",
"allindent",
"=",
"i1",
"+",
"(",
"''",
"if",
"i2",
"is",
"None",
"else",
"i2",
")",
"#filter out functions-only and classes-only options if present",
"oplines",
"=",
"ops",
".",
"split",
"(",
"'\\n'",
")",
"toskip",
"=",
"[",
"]",
"allowedpkgnms",
"=",
"[",
"]",
"funcsonly",
"=",
"clssonly",
"=",
"False",
"for",
"i",
",",
"ln",
"in",
"reversed",
"(",
"list",
"(",
"enumerate",
"(",
"oplines",
")",
")",
")",
":",
"if",
"':functions-only:'",
"in",
"ln",
":",
"funcsonly",
"=",
"True",
"del",
"oplines",
"[",
"i",
"]",
"if",
"':classes-only:'",
"in",
"ln",
":",
"clssonly",
"=",
"True",
"del",
"oplines",
"[",
"i",
"]",
"if",
"':skip:'",
"in",
"ln",
":",
"toskip",
".",
"extend",
"(",
"_str_list_converter",
"(",
"ln",
".",
"replace",
"(",
"':skip:'",
",",
"''",
")",
")",
")",
"del",
"oplines",
"[",
"i",
"]",
"if",
"':allowed-package-names:'",
"in",
"ln",
":",
"allowedpkgnms",
".",
"extend",
"(",
"_str_list_converter",
"(",
"ln",
".",
"replace",
"(",
"':allowed-package-names:'",
",",
"''",
")",
")",
")",
"del",
"oplines",
"[",
"i",
"]",
"if",
"funcsonly",
"and",
"clssonly",
":",
"msg",
"=",
"(",
"'Defined both functions-only and classes-only options. '",
"'Skipping this directive.'",
")",
"lnnum",
"=",
"sum",
"(",
"[",
"spl",
"[",
"j",
"]",
".",
"count",
"(",
"'\\n'",
")",
"for",
"j",
"in",
"range",
"(",
"i",
"*",
"5",
"+",
"1",
")",
"]",
")",
"app",
".",
"warn",
"(",
"'[automodsumm]'",
"+",
"msg",
",",
"(",
"fn",
",",
"lnnum",
")",
")",
"continue",
"# Use the currentmodule directive so we can just put the local names",
"# in the autosummary table. Note that this doesn't always seem to",
"# actually \"take\" in Sphinx's eyes, so in `Automodsumm.run`, we have to",
"# force it internally, as well.",
"newlines",
".",
"extend",
"(",
"[",
"i1",
"+",
"'.. currentmodule:: '",
"+",
"modnm",
",",
"''",
",",
"'.. autosummary::'",
"]",
")",
"newlines",
".",
"extend",
"(",
"oplines",
")",
"ols",
"=",
"True",
"if",
"len",
"(",
"allowedpkgnms",
")",
"==",
"0",
"else",
"allowedpkgnms",
"for",
"nm",
",",
"fqn",
",",
"obj",
"in",
"zip",
"(",
"*",
"find_mod_objs",
"(",
"modnm",
",",
"onlylocals",
"=",
"ols",
")",
")",
":",
"if",
"nm",
"in",
"toskip",
":",
"continue",
"if",
"funcsonly",
"and",
"not",
"inspect",
".",
"isroutine",
"(",
"obj",
")",
":",
"continue",
"if",
"clssonly",
"and",
"not",
"inspect",
".",
"isclass",
"(",
"obj",
")",
":",
"continue",
"newlines",
".",
"append",
"(",
"allindent",
"+",
"nm",
")",
"# add one newline at the end of the autosummary block",
"newlines",
".",
"append",
"(",
"''",
")",
"return",
"newlines"
] |
Returns SAS Logical Interconnects to a consistent state . The current SAS Logical Interconnect state is compared to the associated SAS Logical Interconnect group . | def update_compliance_all ( self , information , timeout = - 1 ) : uri = self . URI + "/compliance" result = self . _helper . update ( information , uri , timeout = timeout ) return result | 251,644 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/networking/sas_logical_interconnects.py#L110-L127 | [
"def",
"get_info",
"(",
"self",
")",
"->",
"dict",
":",
"with",
"suppress_stdout",
"(",
")",
":",
"with",
"youtube_dl",
".",
"YoutubeDL",
"(",
")",
"as",
"ydl",
":",
"info_dict",
"=",
"ydl",
".",
"extract_info",
"(",
"self",
".",
"url",
",",
"download",
"=",
"False",
")",
"return",
"info_dict"
] |
When a drive enclosure has been physically replaced initiate the replacement operation that enables the new drive enclosure to take over as a replacement for the prior drive enclosure . The request requires specification of both the serial numbers of the original drive enclosure and its replacement to be provided . | def replace_drive_enclosure ( self , information ) : uri = "{}/replaceDriveEnclosure" . format ( self . data [ "uri" ] ) result = self . _helper . create ( information , uri ) self . refresh ( ) return result | 251,645 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/networking/sas_logical_interconnects.py#L149-L166 | [
"def",
"add_signal_handler",
"(",
")",
":",
"import",
"signal",
"def",
"handler",
"(",
"sig",
",",
"frame",
")",
":",
"if",
"sig",
"==",
"signal",
".",
"SIGINT",
":",
"librtmp",
".",
"RTMP_UserInterrupt",
"(",
")",
"raise",
"KeyboardInterrupt",
"signal",
".",
"signal",
"(",
"signal",
".",
"SIGINT",
",",
"handler",
")"
] |
Asynchronously applies or re - applies the SAS Logical Interconnect configuration to all managed interconnects of a SAS Logical Interconnect . | def update_configuration ( self ) : uri = "{}/configuration" . format ( self . data [ "uri" ] ) result = self . _helper . update ( { } , uri ) self . refresh ( ) return result | 251,646 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/networking/sas_logical_interconnects.py#L169-L181 | [
"def",
"get_rsa_key",
"(",
"self",
",",
"username",
")",
":",
"try",
":",
"resp",
"=",
"self",
".",
"session",
".",
"post",
"(",
"'https://steamcommunity.com/login/getrsakey/'",
",",
"timeout",
"=",
"15",
",",
"data",
"=",
"{",
"'username'",
":",
"username",
",",
"'donotchache'",
":",
"int",
"(",
"time",
"(",
")",
"*",
"1000",
")",
",",
"}",
",",
")",
".",
"json",
"(",
")",
"except",
"requests",
".",
"exceptions",
".",
"RequestException",
"as",
"e",
":",
"raise",
"HTTPError",
"(",
"str",
"(",
"e",
")",
")",
"return",
"resp"
] |
produce a hash with each key a nedit distance substitution for a set of strings . values of the hash is the set of strings the substitution could have come from | def mutationhash ( strings , nedit ) : maxlen = max ( [ len ( string ) for string in strings ] ) indexes = generate_idx ( maxlen , nedit ) muthash = defaultdict ( set ) for string in strings : muthash [ string ] . update ( [ string ] ) for x in substitution_set ( string , indexes ) : muthash [ x ] . update ( [ string ] ) return muthash | 251,647 | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/barcodes.py#L150-L163 | [
"def",
"assert_interrupt_signal",
"(",
"library",
",",
"session",
",",
"mode",
",",
"status_id",
")",
":",
"return",
"library",
".",
"viAssertIntrSignal",
"(",
"session",
",",
"mode",
",",
"status_id",
")"
] |
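The mutationhash record above depends on helpers (generate_idx, substitution_set) that appear flattened in later records. As a self-contained illustration of the same idea — map every string within edit distance one of a barcode back to the barcodes it could have come from — here is a minimal sketch; one_edit_variants is a simplified stand-in and not part of the umis API:

```python
from collections import defaultdict

def one_edit_variants(barcode, alphabet="ACGTN"):
    """Yield every string one substitution away from barcode (including itself)."""
    for i in range(len(barcode)):
        for base in alphabet:
            yield barcode[:i] + base + barcode[i + 1:]

def mutation_hash(barcodes):
    """Map each possible 1-edit observation to the set of true barcodes it may be."""
    muthash = defaultdict(set)
    for bc in barcodes:
        muthash[bc].add(bc)
        for variant in one_edit_variants(bc):
            muthash[variant].add(bc)
    return muthash

# A sequenced barcode carrying one substitution error still resolves to "ACGT".
lookup = mutation_hash({"ACGT", "TTTT"})
assert lookup["ACGA"] == {"ACGT"}
```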
for a string return a set of all possible substitutions | def substitution_set ( string , indexes ) : strlen = len ( string ) return { mutate_string ( string , x ) for x in indexes if valid_substitution ( strlen , x ) } | 251,648 | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/barcodes.py#L165-L170 | [
"def",
"logline_timestamp_comparator",
"(",
"t1",
",",
"t2",
")",
":",
"dt1",
"=",
"_parse_logline_timestamp",
"(",
"t1",
")",
"dt2",
"=",
"_parse_logline_timestamp",
"(",
"t2",
")",
"for",
"u1",
",",
"u2",
"in",
"zip",
"(",
"dt1",
",",
"dt2",
")",
":",
"if",
"u1",
"<",
"u2",
":",
"return",
"-",
"1",
"elif",
"u1",
">",
"u2",
":",
"return",
"1",
"return",
"0"
] |
skip performing substitutions that are outside the bounds of the string | def valid_substitution ( strlen , index ) : values = index [ 0 ] return all ( [ strlen > i for i in values ] ) | 251,649 | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/barcodes.py#L172-L177 | [
"def",
"calculate_priority",
"(",
"ratios",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"not",
"ratios",
":",
"ratios",
"=",
"PRIORITY_FEATURE_WEIGHTS",
"scores",
"=",
"[",
"DEFAULT_PRIORITY_SCORE",
"]",
"for",
"key",
",",
"value",
"in",
"kwargs",
".",
"items",
"(",
")",
":",
"if",
"key",
"not",
"in",
"PRIORITY_FEATURE_WEIGHTS",
":",
"raise",
"KeyError",
"(",
"'The following keyword arguments are supported: '",
"'{keys}'",
".",
"format",
"(",
"keys",
"=",
"PRIORITY_FEATURES",
")",
")",
"if",
"value",
"is",
"True",
":",
"scores",
".",
"append",
"(",
"PRIORITY_FEATURE_WEIGHTS",
"[",
"key",
"]",
")",
"return",
"float",
"(",
"sum",
"(",
"scores",
")",
")",
"/",
"len",
"(",
"scores",
")"
] |
returns True if string consists of only A C G T | def acgt_match ( string ) : search = re . compile ( r'[^ACGT]' ) . search return not bool ( search ( string ) ) | 251,650 | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/barcodes.py#L194-L199 | [
"def",
"merge_setting",
"(",
"request_setting",
",",
"session_setting",
",",
"dict_class",
"=",
"OrderedDict",
")",
":",
"if",
"session_setting",
"is",
"None",
":",
"return",
"request_setting",
"if",
"request_setting",
"is",
"None",
":",
"return",
"session_setting",
"# Bypass if not a dictionary (e.g. verify)",
"if",
"not",
"(",
"isinstance",
"(",
"session_setting",
",",
"Mapping",
")",
"and",
"isinstance",
"(",
"request_setting",
",",
"Mapping",
")",
")",
":",
"return",
"request_setting",
"merged_setting",
"=",
"dict_class",
"(",
"to_key_val_list",
"(",
"session_setting",
")",
")",
"merged_setting",
".",
"update",
"(",
"to_key_val_list",
"(",
"request_setting",
")",
")",
"# Remove keys that are set to None.",
"for",
"(",
"k",
",",
"v",
")",
"in",
"request_setting",
".",
"items",
"(",
")",
":",
"if",
"v",
"is",
"None",
":",
"del",
"merged_setting",
"[",
"k",
"]",
"merged_setting",
"=",
"dict",
"(",
"(",
"k",
",",
"v",
")",
"for",
"(",
"k",
",",
"v",
")",
"in",
"merged_setting",
".",
"items",
"(",
")",
"if",
"v",
"is",
"not",
"None",
")",
"return",
"merged_setting"
] |
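Reassembled from the flattened positive above, acgt_match is small enough to show with a quick usage check (the asserts are illustrative additions, not part of the original):

```python
import re

def acgt_match(string):
    """Return True if the string consists only of A, C, G, T."""
    search = re.compile(r'[^ACGT]').search
    return not bool(search(string))

assert acgt_match("ACGTAC")
assert not acgt_match("ACGTN")
```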
Generator which gives all four lines of a fastq read as one string | def stream_fastq ( file_handler ) : next_element = '' for i , line in enumerate ( file_handler ) : next_element += line if i % 4 == 3 : yield next_element next_element = '' | 251,651 | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L59-L67 | [
"def",
"numRegisteredForRole",
"(",
"self",
",",
"role",
",",
"includeTemporaryRegs",
"=",
"False",
")",
":",
"count",
"=",
"self",
".",
"eventregistration_set",
".",
"filter",
"(",
"cancelled",
"=",
"False",
",",
"dropIn",
"=",
"False",
",",
"role",
"=",
"role",
")",
".",
"count",
"(",
")",
"if",
"includeTemporaryRegs",
":",
"count",
"+=",
"self",
".",
"temporaryeventregistration_set",
".",
"filter",
"(",
"dropIn",
"=",
"False",
",",
"role",
"=",
"role",
")",
".",
"exclude",
"(",
"registration__expirationDate__lte",
"=",
"timezone",
".",
"now",
"(",
")",
")",
".",
"count",
"(",
")",
"return",
"count"
] |
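The stream_fastq generator above groups a FASTQ stream into four-line records; reassembled and driven with an in-memory handle (the sample reads are invented for illustration):

```python
import io

def stream_fastq(file_handler):
    """Yield each 4-line FASTQ entry as one string."""
    next_element = ''
    for i, line in enumerate(file_handler):
        next_element += line
        if i % 4 == 3:
            yield next_element
            next_element = ''

fastq = io.StringIO("@r1\nACGT\n+\nIIII\n@r2\nTTTT\n+\nIIII\n")
assert len(list(stream_fastq(fastq))) == 2
```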
return a stream of FASTQ entries handling gzipped and empty files | def read_fastq ( filename ) : if not filename : return itertools . cycle ( ( None , ) ) if filename == "-" : filename_fh = sys . stdin elif filename . endswith ( 'gz' ) : if is_python3 : filename_fh = gzip . open ( filename , mode = 'rt' ) else : filename_fh = BufferedReader ( gzip . open ( filename , mode = 'rt' ) ) else : filename_fh = open ( filename ) return stream_fastq ( filename_fh ) | 251,652 | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L69-L84 | [
"def",
"plot_sphere",
"(",
"ax",
",",
"center",
",",
"radius",
",",
"color",
"=",
"'black'",
",",
"alpha",
"=",
"1.",
",",
"linspace_count",
"=",
"_LINSPACE_COUNT",
")",
":",
"u",
"=",
"np",
".",
"linspace",
"(",
"0",
",",
"2",
"*",
"np",
".",
"pi",
",",
"linspace_count",
")",
"v",
"=",
"np",
".",
"linspace",
"(",
"0",
",",
"np",
".",
"pi",
",",
"linspace_count",
")",
"sin_v",
"=",
"np",
".",
"sin",
"(",
"v",
")",
"x",
"=",
"center",
"[",
"0",
"]",
"+",
"radius",
"*",
"np",
".",
"outer",
"(",
"np",
".",
"cos",
"(",
"u",
")",
",",
"sin_v",
")",
"y",
"=",
"center",
"[",
"1",
"]",
"+",
"radius",
"*",
"np",
".",
"outer",
"(",
"np",
".",
"sin",
"(",
"u",
")",
",",
"sin_v",
")",
"z",
"=",
"center",
"[",
"2",
"]",
"+",
"radius",
"*",
"np",
".",
"outer",
"(",
"np",
".",
"ones_like",
"(",
"u",
")",
",",
"np",
".",
"cos",
"(",
"v",
")",
")",
"ax",
".",
"plot_surface",
"(",
"x",
",",
"y",
",",
"z",
",",
"linewidth",
"=",
"0.0",
",",
"color",
"=",
"color",
",",
"alpha",
"=",
"alpha",
")"
] |
return a handle for FASTQ writing handling gzipped files | def write_fastq ( filename ) : if filename : if filename . endswith ( 'gz' ) : filename_fh = gzip . open ( filename , mode = 'wb' ) else : filename_fh = open ( filename , mode = 'w' ) else : filename_fh = None return filename_fh | 251,653 | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L96-L107 | [
"def",
"_intersection",
"(",
"self",
",",
"keys",
",",
"rows",
")",
":",
"# If there are no other keys with start and end date (i.e. nothing to merge) return immediately.",
"if",
"not",
"keys",
":",
"return",
"rows",
"ret",
"=",
"list",
"(",
")",
"for",
"row",
"in",
"rows",
":",
"start_date",
"=",
"row",
"[",
"self",
".",
"_key_start_date",
"]",
"end_date",
"=",
"row",
"[",
"self",
".",
"_key_end_date",
"]",
"for",
"key_start_date",
",",
"key_end_date",
"in",
"keys",
":",
"start_date",
",",
"end_date",
"=",
"Type2JoinHelper",
".",
"_intersect",
"(",
"start_date",
",",
"end_date",
",",
"row",
"[",
"key_start_date",
"]",
",",
"row",
"[",
"key_end_date",
"]",
")",
"if",
"not",
"start_date",
":",
"break",
"if",
"key_start_date",
"not",
"in",
"[",
"self",
".",
"_key_start_date",
",",
"self",
".",
"_key_end_date",
"]",
":",
"del",
"row",
"[",
"key_start_date",
"]",
"if",
"key_end_date",
"not",
"in",
"[",
"self",
".",
"_key_start_date",
",",
"self",
".",
"_key_end_date",
"]",
":",
"del",
"row",
"[",
"key_end_date",
"]",
"if",
"start_date",
":",
"row",
"[",
"self",
".",
"_key_start_date",
"]",
"=",
"start_date",
"row",
"[",
"self",
".",
"_key_end_date",
"]",
"=",
"end_date",
"ret",
".",
"append",
"(",
"row",
")",
"return",
"ret"
] |
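write_fastq above is a small gzip-aware handle factory; reassembled for readability (behaviour unchanged: a falsy filename returns None):

```python
import gzip

def write_fastq(filename):
    """Return a write handle for FASTQ output, gzip-compressing *.gz names."""
    if filename:
        if filename.endswith('gz'):
            filename_fh = gzip.open(filename, mode='wb')
        else:
            filename_fh = open(filename, mode='w')
    else:
        filename_fh = None
    return filename_fh
```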
detects the annotations present in a SAM file inspecting either the tags or the query names and returns a set of annotations present | def detect_alignment_annotations ( queryalignment , tags = False ) : annotations = set ( ) for k , v in BARCODEINFO . items ( ) : if tags : if queryalignment . has_tag ( v . bamtag ) : annotations . add ( k ) else : if v . readprefix in queryalignment . qname : annotations . add ( k ) return annotations | 251,654 | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L122-L135 | [
"def",
"synchronize_simultaneous",
"(",
"self",
",",
"node_ip",
")",
":",
"for",
"candidate",
"in",
"self",
".",
"factory",
".",
"candidates",
"[",
"node_ip",
"]",
":",
"# Only if candidate is connected.\r",
"if",
"not",
"candidate",
"[",
"\"con\"",
"]",
".",
"connected",
":",
"continue",
"# Synchronise simultaneous node.\r",
"if",
"candidate",
"[",
"\"time\"",
"]",
"-",
"self",
".",
"factory",
".",
"nodes",
"[",
"\"simultaneous\"",
"]",
"[",
"node_ip",
"]",
"[",
"\"time\"",
"]",
">",
"self",
".",
"challege_timeout",
":",
"msg",
"=",
"\"RECONNECT\"",
"self",
".",
"factory",
".",
"nodes",
"[",
"\"simultaneous\"",
"]",
"[",
"node_ip",
"]",
"[",
"\"con\"",
"]",
".",
"send_line",
"(",
"msg",
")",
"return",
"self",
".",
"cleanup_candidates",
"(",
"node_ip",
")",
"self",
".",
"propogate_candidates",
"(",
"node_ip",
")"
] |
detects annotations present in a FASTQ file by examining the first read | def detect_fastq_annotations ( fastq_file ) : annotations = set ( ) queryread = tz . first ( read_fastq ( fastq_file ) ) for k , v in BARCODEINFO . items ( ) : if v . readprefix in queryread : annotations . add ( k ) return annotations | 251,655 | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L137-L146 | [
"def",
"user_deleted_from_site_event",
"(",
"event",
")",
":",
"userid",
"=",
"event",
".",
"principal",
"catalog",
"=",
"api",
".",
"portal",
".",
"get_tool",
"(",
"'portal_catalog'",
")",
"query",
"=",
"{",
"'object_provides'",
":",
"WORKSPACE_INTERFACE",
"}",
"query",
"[",
"'workspace_members'",
"]",
"=",
"userid",
"workspaces",
"=",
"[",
"IWorkspace",
"(",
"b",
".",
"_unrestrictedGetObject",
"(",
")",
")",
"for",
"b",
"in",
"catalog",
".",
"unrestrictedSearchResults",
"(",
"query",
")",
"]",
"for",
"workspace",
"in",
"workspaces",
":",
"workspace",
".",
"remove_from_team",
"(",
"userid",
")"
] |
construct a regex that matches possible fields in a transformed file . annotations is a set of which keys in BARCODEINFO are present in the file | def construct_transformed_regex ( annotations ) : re_string = '.*' if "cellular" in annotations : re_string += ":CELL_(?P<CB>.*)" if "molecular" in annotations : re_string += ":UMI_(?P<MB>\w*)" if "sample" in annotations : re_string += ":SAMPLE_(?P<SB>\w*)" if re_string == ".*" : logger . error ( "No annotation present on this file, aborting." ) sys . exit ( 1 ) return re_string | 251,656 | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L148-L163 | [
"def",
"offer_random",
"(",
"pool",
",",
"answer",
",",
"rationale",
",",
"student_id",
",",
"options",
")",
":",
"offer_simple",
"(",
"pool",
",",
"answer",
",",
"rationale",
",",
"student_id",
",",
"options",
")"
] |
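The regex built by construct_transformed_regex above can be exercised on a transformed read name. This sketch keeps the string-building logic and drops the logging/exit branch for brevity; the sample read name is invented:

```python
import re

def construct_transformed_regex(annotations):
    """Build a regex matching the CELL_/UMI_/SAMPLE_ fields present in annotations."""
    re_string = '.*'
    if "cellular" in annotations:
        re_string += ":CELL_(?P<CB>.*)"
    if "molecular" in annotations:
        re_string += ":UMI_(?P<MB>\\w*)"
    if "sample" in annotations:
        re_string += ":SAMPLE_(?P<SB>\\w*)"
    return re_string

pattern = re.compile(construct_transformed_regex({"cellular", "molecular"}))
match = pattern.search("@read1:CELL_ACGT:UMI_TTAA")
assert match.group("CB") == "ACGT" and match.group("MB") == "TTAA"
```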
figure out what transform options should be by examining the provided regexes for keywords | def _infer_transform_options ( transform ) : TransformOptions = collections . namedtuple ( "TransformOptions" , [ 'CB' , 'dual_index' , 'triple_index' , 'MB' , 'SB' ] ) CB = False SB = False MB = False dual_index = False triple_index = False for rx in transform . values ( ) : if not rx : continue if "CB1" in rx : if "CB3" in rx : triple_index = True else : dual_index = True if "SB" in rx : SB = True if "CB" in rx : CB = True if "MB" in rx : MB = True return TransformOptions ( CB = CB , dual_index = dual_index , triple_index = triple_index , MB = MB , SB = SB ) | 251,657 | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L306-L332 | [
"def",
"generation",
"(",
"self",
")",
":",
"with",
"self",
".",
"_lock",
":",
"if",
"self",
".",
"state",
"is",
"not",
"MemberState",
".",
"STABLE",
":",
"return",
"None",
"return",
"self",
".",
"_generation"
] |
Extract read numbers from old - style fastqs . | def _extract_readnum ( read_dict ) : pat = re . compile ( r"(?P<readnum>/\d+)$" ) parts = pat . split ( read_dict [ "name" ] ) if len ( parts ) == 3 : name , readnum , endofline = parts read_dict [ "name" ] = name read_dict [ "readnum" ] = readnum else : read_dict [ "readnum" ] = "" return read_dict | 251,658 | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L334-L348 | [
"def",
"is_registration_possible",
"(",
"self",
",",
"user_info",
")",
":",
"return",
"self",
".",
"get_accessibility",
"(",
")",
".",
"is_open",
"(",
")",
"and",
"self",
".",
"_registration",
".",
"is_open",
"(",
")",
"and",
"self",
".",
"is_user_accepted_by_access_control",
"(",
"user_info",
")"
] |
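_extract_readnum above splits an old-style /1 or /2 suffix off the read name in place; reassembled, with a small check (the example name is invented):

```python
import re

def _extract_readnum(read_dict):
    """Move a trailing /N read-number suffix from "name" into "readnum"."""
    pat = re.compile(r"(?P<readnum>/\d+)$")
    parts = pat.split(read_dict["name"])
    if len(parts) == 3:
        name, readnum, _ = parts
        read_dict["name"] = name
        read_dict["readnum"] = readnum
    else:
        read_dict["readnum"] = ""
    return read_dict

assert _extract_readnum({"name": "HWI:1234/1"}) == {"name": "HWI:1234", "readnum": "/1"}
```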
Convert a CSV file to a sparse matrix with rows and column names saved as companion files . | def sparse ( csv , sparse ) : import pandas as pd df = pd . read_csv ( csv , index_col = 0 , header = 0 ) pd . Series ( df . index ) . to_csv ( sparse + ".rownames" , index = False ) pd . Series ( df . columns . values ) . to_csv ( sparse + ".colnames" , index = False ) with open ( sparse , "w+b" ) as out_handle : scipy . io . mmwrite ( out_handle , scipy . sparse . csr_matrix ( df ) ) | 251,659 | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L943-L952 | [
"def",
"_visit_te_shape",
"(",
"self",
",",
"shape",
":",
"ShExJ",
".",
"shapeExpr",
",",
"visit_center",
":",
"_VisitorCenter",
")",
"->",
"None",
":",
"if",
"isinstance",
"(",
"shape",
",",
"ShExJ",
".",
"Shape",
")",
"and",
"shape",
".",
"expression",
"is",
"not",
"None",
":",
"visit_center",
".",
"f",
"(",
"visit_center",
".",
"arg_cntxt",
",",
"shape",
".",
"expression",
",",
"self",
")"
] |
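The sparse record above boils down to: read a dense CSV, save the row and column names as companion files, and write the values as a Matrix Market sparse matrix. A standalone sketch with a clearer signature (the original takes its two path arguments as csv and sparse); pandas and scipy are assumed to be installed:

```python
import pandas as pd
import scipy.io
import scipy.sparse

def csv_to_sparse(csv_path, out_path):
    """Write a dense CSV back out as a sparse .mtx matrix plus name companion files."""
    df = pd.read_csv(csv_path, index_col=0, header=0)
    pd.Series(df.index).to_csv(out_path + ".rownames", index=False)
    pd.Series(df.columns.values).to_csv(out_path + ".colnames", index=False)
    with open(out_path, "w+b") as out_handle:
        scipy.io.mmwrite(out_handle, scipy.sparse.csr_matrix(df))
```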
Counts the number of reads for each cellular barcode | def cb_histogram ( fastq , umi_histogram ) : annotations = detect_fastq_annotations ( fastq ) re_string = construct_transformed_regex ( annotations ) parser_re = re . compile ( re_string ) cb_counter = collections . Counter ( ) umi_counter = collections . Counter ( ) for read in read_fastq ( fastq ) : match = parser_re . search ( read ) . groupdict ( ) cb = match [ 'CB' ] cb_counter [ cb ] += 1 if umi_histogram : umi = match [ 'MB' ] umi_counter [ ( cb , umi ) ] += 1 for bc , count in cb_counter . most_common ( ) : sys . stdout . write ( '{}\t{}\n' . format ( bc , count ) ) if umi_histogram : with open ( umi_histogram , "w" ) as umi_handle : for cbumi , count in umi_counter . most_common ( ) : umi_handle . write ( '{}\t{}\t{}\n' . format ( cbumi [ 0 ] , cbumi [ 1 ] , count ) ) | 251,660 | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L959-L985 | [
"def",
"state",
"(",
"self",
",",
"time",
"=",
"None",
")",
":",
"if",
"time",
"is",
"None",
":",
"return",
"max",
"(",
"self",
".",
"infos",
"(",
"type",
"=",
"State",
")",
",",
"key",
"=",
"attrgetter",
"(",
"'creation_time'",
")",
")",
"else",
":",
"states",
"=",
"[",
"s",
"for",
"s",
"in",
"self",
".",
"infos",
"(",
"type",
"=",
"State",
")",
"if",
"s",
".",
"creation_time",
"<",
"time",
"]",
"return",
"max",
"(",
"states",
",",
"key",
"=",
"attrgetter",
"(",
"'creation_time'",
")",
")"
] |
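The heart of cb_histogram above is a Counter keyed on the CELL_ group of the transformed read name. Stripped of the file handling and run on a couple of invented read names:

```python
import collections
import re

reads = ["@r1:CELL_AAAA:UMI_CCCC", "@r2:CELL_AAAA:UMI_GGGG", "@r3:CELL_TTTT:UMI_CCCC"]
parser_re = re.compile(r".*:CELL_(?P<CB>.*):UMI_(?P<MB>\w*)")

cb_counter = collections.Counter()
for read in reads:
    cb_counter[parser_re.search(read).groupdict()["CB"]] += 1

assert cb_counter.most_common(1) == [("AAAA", 2)]
```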
Counts the number of reads for each UMI | def umi_histogram ( fastq ) : annotations = detect_fastq_annotations ( fastq ) re_string = construct_transformed_regex ( annotations ) parser_re = re . compile ( re_string ) counter = collections . Counter ( ) for read in read_fastq ( fastq ) : match = parser_re . search ( read ) . groupdict ( ) counter [ match [ 'MB' ] ] += 1 for bc , count in counter . most_common ( ) : sys . stdout . write ( '{}\t{}\n' . format ( bc , count ) ) | 251,661 | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L989-L1004 | [
"def",
"_create_download_failed_message",
"(",
"exception",
",",
"url",
")",
":",
"message",
"=",
"'Failed to download from:\\n{}\\nwith {}:\\n{}'",
".",
"format",
"(",
"url",
",",
"exception",
".",
"__class__",
".",
"__name__",
",",
"exception",
")",
"if",
"_is_temporal_problem",
"(",
"exception",
")",
":",
"if",
"isinstance",
"(",
"exception",
",",
"requests",
".",
"ConnectionError",
")",
":",
"message",
"+=",
"'\\nPlease check your internet connection and try again.'",
"else",
":",
"message",
"+=",
"'\\nThere might be a problem in connection or the server failed to process '",
"'your request. Please try again.'",
"elif",
"isinstance",
"(",
"exception",
",",
"requests",
".",
"HTTPError",
")",
":",
"try",
":",
"server_message",
"=",
"''",
"for",
"elem",
"in",
"decode_data",
"(",
"exception",
".",
"response",
".",
"content",
",",
"MimeType",
".",
"XML",
")",
":",
"if",
"'ServiceException'",
"in",
"elem",
".",
"tag",
"or",
"'Message'",
"in",
"elem",
".",
"tag",
":",
"server_message",
"+=",
"elem",
".",
"text",
".",
"strip",
"(",
"'\\n\\t '",
")",
"except",
"ElementTree",
".",
"ParseError",
":",
"server_message",
"=",
"exception",
".",
"response",
".",
"text",
"message",
"+=",
"'\\nServer response: \"{}\"'",
".",
"format",
"(",
"server_message",
")",
"return",
"message"
] |
Returns a set of barcodes with a minimum number of reads | def get_cb_depth_set ( cb_histogram , cb_cutoff ) : cb_keep_set = set ( ) if not cb_histogram : return cb_keep_set with read_cbhistogram ( cb_histogram ) as fh : cb_map = dict ( p . strip ( ) . split ( ) for p in fh ) cb_keep_set = set ( [ k for k , v in cb_map . items ( ) if int ( v ) > cb_cutoff ] ) logger . info ( 'Keeping %d out of %d cellular barcodes.' % ( len ( cb_keep_set ) , len ( cb_map ) ) ) return cb_keep_set | 251,662 | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L1006-L1018 | [
"def",
"_couple_nic",
"(",
"self",
",",
"userid",
",",
"vdev",
",",
"vswitch_name",
",",
"active",
"=",
"False",
")",
":",
"if",
"active",
":",
"self",
".",
"_is_active",
"(",
"userid",
")",
"msg",
"=",
"(",
"'Start to couple nic device %(vdev)s of guest %(vm)s '",
"'with vswitch %(vsw)s'",
"%",
"{",
"'vdev'",
":",
"vdev",
",",
"'vm'",
":",
"userid",
",",
"'vsw'",
":",
"vswitch_name",
"}",
")",
"LOG",
".",
"info",
"(",
"msg",
")",
"requestData",
"=",
"' '",
".",
"join",
"(",
"(",
"'SMAPI %s'",
"%",
"userid",
",",
"\"API Virtual_Network_Adapter_Connect_Vswitch_DM\"",
",",
"\"--operands\"",
",",
"\"-v %s\"",
"%",
"vdev",
",",
"\"-n %s\"",
"%",
"vswitch_name",
")",
")",
"try",
":",
"self",
".",
"_request",
"(",
"requestData",
")",
"except",
"exception",
".",
"SDKSMTRequestFailed",
"as",
"err",
":",
"LOG",
".",
"error",
"(",
"\"Failed to couple nic %s to vswitch %s for user %s \"",
"\"in the guest's user direct, error: %s\"",
"%",
"(",
"vdev",
",",
"vswitch_name",
",",
"userid",
",",
"err",
".",
"format_message",
"(",
")",
")",
")",
"self",
".",
"_couple_inactive_exception",
"(",
"err",
",",
"userid",
",",
"vdev",
",",
"vswitch_name",
")",
"# the inst must be active, or this call will failed",
"if",
"active",
":",
"requestData",
"=",
"' '",
".",
"join",
"(",
"(",
"'SMAPI %s'",
"%",
"userid",
",",
"'API Virtual_Network_Adapter_Connect_Vswitch'",
",",
"\"--operands\"",
",",
"\"-v %s\"",
"%",
"vdev",
",",
"\"-n %s\"",
"%",
"vswitch_name",
")",
")",
"try",
":",
"self",
".",
"_request",
"(",
"requestData",
")",
"except",
"(",
"exception",
".",
"SDKSMTRequestFailed",
",",
"exception",
".",
"SDKInternalError",
")",
"as",
"err1",
":",
"results1",
"=",
"err1",
".",
"results",
"msg1",
"=",
"err1",
".",
"format_message",
"(",
")",
"if",
"(",
"(",
"results1",
"is",
"not",
"None",
")",
"and",
"(",
"results1",
"[",
"'rc'",
"]",
"==",
"204",
")",
"and",
"(",
"results1",
"[",
"'rs'",
"]",
"==",
"20",
")",
")",
":",
"LOG",
".",
"warning",
"(",
"\"Virtual device %s already connected \"",
"\"on the active guest system\"",
",",
"vdev",
")",
"else",
":",
"persist_OK",
"=",
"True",
"requestData",
"=",
"' '",
".",
"join",
"(",
"(",
"'SMAPI %s'",
"%",
"userid",
",",
"'API Virtual_Network_Adapter_Disconnect_DM'",
",",
"\"--operands\"",
",",
"'-v %s'",
"%",
"vdev",
")",
")",
"try",
":",
"self",
".",
"_request",
"(",
"requestData",
")",
"except",
"(",
"exception",
".",
"SDKSMTRequestFailed",
",",
"exception",
".",
"SDKInternalError",
")",
"as",
"err2",
":",
"results2",
"=",
"err2",
".",
"results",
"msg2",
"=",
"err2",
".",
"format_message",
"(",
")",
"if",
"(",
"(",
"results2",
"is",
"not",
"None",
")",
"and",
"(",
"results2",
"[",
"'rc'",
"]",
"==",
"212",
")",
"and",
"(",
"results2",
"[",
"'rs'",
"]",
"==",
"32",
")",
")",
":",
"persist_OK",
"=",
"True",
"else",
":",
"persist_OK",
"=",
"False",
"if",
"persist_OK",
":",
"self",
".",
"_couple_active_exception",
"(",
"err1",
",",
"userid",
",",
"vdev",
",",
"vswitch_name",
")",
"else",
":",
"raise",
"exception",
".",
"SDKNetworkOperationError",
"(",
"rs",
"=",
"3",
",",
"nic",
"=",
"vdev",
",",
"vswitch",
"=",
"vswitch_name",
",",
"couple_err",
"=",
"msg1",
",",
"revoke_err",
"=",
"msg2",
")",
"\"\"\"Update information in switch table.\"\"\"",
"self",
".",
"_NetDbOperator",
".",
"switch_update_record_with_switch",
"(",
"userid",
",",
"vdev",
",",
"vswitch_name",
")",
"msg",
"=",
"(",
"'Couple nic device %(vdev)s of guest %(vm)s '",
"'with vswitch %(vsw)s successfully'",
"%",
"{",
"'vdev'",
":",
"vdev",
",",
"'vm'",
":",
"userid",
",",
"'vsw'",
":",
"vswitch_name",
"}",
")",
"LOG",
".",
"info",
"(",
"msg",
")"
] |
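get_cb_depth_set above keeps the barcodes whose histogram count exceeds a cutoff. A simplified sketch that takes the histogram as an iterable of lines instead of a file path (the file-handle and logging plumbing is dropped):

```python
def cb_depth_set(histogram_lines, cb_cutoff):
    """Return the barcodes whose read count is strictly above the cutoff."""
    cb_map = dict(line.strip().split() for line in histogram_lines)
    return {bc for bc, count in cb_map.items() if int(count) > cb_cutoff}

assert cb_depth_set(["AAAA\t100", "TTTT\t3"], 10) == {"AAAA"}
```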
Guesses at an appropriate barcode cutoff | def guess_depth_cutoff ( cb_histogram ) : with read_cbhistogram ( cb_histogram ) as fh : cb_vals = [ int ( p . strip ( ) . split ( ) [ 1 ] ) for p in fh ] histo = np . histogram ( np . log10 ( cb_vals ) , bins = 50 ) vals = histo [ 0 ] edges = histo [ 1 ] mids = np . array ( [ ( edges [ i ] + edges [ i + 1 ] ) / 2 for i in range ( edges . size - 1 ) ] ) wdensity = vals * ( 10 ** mids ) / sum ( vals * ( 10 ** mids ) ) baseline = np . median ( wdensity ) wdensity = list ( wdensity ) # find highest density in upper half of barcode distribution peak = wdensity . index ( max ( wdensity [ len ( wdensity ) / 2 : ] ) ) cutoff = None for index , dens in reversed ( list ( enumerate ( wdensity [ 1 : peak ] ) ) ) : if dens < 2 * baseline : cutoff = index break if not cutoff : return None else : cutoff = 10 ** mids [ cutoff ] logger . info ( 'Setting barcode cutoff to %d' % cutoff ) return cutoff | 251,663 | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L1020-L1044 | [
"def",
"to_dict",
"(",
"self",
",",
"include_readonly",
"=",
"True",
",",
"field_names",
"=",
"None",
")",
":",
"fields",
"=",
"self",
".",
"fields",
"(",
"writable",
"=",
"not",
"include_readonly",
")",
"if",
"field_names",
"is",
"not",
"None",
":",
"fields",
"=",
"[",
"f",
"for",
"f",
"in",
"fields",
"if",
"f",
"in",
"field_names",
"]",
"data",
"=",
"self",
".",
"__dict__",
"return",
"{",
"name",
":",
"data",
"[",
"name",
"]",
"for",
"name",
"in",
"fields",
"}"
] |
Filters reads with non - matching barcodes Expects formatted fastq files . | def cb_filter ( fastq , bc1 , bc2 , bc3 , cores , nedit ) : with open_gzipsafe ( bc1 ) as bc1_fh : bc1 = set ( cb . strip ( ) for cb in bc1_fh ) if bc2 : with open_gzipsafe ( bc2 ) as bc2_fh : bc2 = set ( cb . strip ( ) for cb in bc2_fh ) if bc3 : with open_gzipsafe ( bc3 ) as bc3_fh : bc3 = set ( cb . strip ( ) for cb in bc3_fh ) annotations = detect_fastq_annotations ( fastq ) re_string = construct_transformed_regex ( annotations ) if nedit == 0 : filter_cb = partial ( exact_barcode_filter , bc1 = bc1 , bc2 = bc2 , bc3 = bc3 , re_string = re_string ) else : bc1hash = MutationHash ( bc1 , nedit ) bc2hash = None bc3hash = None if bc2 : bc2hash = MutationHash ( bc2 , nedit ) if bc3 : bc3hash = MutationHash ( bc3 , nedit ) filter_cb = partial ( correcting_barcode_filter , bc1hash = bc1hash , bc2hash = bc2hash , bc3hash = bc3hash , re_string = re_string ) p = multiprocessing . Pool ( cores ) chunks = tz . partition_all ( 10000 , read_fastq ( fastq ) ) bigchunks = tz . partition_all ( cores , chunks ) for bigchunk in bigchunks : for chunk in p . map ( filter_cb , list ( bigchunk ) ) : for read in chunk : sys . stdout . write ( read ) | 251,664 | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L1053-L1090 | [
"def",
"grant_user_to_vswitch",
"(",
"self",
",",
"vswitch_name",
",",
"userid",
")",
":",
"smt_userid",
"=",
"zvmutils",
".",
"get_smt_userid",
"(",
")",
"requestData",
"=",
"' '",
".",
"join",
"(",
"(",
"'SMAPI %s API Virtual_Network_Vswitch_Set_Extended'",
"%",
"smt_userid",
",",
"\"--operands\"",
",",
"\"-k switch_name=%s\"",
"%",
"vswitch_name",
",",
"\"-k grant_userid=%s\"",
"%",
"userid",
",",
"\"-k persist=YES\"",
")",
")",
"try",
":",
"self",
".",
"_request",
"(",
"requestData",
")",
"except",
"exception",
".",
"SDKSMTRequestFailed",
"as",
"err",
":",
"LOG",
".",
"error",
"(",
"\"Failed to grant user %s to vswitch %s, error: %s\"",
"%",
"(",
"userid",
",",
"vswitch_name",
",",
"err",
".",
"format_message",
"(",
")",
")",
")",
"self",
".",
"_set_vswitch_exception",
"(",
"err",
",",
"vswitch_name",
")"
] |
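cb_filter above (and sb_filter/mb_filter below) share the same pattern: chunk the read stream, partial-bind the filter's arguments, and fan the chunks out over a multiprocessing.Pool. A standard-library-only sketch of that pattern — partition_all stands in for toolz.partition_all, and keep_if_prefix is a made-up placeholder for the real barcode filters:

```python
import multiprocessing
from functools import partial
from itertools import islice

def partition_all(n, iterable):
    """Yield successive chunks of at most n items (stand-in for toolz.partition_all)."""
    it = iter(iterable)
    while True:
        chunk = list(islice(it, n))
        if not chunk:
            return
        yield chunk

def keep_if_prefix(chunk, prefix):
    """Placeholder filter: keep reads that start with the given prefix."""
    return [read for read in chunk if read.startswith(prefix)]

if __name__ == "__main__":
    reads = ["ACGT-read1", "TTTT-read2", "ACGT-read3"]
    filt = partial(keep_if_prefix, prefix="ACGT")
    with multiprocessing.Pool(2) as pool:
        kept = [r for chunk in pool.map(filt, partition_all(2, reads)) for r in chunk]
    print(kept)  # ['ACGT-read1', 'ACGT-read3']
```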
Filters reads with non - matching sample barcodes Expects formatted fastq files . | def sb_filter ( fastq , bc , cores , nedit ) : barcodes = set ( sb . strip ( ) for sb in bc ) if nedit == 0 : filter_sb = partial ( exact_sample_filter2 , barcodes = barcodes ) else : barcodehash = MutationHash ( barcodes , nedit ) filter_sb = partial ( correcting_sample_filter2 , barcodehash = barcodehash ) p = multiprocessing . Pool ( cores ) chunks = tz . partition_all ( 10000 , read_fastq ( fastq ) ) bigchunks = tz . partition_all ( cores , chunks ) for bigchunk in bigchunks : for chunk in p . map ( filter_sb , list ( bigchunk ) ) : for read in chunk : sys . stdout . write ( read ) | 251,665 | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L1097-L1114 | [
"def",
"grant_user_to_vswitch",
"(",
"self",
",",
"vswitch_name",
",",
"userid",
")",
":",
"smt_userid",
"=",
"zvmutils",
".",
"get_smt_userid",
"(",
")",
"requestData",
"=",
"' '",
".",
"join",
"(",
"(",
"'SMAPI %s API Virtual_Network_Vswitch_Set_Extended'",
"%",
"smt_userid",
",",
"\"--operands\"",
",",
"\"-k switch_name=%s\"",
"%",
"vswitch_name",
",",
"\"-k grant_userid=%s\"",
"%",
"userid",
",",
"\"-k persist=YES\"",
")",
")",
"try",
":",
"self",
".",
"_request",
"(",
"requestData",
")",
"except",
"exception",
".",
"SDKSMTRequestFailed",
"as",
"err",
":",
"LOG",
".",
"error",
"(",
"\"Failed to grant user %s to vswitch %s, error: %s\"",
"%",
"(",
"userid",
",",
"vswitch_name",
",",
"err",
".",
"format_message",
"(",
")",
")",
")",
"self",
".",
"_set_vswitch_exception",
"(",
"err",
",",
"vswitch_name",
")"
] |
Filters umis with non - ACGT bases Expects formatted fastq files . | def mb_filter ( fastq , cores ) : filter_mb = partial ( umi_filter ) p = multiprocessing . Pool ( cores ) chunks = tz . partition_all ( 10000 , read_fastq ( fastq ) ) bigchunks = tz . partition_all ( cores , chunks ) for bigchunk in bigchunks : for chunk in p . map ( filter_mb , list ( bigchunk ) ) : for read in chunk : sys . stdout . write ( read ) | 251,666 | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L1119-L1131 | [
"def",
"list_blobs",
"(",
"call",
"=",
"None",
",",
"kwargs",
"=",
"None",
")",
":",
"# pylint: disable=unused-argument",
"if",
"kwargs",
"is",
"None",
":",
"kwargs",
"=",
"{",
"}",
"if",
"'container'",
"not",
"in",
"kwargs",
":",
"raise",
"SaltCloudSystemExit",
"(",
"'A container must be specified'",
")",
"storageservice",
"=",
"_get_block_blob_service",
"(",
"kwargs",
")",
"ret",
"=",
"{",
"}",
"try",
":",
"for",
"blob",
"in",
"storageservice",
".",
"list_blobs",
"(",
"kwargs",
"[",
"'container'",
"]",
")",
".",
"items",
":",
"ret",
"[",
"blob",
".",
"name",
"]",
"=",
"{",
"'blob_type'",
":",
"blob",
".",
"properties",
".",
"blob_type",
",",
"'last_modified'",
":",
"blob",
".",
"properties",
".",
"last_modified",
".",
"isoformat",
"(",
")",
",",
"'server_encrypted'",
":",
"blob",
".",
"properties",
".",
"server_encrypted",
",",
"}",
"except",
"Exception",
"as",
"exc",
":",
"log",
".",
"warning",
"(",
"six",
".",
"text_type",
"(",
"exc",
")",
")",
"return",
"ret"
] |
Convert fastqtransformed file to output format compatible with kallisto . | def kallisto ( fastq , out_dir , cb_histogram , cb_cutoff ) : parser_re = re . compile ( '(.*):CELL_(?<CB>.*):UMI_(?P<UMI>.*)\\n(.*)\\n\\+\\n(.*)\\n' ) if fastq . endswith ( 'gz' ) : fastq_fh = gzip . GzipFile ( fileobj = open ( fastq ) ) elif fastq == "-" : fastq_fh = sys . stdin else : fastq_fh = open ( fastq ) cb_depth_set = get_cb_depth_set ( cb_histogram , cb_cutoff ) cb_set = set ( ) cb_batch = collections . defaultdict ( list ) parsed = 0 for read in stream_fastq ( fastq_fh ) : match = parser_re . search ( read ) . groupdict ( ) umi = match [ 'UMI' ] cb = match [ 'CB' ] if cb_depth_set and cb not in cb_depth_set : continue parsed += 1 cb_set . add ( cb ) cb_batch [ cb ] . append ( ( read , umi ) ) # write in batches to avoid opening up file handles repeatedly if not parsed % 10000000 : for cb , chunk in cb_batch . items ( ) : write_kallisto_chunk ( out_dir , cb , chunk ) cb_batch = collections . defaultdict ( list ) for cb , chunk in cb_batch . items ( ) : write_kallisto_chunk ( out_dir , cb , chunk ) with open ( os . path . join ( out_dir , "barcodes.batch" ) , "w" ) as out_handle : out_handle . write ( "#id umi-file file-1\n" ) batchformat = "{cb} {cb}.umi {cb}.fq\n" for cb in cb_set : out_handle . write ( batchformat . format ( * * locals ( ) ) ) | 251,667 | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L1164-L1203 | [
"def",
"dSbus_dV",
"(",
"Y",
",",
"V",
")",
":",
"I",
"=",
"Y",
"*",
"V",
"diagV",
"=",
"spdiag",
"(",
"V",
")",
"diagIbus",
"=",
"spdiag",
"(",
"I",
")",
"diagVnorm",
"=",
"spdiag",
"(",
"div",
"(",
"V",
",",
"abs",
"(",
"V",
")",
")",
")",
"# Element-wise division.",
"dS_dVm",
"=",
"diagV",
"*",
"conj",
"(",
"Y",
"*",
"diagVnorm",
")",
"+",
"conj",
"(",
"diagIbus",
")",
"*",
"diagVnorm",
"dS_dVa",
"=",
"1j",
"*",
"diagV",
"*",
"conj",
"(",
"diagIbus",
"-",
"Y",
"*",
"diagV",
")",
"return",
"dS_dVm",
",",
"dS_dVa"
] |
Demultiplex a fastqtransformed FASTQ file into a FASTQ file for each sample . | def demultiplex_samples ( fastq , out_dir , nedit , barcodes ) : annotations = detect_fastq_annotations ( fastq ) re_string = construct_transformed_regex ( annotations ) parser_re = re . compile ( re_string ) if barcodes : barcodes = set ( barcode . strip ( ) for barcode in barcodes ) else : barcodes = set ( ) if nedit == 0 : filter_bc = partial ( exact_sample_filter , barcodes = barcodes ) else : barcodehash = MutationHash ( barcodes , nedit ) filter_bc = partial ( correcting_sample_filter , barcodehash = barcodehash ) sample_set = set ( ) batch = collections . defaultdict ( list ) parsed = 0 safe_makedir ( out_dir ) for read in read_fastq ( fastq ) : parsed += 1 read = filter_bc ( read ) if not read : continue match = parser_re . search ( read ) . groupdict ( ) sample = match [ 'SB' ] sample_set . add ( sample ) batch [ sample ] . append ( read ) # write in batches to avoid opening up file handles repeatedly if not parsed % 10000000 : for sample , reads in batch . items ( ) : out_file = os . path . join ( out_dir , sample + ".fq" ) with open ( out_file , "a" ) as out_handle : for read in reads : fixed = filter_bc ( read ) if fixed : out_handle . write ( fixed ) batch = collections . defaultdict ( list ) for sample , reads in batch . items ( ) : out_file = os . path . join ( out_dir , sample + ".fq" ) with open ( out_file , "a" ) as out_handle : for read in reads : fixed = filter_bc ( read ) if fixed : out_handle . write ( read ) | 251,668 | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L1256-L1305 | [
"def",
"websocket_connect",
"(",
"self",
",",
"message",
")",
":",
"self",
".",
"session_id",
"=",
"self",
".",
"scope",
"[",
"'url_route'",
"]",
"[",
"'kwargs'",
"]",
"[",
"'subscriber_id'",
"]",
"super",
"(",
")",
".",
"websocket_connect",
"(",
"message",
")",
"# Create new subscriber object.",
"Subscriber",
".",
"objects",
".",
"get_or_create",
"(",
"session_id",
"=",
"self",
".",
"session_id",
")"
] |
Demultiplex a fastqtransformed FASTQ file into a FASTQ file for each cell . | def demultiplex_cells ( fastq , out_dir , readnumber , prefix , cb_histogram , cb_cutoff ) : annotations = detect_fastq_annotations ( fastq ) re_string = construct_transformed_regex ( annotations ) parser_re = re . compile ( re_string ) readstring = "" if not readnumber else "_R{}" . format ( readnumber ) filestring = "{prefix}{sample}{readstring}.fq" cb_set = set ( ) if cb_histogram : cb_set = get_cb_depth_set ( cb_histogram , cb_cutoff ) sample_set = set ( ) batch = collections . defaultdict ( list ) parsed = 0 safe_makedir ( out_dir ) for read in read_fastq ( fastq ) : parsed += 1 match = parser_re . search ( read ) . groupdict ( ) sample = match [ 'CB' ] if cb_set and sample not in cb_set : continue sample_set . add ( sample ) batch [ sample ] . append ( read ) # write in batches to avoid opening up file handles repeatedly if not parsed % 10000000 : for sample , reads in batch . items ( ) : out_file = os . path . join ( out_dir , filestring . format ( * * locals ( ) ) ) with open ( out_file , "a" ) as out_handle : for read in reads : out_handle . write ( read ) batch = collections . defaultdict ( list ) for sample , reads in batch . items ( ) : out_file = os . path . join ( out_dir , filestring . format ( * * locals ( ) ) ) with open ( out_file , "a" ) as out_handle : for read in reads : out_handle . write ( read ) | 251,669 | https://github.com/vals/umis/blob/e8adb8486d9e9134ab8a6cad9811a7e74dcc4a2c/umis/umis.py#L1317-L1355 | [
"def",
"websocket_connect",
"(",
"self",
",",
"message",
")",
":",
"self",
".",
"session_id",
"=",
"self",
".",
"scope",
"[",
"'url_route'",
"]",
"[",
"'kwargs'",
"]",
"[",
"'subscriber_id'",
"]",
"super",
"(",
")",
".",
"websocket_connect",
"(",
"message",
")",
"# Create new subscriber object.",
"Subscriber",
".",
"objects",
".",
"get_or_create",
"(",
"session_id",
"=",
"self",
".",
"session_id",
")"
] |
Allows conversion of Django ArrayField to SQLAlchemy Array . Takes care of mapping the type of the array element . | def array_type ( data_types , field ) : from sqlalchemy . dialects import postgresql internal_type = field . base_field . get_internal_type ( ) # currently no support for multi-dimensional arrays if internal_type in data_types and internal_type != 'ArrayField' : sub_type = data_types [ internal_type ] ( field ) if not isinstance ( sub_type , ( list , tuple ) ) : sub_type = [ sub_type ] else : raise RuntimeError ( 'Unsupported array element type' ) return postgresql . ARRAY ( sub_type ) | 251,670 | https://github.com/Deepwalker/aldjemy/blob/d58359a3710e7f21e47a70765b9d75c61143ceb1/aldjemy/postgres.py#L4-L21 | [
"def",
"_watcher_thread",
"(",
"self",
")",
":",
"while",
"1",
":",
"time",
".",
"sleep",
"(",
"self",
".",
"check_interval",
")",
"if",
"not",
"self",
".",
"_watcher_running",
":",
"break",
"self",
".",
"logger",
".",
"info",
"(",
"'CrashReporter: Attempting to send offline reports.'",
")",
"self",
".",
"submit_offline_reports",
"(",
")",
"remaining_reports",
"=",
"len",
"(",
"self",
".",
"get_offline_reports",
"(",
")",
")",
"if",
"remaining_reports",
"==",
"0",
":",
"break",
"self",
".",
"_watcher",
"=",
"None",
"self",
".",
"logger",
".",
"info",
"(",
"'CrashReporter: Watcher stopped.'",
")"
] |
Set logger handler formatters to more detail | def set_verbose_logger_handlers ( ) : # noqa # type: (None) -> None global _REGISTERED_LOGGER_HANDLERS formatter = logging . Formatter ( '%(asctime)s %(levelname)s %(name)s:%(funcName)s:%(lineno)d ' '%(message)s' ) formatter . default_msec_format = '%s.%03d' for handler in _REGISTERED_LOGGER_HANDLERS : handler . setFormatter ( formatter ) | 251,671 | https://github.com/Azure/blobxfer/blob/3eccbe7530cc6a20ab2d30f9e034b6f021817f34/blobxfer/util.py#L114-L123 | [
"async",
"def",
"write_close_frame",
"(",
"self",
",",
"data",
":",
"bytes",
"=",
"b\"\"",
")",
"->",
"None",
":",
"# Test and set the connection state before sending the close frame to",
"# avoid sending two frames in case of concurrent calls.",
"if",
"self",
".",
"state",
"is",
"State",
".",
"OPEN",
":",
"# 7.1.3. The WebSocket Closing Handshake is Started",
"self",
".",
"state",
"=",
"State",
".",
"CLOSING",
"logger",
".",
"debug",
"(",
"\"%s - state = CLOSING\"",
",",
"self",
".",
"side",
")",
"# 7.1.2. Start the WebSocket Closing Handshake",
"await",
"self",
".",
"write_frame",
"(",
"True",
",",
"OP_CLOSE",
",",
"data",
",",
"_expected_state",
"=",
"State",
".",
"CLOSING",
")"
] |
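The formatter configured by set_verbose_logger_handlers above can be attached to any handler; here it is applied to a fresh StreamHandler, with a local list standing in for blobxfer's module-level handler registry:

```python
import logging

registered_handlers = [logging.StreamHandler()]  # stand-in for the module-level registry

formatter = logging.Formatter(
    '%(asctime)s %(levelname)s %(name)s:%(funcName)s:%(lineno)d %(message)s')
formatter.default_msec_format = '%s.%03d'
for handler in registered_handlers:
    handler.setFormatter(formatter)
```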
Download blobs or files from Azure Storage | def download ( ctx ) : settings . add_cli_options ( ctx . cli_options , settings . TransferAction . Download ) ctx . initialize ( settings . TransferAction . Download ) specs = settings . create_download_specifications ( ctx . cli_options , ctx . config ) del ctx . cli_options for spec in specs : blobxfer . api . Downloader ( ctx . general_options , ctx . credentials , spec ) . start ( ) | 251,672 | https://github.com/Azure/blobxfer/blob/3eccbe7530cc6a20ab2d30f9e034b6f021817f34/cli/cli.py#L1071-L1081 | [
"def",
"get_probes_config",
"(",
"self",
")",
":",
"probes",
"=",
"{",
"}",
"probes_table",
"=",
"junos_views",
".",
"junos_rpm_probes_config_table",
"(",
"self",
".",
"device",
")",
"probes_table",
".",
"get",
"(",
")",
"probes_table_items",
"=",
"probes_table",
".",
"items",
"(",
")",
"for",
"probe_test",
"in",
"probes_table_items",
":",
"test_name",
"=",
"py23_compat",
".",
"text_type",
"(",
"probe_test",
"[",
"0",
"]",
")",
"test_details",
"=",
"{",
"p",
"[",
"0",
"]",
":",
"p",
"[",
"1",
"]",
"for",
"p",
"in",
"probe_test",
"[",
"1",
"]",
"}",
"probe_name",
"=",
"napalm_base",
".",
"helpers",
".",
"convert",
"(",
"py23_compat",
".",
"text_type",
",",
"test_details",
".",
"pop",
"(",
"'probe_name'",
")",
")",
"target",
"=",
"napalm_base",
".",
"helpers",
".",
"convert",
"(",
"py23_compat",
".",
"text_type",
",",
"test_details",
".",
"pop",
"(",
"'target'",
",",
"''",
")",
")",
"test_interval",
"=",
"napalm_base",
".",
"helpers",
".",
"convert",
"(",
"int",
",",
"test_details",
".",
"pop",
"(",
"'test_interval'",
",",
"'0'",
")",
")",
"probe_count",
"=",
"napalm_base",
".",
"helpers",
".",
"convert",
"(",
"int",
",",
"test_details",
".",
"pop",
"(",
"'probe_count'",
",",
"'0'",
")",
")",
"probe_type",
"=",
"napalm_base",
".",
"helpers",
".",
"convert",
"(",
"py23_compat",
".",
"text_type",
",",
"test_details",
".",
"pop",
"(",
"'probe_type'",
",",
"''",
")",
")",
"source",
"=",
"napalm_base",
".",
"helpers",
".",
"convert",
"(",
"py23_compat",
".",
"text_type",
",",
"test_details",
".",
"pop",
"(",
"'source_address'",
",",
"''",
")",
")",
"if",
"probe_name",
"not",
"in",
"probes",
".",
"keys",
"(",
")",
":",
"probes",
"[",
"probe_name",
"]",
"=",
"{",
"}",
"probes",
"[",
"probe_name",
"]",
"[",
"test_name",
"]",
"=",
"{",
"'probe_type'",
":",
"probe_type",
",",
"'target'",
":",
"target",
",",
"'source'",
":",
"source",
",",
"'probe_count'",
":",
"probe_count",
",",
"'test_interval'",
":",
"test_interval",
"}",
"return",
"probes"
] |
Synchronously copy blobs or files between Azure Storage accounts | def synccopy ( ctx ) : settings . add_cli_options ( ctx . cli_options , settings . TransferAction . Synccopy ) ctx . initialize ( settings . TransferAction . Synccopy ) specs = settings . create_synccopy_specifications ( ctx . cli_options , ctx . config ) del ctx . cli_options for spec in specs : blobxfer . api . SyncCopy ( ctx . general_options , ctx . credentials , spec ) . start ( ) | 251,673 | https://github.com/Azure/blobxfer/blob/3eccbe7530cc6a20ab2d30f9e034b6f021817f34/cli/cli.py#L1088-L1098 | [
"def",
"decode_image",
"(",
"data",
",",
"image_type",
")",
":",
"bytes_data",
"=",
"BytesIO",
"(",
"data",
")",
"if",
"image_type",
".",
"is_tiff_format",
"(",
")",
":",
"image",
"=",
"tiff",
".",
"imread",
"(",
"bytes_data",
")",
"else",
":",
"image",
"=",
"np",
".",
"array",
"(",
"Image",
".",
"open",
"(",
"bytes_data",
")",
")",
"if",
"image_type",
"is",
"MimeType",
".",
"JP2",
":",
"try",
":",
"bit_depth",
"=",
"get_jp2_bit_depth",
"(",
"bytes_data",
")",
"image",
"=",
"fix_jp2_image",
"(",
"image",
",",
"bit_depth",
")",
"except",
"ValueError",
":",
"pass",
"if",
"image",
"is",
"None",
":",
"raise",
"ImageDecodingError",
"(",
"'Unable to decode image'",
")",
"return",
"image"
] |
Upload files to Azure Storage | def upload ( ctx ) : settings . add_cli_options ( ctx . cli_options , settings . TransferAction . Upload ) ctx . initialize ( settings . TransferAction . Upload ) specs = settings . create_upload_specifications ( ctx . cli_options , ctx . config ) del ctx . cli_options for spec in specs : blobxfer . api . Uploader ( ctx . general_options , ctx . credentials , spec ) . start ( ) | 251,674 | https://github.com/Azure/blobxfer/blob/3eccbe7530cc6a20ab2d30f9e034b6f021817f34/cli/cli.py#L1106-L1116 | [
"def",
"get_opt",
"(",
"key",
",",
"config",
",",
"section",
",",
"booleans",
",",
"repeatable",
")",
":",
"# Handle repeatable non-boolean options (e.g. --file=file1.txt --file=file2.txt).",
"if",
"key",
"in",
"repeatable",
"and",
"key",
"not",
"in",
"booleans",
":",
"return",
"config",
".",
"get",
"(",
"section",
",",
"key",
"[",
"2",
":",
"]",
")",
".",
"strip",
"(",
"'\\n'",
")",
".",
"splitlines",
"(",
")",
"# Handle repeatable booleans.",
"if",
"key",
"in",
"repeatable",
"and",
"key",
"in",
"booleans",
":",
"try",
":",
"return",
"config",
".",
"getint",
"(",
"section",
",",
"key",
"[",
"2",
":",
"]",
")",
"except",
"ValueError",
"as",
"exc",
":",
"raise",
"DocoptcfgFileError",
"(",
"'Repeatable boolean option \"{0}\" invalid.'",
".",
"format",
"(",
"key",
"[",
"2",
":",
"]",
")",
",",
"str",
"(",
"exc",
")",
")",
"# Handle non-repeatable booleans.",
"if",
"key",
"in",
"booleans",
":",
"try",
":",
"return",
"config",
".",
"getboolean",
"(",
"section",
",",
"key",
"[",
"2",
":",
"]",
")",
"except",
"ValueError",
"as",
"exc",
":",
"raise",
"DocoptcfgFileError",
"(",
"'Boolean option \"{0}\" invalid.'",
".",
"format",
"(",
"key",
"[",
"2",
":",
"]",
")",
",",
"str",
"(",
"exc",
")",
")",
"# Handle the rest.",
"return",
"str",
"(",
"config",
".",
"get",
"(",
"section",
",",
"key",
"[",
"2",
":",
"]",
")",
")"
] |
Returns the list of bindings supported by an IDP This is not clear in the pysaml2 code so wrapping it in a util | def get_idp_sso_supported_bindings ( idp_entity_id = None , config = None ) : if config is None : # avoid circular import from djangosaml2 . conf import get_config config = get_config ( ) # load metadata store from config meta = getattr ( config , 'metadata' , { } ) # if idp is None, assume only one exists so just use that if idp_entity_id is None : # .keys() returns dict_keys in python3.5+ try : idp_entity_id = list ( available_idps ( config ) . keys ( ) ) [ 0 ] except IndexError : raise ImproperlyConfigured ( "No IdP configured!" ) try : return meta . service ( idp_entity_id , 'idpsso_descriptor' , 'single_sign_on_service' ) . keys ( ) except UnknownSystemEntity : return [ ] | 251,675 | https://github.com/knaperek/djangosaml2/blob/643969701d3b4257a8d64c5c577602ebaa61de70/djangosaml2/utils.py#L41-L60 | [
"def",
"_get_parent_timestamp",
"(",
"dirname",
",",
"mtime",
")",
":",
"parent_pathname",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"dirname",
")",
"# max between the parent timestamp the one passed in",
"mtime",
"=",
"_max_timestamps",
"(",
"parent_pathname",
",",
"False",
",",
"mtime",
")",
"if",
"dirname",
"!=",
"os",
".",
"path",
".",
"dirname",
"(",
"parent_pathname",
")",
":",
"# this is only called if we're not at the root",
"mtime",
"=",
"_get_parent_timestamp",
"(",
"parent_pathname",
",",
"mtime",
")",
"return",
"mtime"
] |
Serves as a common mechanism for ending ACS in case of any SAML related failure . Handling can be configured by setting the SAML_ACS_FAILURE_RESPONSE_FUNCTION as suitable for the project . | def fail_acs_response ( request , * args , * * kwargs ) : failure_function = import_string ( get_custom_setting ( 'SAML_ACS_FAILURE_RESPONSE_FUNCTION' , 'djangosaml2.acs_failures.template_failure' ) ) return failure_function ( request , * args , * * kwargs ) | 251,676 | https://github.com/knaperek/djangosaml2/blob/643969701d3b4257a8d64c5c577602ebaa61de70/djangosaml2/utils.py#L72-L82 | [
"def",
"clean_old_entries",
"(",
")",
":",
"from",
"indico_livesync",
".",
"plugin",
"import",
"LiveSyncPlugin",
"from",
"indico_livesync",
".",
"models",
".",
"queue",
"import",
"LiveSyncQueueEntry",
"queue_entry_ttl",
"=",
"LiveSyncPlugin",
".",
"settings",
".",
"get",
"(",
"'queue_entry_ttl'",
")",
"if",
"not",
"queue_entry_ttl",
":",
"return",
"expire_threshold",
"=",
"now_utc",
"(",
")",
"-",
"timedelta",
"(",
"days",
"=",
"queue_entry_ttl",
")",
"LiveSyncQueueEntry",
".",
"find",
"(",
"LiveSyncQueueEntry",
".",
"processed",
",",
"LiveSyncQueueEntry",
".",
"timestamp",
"<",
"expire_threshold",
")",
".",
"delete",
"(",
"synchronize_session",
"=",
"'fetch'",
")"
] |
Example view that echoes the SAML attributes of a user | def echo_attributes ( request , config_loader_path = None , template = 'djangosaml2/echo_attributes.html' ) : state = StateCache ( request . session ) conf = get_config ( config_loader_path , request ) client = Saml2Client ( conf , state_cache = state , identity_cache = IdentityCache ( request . session ) ) subject_id = _get_subject_id ( request . session ) try : identity = client . users . get_identity ( subject_id , check_not_on_or_after = False ) except AttributeError : return HttpResponse ( "No active SAML identity found. Are you sure you have logged in via SAML?" ) return render ( request , template , { 'attributes' : identity [ 0 ] } ) | 251,677 | https://github.com/knaperek/djangosaml2/blob/643969701d3b4257a8d64c5c577602ebaa61de70/djangosaml2/views.py#L342-L358 | [
"def",
"port_list_compress",
"(",
"port_list",
")",
":",
"if",
"not",
"port_list",
"or",
"len",
"(",
"port_list",
")",
"==",
"0",
":",
"LOGGER",
".",
"info",
"(",
"\"Invalid or empty port list.\"",
")",
"return",
"''",
"port_list",
"=",
"sorted",
"(",
"set",
"(",
"port_list",
")",
")",
"compressed_list",
"=",
"[",
"]",
"for",
"key",
",",
"group",
"in",
"itertools",
".",
"groupby",
"(",
"enumerate",
"(",
"port_list",
")",
",",
"lambda",
"t",
":",
"t",
"[",
"1",
"]",
"-",
"t",
"[",
"0",
"]",
")",
":",
"group",
"=",
"list",
"(",
"group",
")",
"if",
"group",
"[",
"0",
"]",
"[",
"1",
"]",
"==",
"group",
"[",
"-",
"1",
"]",
"[",
"1",
"]",
":",
"compressed_list",
".",
"append",
"(",
"str",
"(",
"group",
"[",
"0",
"]",
"[",
"1",
"]",
")",
")",
"else",
":",
"compressed_list",
".",
"append",
"(",
"str",
"(",
"group",
"[",
"0",
"]",
"[",
"1",
"]",
")",
"+",
"'-'",
"+",
"str",
"(",
"group",
"[",
"-",
"1",
"]",
"[",
"1",
"]",
")",
")",
"return",
"','",
".",
"join",
"(",
"compressed_list",
")"
] |
SAML Logout Request initiator | def logout ( request , config_loader_path = None ) : state = StateCache ( request . session ) conf = get_config ( config_loader_path , request ) client = Saml2Client ( conf , state_cache = state , identity_cache = IdentityCache ( request . session ) ) subject_id = _get_subject_id ( request . session ) if subject_id is None : logger . warning ( 'The session does not contain the subject id for user %s' , request . user ) result = client . global_logout ( subject_id ) state . sync ( ) if not result : logger . error ( "Looks like the user %s is not logged in any IdP/AA" , subject_id ) return HttpResponseBadRequest ( "You are not logged in any IdP/AA" ) if len ( result ) > 1 : logger . error ( 'Sorry, I do not know how to logout from several sources. I will logout just from the first one' ) for entityid , logout_info in result . items ( ) : if isinstance ( logout_info , tuple ) : binding , http_info = logout_info if binding == BINDING_HTTP_POST : logger . debug ( 'Returning form to the IdP to continue the logout process' ) body = '' . join ( http_info [ 'data' ] ) return HttpResponse ( body ) elif binding == BINDING_HTTP_REDIRECT : logger . debug ( 'Redirecting to the IdP to continue the logout process' ) return HttpResponseRedirect ( get_location ( http_info ) ) else : logger . error ( 'Unknown binding: %s' , binding ) return HttpResponseServerError ( 'Failed to log out' ) else : # We must have had a soap logout return finish_logout ( request , logout_info ) logger . error ( 'Could not logout because there only the HTTP_REDIRECT is supported' ) return HttpResponseServerError ( 'Logout Binding not supported' ) | 251,678 | https://github.com/knaperek/djangosaml2/blob/643969701d3b4257a8d64c5c577602ebaa61de70/djangosaml2/views.py#L362-L408 | [
"def",
"cache_url_config",
"(",
"cls",
",",
"url",
",",
"backend",
"=",
"None",
")",
":",
"url",
"=",
"urlparse",
"(",
"url",
")",
"if",
"not",
"isinstance",
"(",
"url",
",",
"cls",
".",
"URL_CLASS",
")",
"else",
"url",
"location",
"=",
"url",
".",
"netloc",
".",
"split",
"(",
"','",
")",
"if",
"len",
"(",
"location",
")",
"==",
"1",
":",
"location",
"=",
"location",
"[",
"0",
"]",
"config",
"=",
"{",
"'BACKEND'",
":",
"cls",
".",
"CACHE_SCHEMES",
"[",
"url",
".",
"scheme",
"]",
",",
"'LOCATION'",
":",
"location",
",",
"}",
"# Add the drive to LOCATION",
"if",
"url",
".",
"scheme",
"==",
"'filecache'",
":",
"config",
".",
"update",
"(",
"{",
"'LOCATION'",
":",
"url",
".",
"netloc",
"+",
"url",
".",
"path",
",",
"}",
")",
"if",
"url",
".",
"path",
"and",
"url",
".",
"scheme",
"in",
"[",
"'memcache'",
",",
"'pymemcache'",
"]",
":",
"config",
".",
"update",
"(",
"{",
"'LOCATION'",
":",
"'unix:'",
"+",
"url",
".",
"path",
",",
"}",
")",
"elif",
"url",
".",
"scheme",
".",
"startswith",
"(",
"'redis'",
")",
":",
"if",
"url",
".",
"hostname",
":",
"scheme",
"=",
"url",
".",
"scheme",
".",
"replace",
"(",
"'cache'",
",",
"''",
")",
"else",
":",
"scheme",
"=",
"'unix'",
"locations",
"=",
"[",
"scheme",
"+",
"'://'",
"+",
"loc",
"+",
"url",
".",
"path",
"for",
"loc",
"in",
"url",
".",
"netloc",
".",
"split",
"(",
"','",
")",
"]",
"config",
"[",
"'LOCATION'",
"]",
"=",
"locations",
"[",
"0",
"]",
"if",
"len",
"(",
"locations",
")",
"==",
"1",
"else",
"locations",
"if",
"url",
".",
"query",
":",
"config_options",
"=",
"{",
"}",
"for",
"k",
",",
"v",
"in",
"parse_qs",
"(",
"url",
".",
"query",
")",
".",
"items",
"(",
")",
":",
"opt",
"=",
"{",
"k",
".",
"upper",
"(",
")",
":",
"_cast",
"(",
"v",
"[",
"0",
"]",
")",
"}",
"if",
"k",
".",
"upper",
"(",
")",
"in",
"cls",
".",
"_CACHE_BASE_OPTIONS",
":",
"config",
".",
"update",
"(",
"opt",
")",
"else",
":",
"config_options",
".",
"update",
"(",
"opt",
")",
"config",
"[",
"'OPTIONS'",
"]",
"=",
"config_options",
"if",
"backend",
":",
"config",
"[",
"'BACKEND'",
"]",
"=",
"backend",
"return",
"config"
] |
SAML Logout Response endpoint | def do_logout_service ( request , data , binding , config_loader_path = None , next_page = None , logout_error_template = 'djangosaml2/logout_error.html' ) : logger . debug ( 'Logout service started' ) conf = get_config ( config_loader_path , request ) state = StateCache ( request . session ) client = Saml2Client ( conf , state_cache = state , identity_cache = IdentityCache ( request . session ) ) if 'SAMLResponse' in data : # we started the logout logger . debug ( 'Receiving a logout response from the IdP' ) response = client . parse_logout_request_response ( data [ 'SAMLResponse' ] , binding ) state . sync ( ) return finish_logout ( request , response , next_page = next_page ) elif 'SAMLRequest' in data : # logout started by the IdP logger . debug ( 'Receiving a logout request from the IdP' ) subject_id = _get_subject_id ( request . session ) if subject_id is None : logger . warning ( 'The session does not contain the subject id for user %s. Performing local logout' , request . user ) auth . logout ( request ) return render ( request , logout_error_template , status = 403 ) else : http_info = client . handle_logout_request ( data [ 'SAMLRequest' ] , subject_id , binding , relay_state = data . get ( 'RelayState' , '' ) ) state . sync ( ) auth . logout ( request ) return HttpResponseRedirect ( get_location ( http_info ) ) else : logger . error ( 'No SAMLResponse or SAMLRequest parameter found' ) raise Http404 ( 'No SAMLResponse or SAMLRequest parameter found' ) | 251,679 | https://github.com/knaperek/djangosaml2/blob/643969701d3b4257a8d64c5c577602ebaa61de70/djangosaml2/views.py#L420-L464 | [
"def",
"load_currency",
"(",
"self",
",",
"mnemonic",
":",
"str",
")",
":",
"# , base_currency: str <= ignored for now.",
"if",
"self",
".",
"rate",
"and",
"self",
".",
"rate",
".",
"currency",
"==",
"mnemonic",
":",
"# Already loaded.",
"return",
"app",
"=",
"PriceDbApplication",
"(",
")",
"# TODO use the base_currency parameter for the query #33",
"symbol",
"=",
"SecuritySymbol",
"(",
"\"CURRENCY\"",
",",
"mnemonic",
")",
"self",
".",
"rate",
"=",
"app",
".",
"get_latest_price",
"(",
"symbol",
")",
"if",
"not",
"self",
".",
"rate",
":",
"raise",
"ValueError",
"(",
"f\"No rate found for {mnemonic}!\"",
")"
] |
Returns an XML with the SAML 2 . 0 metadata for this SP as configured in the settings . py file . | def metadata ( request , config_loader_path = None , valid_for = None ) : conf = get_config ( config_loader_path , request ) metadata = entity_descriptor ( conf ) return HttpResponse ( content = text_type ( metadata ) . encode ( 'utf-8' ) , content_type = "text/xml; charset=utf8" ) | 251,680 | https://github.com/knaperek/djangosaml2/blob/643969701d3b4257a8d64c5c577602ebaa61de70/djangosaml2/views.py#L479-L486 | [
"def",
"extract_paths",
"(",
"self",
",",
"paths",
",",
"ignore_nopath",
")",
":",
"try",
":",
"super",
"(",
")",
".",
"extract_paths",
"(",
"paths",
"=",
"paths",
",",
"ignore_nopath",
"=",
"ignore_nopath",
",",
")",
"except",
"ExtractPathError",
"as",
"err",
":",
"LOGGER",
".",
"debug",
"(",
"'%s: failed extracting files: %s'",
",",
"self",
".",
"vm",
".",
"name",
"(",
")",
",",
"err",
".",
"message",
")",
"if",
"self",
".",
"_has_guestfs",
":",
"self",
".",
"extract_paths_dead",
"(",
"paths",
",",
"ignore_nopath",
")",
"else",
":",
"raise"
] |
Configures a user after creation and returns the updated user . | def configure_user ( self , user , attributes , attribute_mapping ) : user . set_unusable_password ( ) return self . update_user ( user , attributes , attribute_mapping , force_save = True ) | 251,681 | https://github.com/knaperek/djangosaml2/blob/643969701d3b4257a8d64c5c577602ebaa61de70/djangosaml2/backends.py#L198-L205 | [
"def",
"hold",
"(",
"*",
"keys",
",",
"hold_time",
"=",
"0",
",",
"hold_while",
"=",
"None",
")",
":",
"for",
"key",
"in",
"keys",
":",
"win32api",
".",
"keybd_event",
"(",
"codes",
"[",
"key",
"]",
",",
"0",
",",
"0",
",",
"0",
")",
"if",
"callable",
"(",
"hold_while",
")",
":",
"while",
"hold_while",
"(",
")",
":",
"pass",
"else",
":",
"time",
".",
"sleep",
"(",
"hold_time",
")",
"release",
"(",
"*",
"keys",
")"
] |
Update a user with a set of attributes and returns the updated user . | def update_user ( self , user , attributes , attribute_mapping , force_save = False ) : if not attribute_mapping : return user user_modified = False for saml_attr , django_attrs in attribute_mapping . items ( ) : attr_value_list = attributes . get ( saml_attr ) if not attr_value_list : logger . debug ( 'Could not find value for "%s", not updating fields "%s"' , saml_attr , django_attrs ) continue for attr in django_attrs : if hasattr ( user , attr ) : user_attr = getattr ( user , attr ) if callable ( user_attr ) : modified = user_attr ( attr_value_list ) else : modified = self . _set_attribute ( user , attr , attr_value_list [ 0 ] ) user_modified = user_modified or modified else : logger . debug ( 'Could not find attribute "%s" on user "%s"' , attr , user ) logger . debug ( 'Sending the pre_save signal' ) signal_modified = any ( [ response for receiver , response in pre_user_save . send_robust ( sender = user . __class__ , instance = user , attributes = attributes , user_modified = user_modified ) ] ) if user_modified or signal_modified or force_save : user . save ( ) return user | 251,682 | https://github.com/knaperek/djangosaml2/blob/643969701d3b4257a8d64c5c577602ebaa61de70/djangosaml2/backends.py#L207-L252 | [
"def",
"split_volume_from_journal",
"(",
"citation_elements",
")",
":",
"for",
"el",
"in",
"citation_elements",
":",
"if",
"el",
"[",
"'type'",
"]",
"==",
"'JOURNAL'",
"and",
"';'",
"in",
"el",
"[",
"'title'",
"]",
":",
"el",
"[",
"'title'",
"]",
",",
"series",
"=",
"el",
"[",
"'title'",
"]",
".",
"rsplit",
"(",
"';'",
",",
"1",
")",
"el",
"[",
"'volume'",
"]",
"=",
"series",
"+",
"el",
"[",
"'volume'",
"]",
"return",
"citation_elements"
] |
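Both `configure_user` and `update_user` above iterate an attribute-mapping dict taken from Django settings. The usual shape of that mapping is sketched below; the attribute names are illustrative.

```python
# settings.py sketch: each SAML attribute maps to a tuple of Django user
# field names (or user-method names) that update_user() assigns in turn.
SAML_ATTRIBUTE_MAPPING = {
    'uid': ('username', ),
    'mail': ('email', ),
    'cn': ('first_name', ),
    'sn': ('last_name', ),
}
```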
Set an attribute of an object to a specific value . | def _set_attribute ( self , obj , attr , value ) : field = obj . _meta . get_field ( attr ) if field . max_length is not None and len ( value ) > field . max_length : cleaned_value = value [ : field . max_length ] logger . warn ( 'The attribute "%s" was trimmed from "%s" to "%s"' , attr , value , cleaned_value ) else : cleaned_value = value old_value = getattr ( obj , attr ) if cleaned_value != old_value : setattr ( obj , attr , cleaned_value ) return True return False | 251,683 | https://github.com/knaperek/djangosaml2/blob/643969701d3b4257a8d64c5c577602ebaa61de70/djangosaml2/backends.py#L254-L272 | [
"def",
"fetch",
"(",
"dataset",
",",
"annot",
",",
"cat",
"=",
"(",
"0",
",",
"0",
",",
"0",
",",
"0",
")",
",",
"evt_type",
"=",
"None",
",",
"stage",
"=",
"None",
",",
"cycle",
"=",
"None",
",",
"chan_full",
"=",
"None",
",",
"epoch",
"=",
"None",
",",
"epoch_dur",
"=",
"30",
",",
"epoch_overlap",
"=",
"0",
",",
"epoch_step",
"=",
"None",
",",
"reject_epoch",
"=",
"False",
",",
"reject_artf",
"=",
"False",
",",
"min_dur",
"=",
"0",
",",
"buffer",
"=",
"0",
")",
":",
"bundles",
"=",
"get_times",
"(",
"annot",
",",
"evt_type",
"=",
"evt_type",
",",
"stage",
"=",
"stage",
",",
"cycle",
"=",
"cycle",
",",
"chan",
"=",
"chan_full",
",",
"exclude",
"=",
"reject_epoch",
",",
"buffer",
"=",
"buffer",
")",
"# Remove artefacts",
"if",
"reject_artf",
"and",
"bundles",
":",
"for",
"bund",
"in",
"bundles",
":",
"bund",
"[",
"'times'",
"]",
"=",
"remove_artf_evts",
"(",
"bund",
"[",
"'times'",
"]",
",",
"annot",
",",
"bund",
"[",
"'chan'",
"]",
",",
"min_dur",
"=",
"0",
")",
"# Divide bundles into segments to be concatenated",
"if",
"bundles",
":",
"if",
"'locked'",
"==",
"epoch",
":",
"bundles",
"=",
"_divide_bundles",
"(",
"bundles",
")",
"elif",
"'unlocked'",
"==",
"epoch",
":",
"if",
"epoch_step",
"is",
"not",
"None",
":",
"step",
"=",
"epoch_step",
"else",
":",
"step",
"=",
"epoch_dur",
"-",
"(",
"epoch_dur",
"*",
"epoch_overlap",
")",
"bundles",
"=",
"_concat",
"(",
"bundles",
",",
"cat",
")",
"bundles",
"=",
"_find_intervals",
"(",
"bundles",
",",
"epoch_dur",
",",
"step",
")",
"elif",
"not",
"epoch",
":",
"bundles",
"=",
"_concat",
"(",
"bundles",
",",
"cat",
")",
"# Minimum duration",
"bundles",
"=",
"_longer_than",
"(",
"bundles",
",",
"min_dur",
")",
"segments",
"=",
"Segments",
"(",
"dataset",
")",
"segments",
".",
"segments",
"=",
"bundles",
"return",
"segments"
] |
Utility function to load the pysaml2 configuration . | def config_settings_loader ( request = None ) : conf = SPConfig ( ) conf . load ( copy . deepcopy ( settings . SAML_CONFIG ) ) return conf | 251,684 | https://github.com/knaperek/djangosaml2/blob/643969701d3b4257a8d64c5c577602ebaa61de70/djangosaml2/conf.py#L55-L62 | [
"def",
"get_queryset",
"(",
"self",
")",
":",
"return",
"Event",
".",
"objects",
".",
"filter",
"(",
"Q",
"(",
"startTime__gte",
"=",
"timezone",
".",
"now",
"(",
")",
"-",
"timedelta",
"(",
"days",
"=",
"90",
")",
")",
"&",
"(",
"Q",
"(",
"series__isnull",
"=",
"False",
")",
"|",
"Q",
"(",
"publicevent__isnull",
"=",
"False",
")",
")",
")",
".",
"annotate",
"(",
"count",
"=",
"Count",
"(",
"'eventregistration'",
")",
")",
".",
"annotate",
"(",
"*",
"*",
"self",
".",
"get_annotations",
"(",
")",
")",
".",
"exclude",
"(",
"Q",
"(",
"count",
"=",
"0",
")",
"&",
"Q",
"(",
"status__in",
"=",
"[",
"Event",
".",
"RegStatus",
".",
"hidden",
",",
"Event",
".",
"RegStatus",
".",
"regHidden",
",",
"Event",
".",
"RegStatus",
".",
"disabled",
"]",
")",
")"
] |
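`config_settings_loader` above feeds `settings.SAML_CONFIG` straight into pysaml2's `SPConfig`. A pared-down sketch of such an entry follows; the entity id and endpoint URLs are placeholders, and a real configuration needs more keys (metadata sources, key files, and so on).

```python
# settings.py sketch: a minimal fragment of a pysaml2 SP configuration.
from saml2 import BINDING_HTTP_POST

SAML_CONFIG = {
    'entityid': 'https://sp.example.com/saml2/metadata/',
    'service': {
        'sp': {
            'endpoints': {
                'assertion_consumer_service': [
                    ('https://sp.example.com/saml2/acs/', BINDING_HTTP_POST),
                ],
            },
        },
    },
}
```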
Constructs the path & query portion of a URI from path segments and a dict . | def mkpath ( * segments , * * query ) : # Remove empty segments (e.g. no key specified) segments = [ bytes_to_str ( s ) for s in segments if s is not None ] # Join the segments into a path pathstring = '/' . join ( segments ) # Remove extra slashes pathstring = re . sub ( '/+' , '/' , pathstring ) # Add the query string if it exists _query = { } for key in query : if query [ key ] in [ False , True ] : _query [ key ] = str ( query [ key ] ) . lower ( ) elif query [ key ] is not None : if PY2 and isinstance ( query [ key ] , unicode ) : # noqa _query [ key ] = query [ key ] . encode ( 'utf-8' ) else : _query [ key ] = query [ key ] if len ( _query ) > 0 : pathstring += "?" + urlencode ( _query ) if not pathstring . startswith ( '/' ) : pathstring = '/' + pathstring return pathstring | 251,685 | https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/resources.py#L275-L304 | [
"def",
"setOverlayTexelAspect",
"(",
"self",
",",
"ulOverlayHandle",
",",
"fTexelAspect",
")",
":",
"fn",
"=",
"self",
".",
"function_table",
".",
"setOverlayTexelAspect",
"result",
"=",
"fn",
"(",
"ulOverlayHandle",
",",
"fTexelAspect",
")",
"return",
"result"
] |
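Since `mkpath` is shown in full in the record above, its behaviour can be demonstrated directly; the commented results follow from that implementation (`None` segments are dropped, booleans are lowercased, and a leading slash is guaranteed).

```python
from riak.transports.http.resources import mkpath

print(mkpath('buckets', 'mybucket', 'keys', keys=True))
# /buckets/mybucket/keys?keys=true

print(mkpath('riak', None, 'doc', r=2))
# /riak/doc?r=2
```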
Builds a Yokozuna search index URL . | def search_index_path ( self , index = None , * * options ) : if not self . yz_wm_index : raise RiakError ( "Yokozuna search is unsupported by this Riak node" ) if index : quote_plus ( index ) return mkpath ( self . yz_wm_index , "index" , index , * * options ) | 251,686 | https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/resources.py#L110-L124 | [
"def",
"get_modifications",
"(",
"self",
")",
":",
"# Get all the specific mod types",
"mod_event_types",
"=",
"list",
"(",
"ont_to_mod_type",
".",
"keys",
"(",
")",
")",
"# Add ONT::PTMs as a special case",
"mod_event_types",
"+=",
"[",
"'ONT::PTM'",
"]",
"mod_events",
"=",
"[",
"]",
"for",
"mod_event_type",
"in",
"mod_event_types",
":",
"events",
"=",
"self",
".",
"tree",
".",
"findall",
"(",
"\"EVENT/[type='%s']\"",
"%",
"mod_event_type",
")",
"mod_extracted",
"=",
"self",
".",
"extracted_events",
".",
"get",
"(",
"mod_event_type",
",",
"[",
"]",
")",
"for",
"event",
"in",
"events",
":",
"event_id",
"=",
"event",
".",
"attrib",
".",
"get",
"(",
"'id'",
")",
"if",
"event_id",
"not",
"in",
"mod_extracted",
":",
"mod_events",
".",
"append",
"(",
"event",
")",
"# Iterate over all modification events",
"for",
"event",
"in",
"mod_events",
":",
"stmts",
"=",
"self",
".",
"_get_modification_event",
"(",
"event",
")",
"if",
"stmts",
":",
"for",
"stmt",
"in",
"stmts",
":",
"self",
".",
"statements",
".",
"append",
"(",
"stmt",
")"
] |
Builds a Yokozuna search Solr schema URL . | def search_schema_path ( self , index , * * options ) : if not self . yz_wm_schema : raise RiakError ( "Yokozuna search is unsupported by this Riak node" ) return mkpath ( self . yz_wm_schema , "schema" , quote_plus ( index ) , * * options ) | 251,687 | https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/resources.py#L126-L139 | [
"def",
"get_modifications",
"(",
"self",
")",
":",
"# Get all the specific mod types",
"mod_event_types",
"=",
"list",
"(",
"ont_to_mod_type",
".",
"keys",
"(",
")",
")",
"# Add ONT::PTMs as a special case",
"mod_event_types",
"+=",
"[",
"'ONT::PTM'",
"]",
"mod_events",
"=",
"[",
"]",
"for",
"mod_event_type",
"in",
"mod_event_types",
":",
"events",
"=",
"self",
".",
"tree",
".",
"findall",
"(",
"\"EVENT/[type='%s']\"",
"%",
"mod_event_type",
")",
"mod_extracted",
"=",
"self",
".",
"extracted_events",
".",
"get",
"(",
"mod_event_type",
",",
"[",
"]",
")",
"for",
"event",
"in",
"events",
":",
"event_id",
"=",
"event",
".",
"attrib",
".",
"get",
"(",
"'id'",
")",
"if",
"event_id",
"not",
"in",
"mod_extracted",
":",
"mod_events",
".",
"append",
"(",
"event",
")",
"# Iterate over all modification events",
"for",
"event",
"in",
"mod_events",
":",
"stmts",
"=",
"self",
".",
"_get_modification_event",
"(",
"event",
")",
"if",
"stmts",
":",
"for",
"stmt",
"in",
"stmts",
":",
"self",
".",
"statements",
".",
"append",
"(",
"stmt",
")"
] |
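The two Yokozuna path builders above (`search_index_path` and `search_schema_path`) are normally reached through the public client rather than called directly. The sketch below is an assumption about that client surface: the method names, the index and schema names, and the truncated schema document are illustrative only.

```python
import riak

client = riak.RiakClient(protocol='http', http_port=8098)

# Placeholder Solr schema document (a real one defines fields, types, etc.).
schema_xml = '<?xml version="1.0" encoding="UTF-8"?><schema name="my_schema" version="1.5"></schema>'

client.create_search_schema('my_schema', schema_xml)   # ends up at search_schema_path()
client.create_search_index('my_index', 'my_schema')    # ends up at search_index_path()
```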
Extracts the modification operation from the Hll . | def to_op ( self ) : if not self . _adds : return None changes = { } if self . _adds : changes [ 'adds' ] = list ( self . _adds ) return changes | 251,688 | https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/datatypes/hll.py#L49-L60 | [
"def",
"check_webhook_secret",
"(",
"app_configs",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"from",
".",
"import",
"settings",
"as",
"djstripe_settings",
"messages",
"=",
"[",
"]",
"secret",
"=",
"djstripe_settings",
".",
"WEBHOOK_SECRET",
"if",
"secret",
"and",
"not",
"secret",
".",
"startswith",
"(",
"\"whsec_\"",
")",
":",
"messages",
".",
"append",
"(",
"checks",
".",
"Warning",
"(",
"\"DJSTRIPE_WEBHOOK_SECRET does not look valid\"",
",",
"hint",
"=",
"\"It should start with whsec_...\"",
",",
"id",
"=",
"\"djstripe.W003\"",
",",
")",
")",
"return",
"messages"
] |
Adds an element to the HyperLogLog . Datatype cardinality will be updated when the object is saved . | def add ( self , element ) : if not isinstance ( element , six . string_types ) : raise TypeError ( "Hll elements can only be strings" ) self . _adds . add ( element ) | 251,689 | https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/datatypes/hll.py#L62-L72 | [
"def",
"get_transition",
"(",
"self",
",",
"# suppress(too-many-arguments)",
"line",
",",
"line_index",
",",
"column",
",",
"is_escaped",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"del",
"line_index",
"del",
"args",
"del",
"kwargs",
"wait_until_len",
"=",
"len",
"(",
"self",
".",
"_waiting_until",
")",
"if",
"(",
"_token_at_col_in_line",
"(",
"line",
",",
"column",
",",
"self",
".",
"_waiting_until",
",",
"wait_until_len",
")",
"and",
"not",
"_is_escaped",
"(",
"line",
",",
"column",
",",
"is_escaped",
")",
")",
":",
"return",
"(",
"InTextParser",
"(",
")",
",",
"1",
",",
"None",
")",
"return",
"(",
"self",
",",
"1",
",",
"None",
")"
] |
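A sketch tying together the two HyperLogLog records above (`to_op` and `add`). The bucket-type and key names are assumptions, and the `hlls` bucket type must already exist on the server with `datatype = hll`.

```python
import riak

client = riak.RiakClient(protocol='http', http_port=8098)
bucket = client.bucket_type('hlls').bucket('visitors')

hll = bucket.new('page-1')   # fresh Hll datatype
hll.add('user-a')            # buffered locally until store()
hll.add('user-b')
hll.store()                  # transmits the buffered adds (cf. to_op() above)
print(hll.value)             # server-side cardinality estimate
```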
Check server is alive over HTTP | def ping ( self ) : status , _ , body = self . _request ( 'GET' , self . ping_path ( ) ) return ( status is not None ) and ( bytes_to_str ( body ) == 'OK' ) | 251,690 | https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L68-L73 | [
"def",
"grep_log",
"(",
"self",
",",
"expr",
",",
"filename",
"=",
"'system.log'",
",",
"from_mark",
"=",
"None",
")",
":",
"matchings",
"=",
"[",
"]",
"pattern",
"=",
"re",
".",
"compile",
"(",
"expr",
")",
"with",
"open",
"(",
"os",
".",
"path",
".",
"join",
"(",
"self",
".",
"get_path",
"(",
")",
",",
"'logs'",
",",
"filename",
")",
")",
"as",
"f",
":",
"if",
"from_mark",
":",
"f",
".",
"seek",
"(",
"from_mark",
")",
"for",
"line",
"in",
"f",
":",
"m",
"=",
"pattern",
".",
"search",
"(",
"line",
")",
"if",
"m",
":",
"matchings",
".",
"append",
"(",
"(",
"line",
",",
"m",
")",
")",
"return",
"matchings"
] |
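The transport `ping` above backs the client-level liveness check; the host and port below are the usual HTTP defaults and may need adjusting.

```python
import riak

client = riak.RiakClient(protocol='http', host='127.0.0.1', http_port=8098)
print(client.ping())   # True when the node answers GET /ping with "OK"
```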
Gets performance statistics and server information | def stats ( self ) : status , _ , body = self . _request ( 'GET' , self . stats_path ( ) , { 'Accept' : 'application/json' } ) if status == 200 : return json . loads ( bytes_to_str ( body ) ) else : return None | 251,691 | https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L75-L84 | [
"def",
"match_files",
"(",
"self",
",",
"files",
",",
"separators",
"=",
"None",
")",
":",
"if",
"isinstance",
"(",
"files",
",",
"(",
"bytes",
",",
"unicode",
")",
")",
":",
"raise",
"TypeError",
"(",
"\"files:{!r} is not an iterable.\"",
".",
"format",
"(",
"files",
")",
")",
"file_map",
"=",
"util",
".",
"normalize_files",
"(",
"files",
",",
"separators",
"=",
"separators",
")",
"matched_files",
"=",
"util",
".",
"match_files",
"(",
"self",
".",
"patterns",
",",
"iterkeys",
"(",
"file_map",
")",
")",
"for",
"path",
"in",
"matched_files",
":",
"yield",
"file_map",
"[",
"path",
"]"
] |
Fetch a list of keys for the bucket | def get_keys ( self , bucket , timeout = None ) : bucket_type = self . _get_bucket_type ( bucket . bucket_type ) url = self . key_list_path ( bucket . name , bucket_type = bucket_type , timeout = timeout ) status , _ , body = self . _request ( 'GET' , url ) if status == 200 : props = json . loads ( bytes_to_str ( body ) ) return props [ 'keys' ] else : raise RiakError ( 'Error listing keys.' ) | 251,692 | https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L199-L212 | [
"def",
"on_recv",
"(",
"self",
",",
"cf",
")",
":",
"data",
"=",
"bytes",
"(",
"cf",
".",
"data",
")",
"if",
"len",
"(",
"data",
")",
"<",
"2",
":",
"return",
"ae",
"=",
"0",
"if",
"self",
".",
"extended_rx_addr",
"is",
"not",
"None",
":",
"ae",
"=",
"1",
"if",
"len",
"(",
"data",
")",
"<",
"3",
":",
"return",
"if",
"six",
".",
"indexbytes",
"(",
"data",
",",
"0",
")",
"!=",
"self",
".",
"extended_rx_addr",
":",
"return",
"n_pci",
"=",
"six",
".",
"indexbytes",
"(",
"data",
",",
"ae",
")",
"&",
"0xf0",
"if",
"n_pci",
"==",
"N_PCI_FC",
":",
"with",
"self",
".",
"tx_mutex",
":",
"self",
".",
"_recv_fc",
"(",
"data",
"[",
"ae",
":",
"]",
")",
"elif",
"n_pci",
"==",
"N_PCI_SF",
":",
"with",
"self",
".",
"rx_mutex",
":",
"self",
".",
"_recv_sf",
"(",
"data",
"[",
"ae",
":",
"]",
")",
"elif",
"n_pci",
"==",
"N_PCI_FF",
":",
"with",
"self",
".",
"rx_mutex",
":",
"self",
".",
"_recv_ff",
"(",
"data",
"[",
"ae",
":",
"]",
")",
"elif",
"n_pci",
"==",
"N_PCI_CF",
":",
"with",
"self",
".",
"rx_mutex",
":",
"self",
".",
"_recv_cf",
"(",
"data",
"[",
"ae",
":",
"]",
")"
] |
Fetch a list of all buckets | def get_buckets ( self , bucket_type = None , timeout = None ) : bucket_type = self . _get_bucket_type ( bucket_type ) url = self . bucket_list_path ( bucket_type = bucket_type , timeout = timeout ) status , headers , body = self . _request ( 'GET' , url ) if status == 200 : props = json . loads ( bytes_to_str ( body ) ) return props [ 'buckets' ] else : raise RiakError ( 'Error getting buckets.' ) | 251,693 | https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L225-L238 | [
"def",
"ReadFD",
"(",
"self",
",",
"Channel",
")",
":",
"try",
":",
"if",
"platform",
".",
"system",
"(",
")",
"==",
"'Darwin'",
":",
"msg",
"=",
"TPCANMsgFDMac",
"(",
")",
"else",
":",
"msg",
"=",
"TPCANMsgFD",
"(",
")",
"timestamp",
"=",
"TPCANTimestampFD",
"(",
")",
"res",
"=",
"self",
".",
"__m_dllBasic",
".",
"CAN_ReadFD",
"(",
"Channel",
",",
"byref",
"(",
"msg",
")",
",",
"byref",
"(",
"timestamp",
")",
")",
"return",
"TPCANStatus",
"(",
"res",
")",
",",
"msg",
",",
"timestamp",
"except",
":",
"logger",
".",
"error",
"(",
"\"Exception on PCANBasic.ReadFD\"",
")",
"raise"
] |
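Both listing operations above are available on the public client and bucket objects. A short sketch with an illustrative bucket name; note that listing keys or buckets is expensive and best avoided on production clusters.

```python
import riak

client = riak.RiakClient(protocol='http', http_port=8098)
bucket = client.bucket('users')

print(bucket.get_keys())      # keys in the bucket
print(client.get_buckets())   # all buckets in the default bucket type
```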
Get properties for a bucket | def get_bucket_props ( self , bucket ) : bucket_type = self . _get_bucket_type ( bucket . bucket_type ) url = self . bucket_properties_path ( bucket . name , bucket_type = bucket_type ) status , headers , body = self . _request ( 'GET' , url ) if status == 200 : props = json . loads ( bytes_to_str ( body ) ) return props [ 'props' ] else : raise RiakError ( 'Error getting bucket properties.' ) | 251,694 | https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L258-L271 | [
"def",
"ReadFD",
"(",
"self",
",",
"Channel",
")",
":",
"try",
":",
"if",
"platform",
".",
"system",
"(",
")",
"==",
"'Darwin'",
":",
"msg",
"=",
"TPCANMsgFDMac",
"(",
")",
"else",
":",
"msg",
"=",
"TPCANMsgFD",
"(",
")",
"timestamp",
"=",
"TPCANTimestampFD",
"(",
")",
"res",
"=",
"self",
".",
"__m_dllBasic",
".",
"CAN_ReadFD",
"(",
"Channel",
",",
"byref",
"(",
"msg",
")",
",",
"byref",
"(",
"timestamp",
")",
")",
"return",
"TPCANStatus",
"(",
"res",
")",
",",
"msg",
",",
"timestamp",
"except",
":",
"logger",
".",
"error",
"(",
"\"Exception on PCANBasic.ReadFD\"",
")",
"raise"
] |
Set the properties on the bucket object given | def set_bucket_props ( self , bucket , props ) : bucket_type = self . _get_bucket_type ( bucket . bucket_type ) url = self . bucket_properties_path ( bucket . name , bucket_type = bucket_type ) headers = { 'Content-Type' : 'application/json' } content = json . dumps ( { 'props' : props } ) # Run the request... status , _ , body = self . _request ( 'PUT' , url , headers , content ) if status == 401 : raise SecurityError ( 'Not authorized to set bucket properties.' ) elif status != 204 : raise RiakError ( 'Error setting bucket properties.' ) return True | 251,695 | https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L273-L290 | [
"def",
"on_recv",
"(",
"self",
",",
"cf",
")",
":",
"data",
"=",
"bytes",
"(",
"cf",
".",
"data",
")",
"if",
"len",
"(",
"data",
")",
"<",
"2",
":",
"return",
"ae",
"=",
"0",
"if",
"self",
".",
"extended_rx_addr",
"is",
"not",
"None",
":",
"ae",
"=",
"1",
"if",
"len",
"(",
"data",
")",
"<",
"3",
":",
"return",
"if",
"six",
".",
"indexbytes",
"(",
"data",
",",
"0",
")",
"!=",
"self",
".",
"extended_rx_addr",
":",
"return",
"n_pci",
"=",
"six",
".",
"indexbytes",
"(",
"data",
",",
"ae",
")",
"&",
"0xf0",
"if",
"n_pci",
"==",
"N_PCI_FC",
":",
"with",
"self",
".",
"tx_mutex",
":",
"self",
".",
"_recv_fc",
"(",
"data",
"[",
"ae",
":",
"]",
")",
"elif",
"n_pci",
"==",
"N_PCI_SF",
":",
"with",
"self",
".",
"rx_mutex",
":",
"self",
".",
"_recv_sf",
"(",
"data",
"[",
"ae",
":",
"]",
")",
"elif",
"n_pci",
"==",
"N_PCI_FF",
":",
"with",
"self",
".",
"rx_mutex",
":",
"self",
".",
"_recv_ff",
"(",
"data",
"[",
"ae",
":",
"]",
")",
"elif",
"n_pci",
"==",
"N_PCI_CF",
":",
"with",
"self",
".",
"rx_mutex",
":",
"self",
".",
"_recv_cf",
"(",
"data",
"[",
"ae",
":",
"]",
")"
] |
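Reading and writing bucket properties, as in the two transport methods above, is usually done through the bucket object; the property names and values below are illustrative.

```python
import riak

client = riak.RiakClient(protocol='http', http_port=8098)
bucket = client.bucket('users')

print(bucket.get_properties())                            # e.g. {'n_val': 3, ...}
bucket.set_properties({'n_val': 3, 'allow_mult': False})  # round-trips through PUT
```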
reset the properties on the bucket object given | def clear_bucket_props ( self , bucket ) : bucket_type = self . _get_bucket_type ( bucket . bucket_type ) url = self . bucket_properties_path ( bucket . name , bucket_type = bucket_type ) url = self . bucket_properties_path ( bucket . name ) headers = { 'Content-Type' : 'application/json' } # Run the request... status , _ , _ = self . _request ( 'DELETE' , url , headers , None ) if status == 204 : return True elif status == 405 : return False else : raise RiakError ( 'Error %s clearing bucket properties.' % status ) | 251,696 | https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L292-L311 | [
"def",
"on_recv",
"(",
"self",
",",
"cf",
")",
":",
"data",
"=",
"bytes",
"(",
"cf",
".",
"data",
")",
"if",
"len",
"(",
"data",
")",
"<",
"2",
":",
"return",
"ae",
"=",
"0",
"if",
"self",
".",
"extended_rx_addr",
"is",
"not",
"None",
":",
"ae",
"=",
"1",
"if",
"len",
"(",
"data",
")",
"<",
"3",
":",
"return",
"if",
"six",
".",
"indexbytes",
"(",
"data",
",",
"0",
")",
"!=",
"self",
".",
"extended_rx_addr",
":",
"return",
"n_pci",
"=",
"six",
".",
"indexbytes",
"(",
"data",
",",
"ae",
")",
"&",
"0xf0",
"if",
"n_pci",
"==",
"N_PCI_FC",
":",
"with",
"self",
".",
"tx_mutex",
":",
"self",
".",
"_recv_fc",
"(",
"data",
"[",
"ae",
":",
"]",
")",
"elif",
"n_pci",
"==",
"N_PCI_SF",
":",
"with",
"self",
".",
"rx_mutex",
":",
"self",
".",
"_recv_sf",
"(",
"data",
"[",
"ae",
":",
"]",
")",
"elif",
"n_pci",
"==",
"N_PCI_FF",
":",
"with",
"self",
".",
"rx_mutex",
":",
"self",
".",
"_recv_ff",
"(",
"data",
"[",
"ae",
":",
"]",
")",
"elif",
"n_pci",
"==",
"N_PCI_CF",
":",
"with",
"self",
".",
"rx_mutex",
":",
"self",
".",
"_recv_cf",
"(",
"data",
"[",
"ae",
":",
"]",
")"
] |
Get properties for a bucket - type | def get_bucket_type_props ( self , bucket_type ) : self . _check_bucket_types ( bucket_type ) url = self . bucket_type_properties_path ( bucket_type . name ) status , headers , body = self . _request ( 'GET' , url ) if status == 200 : props = json . loads ( bytes_to_str ( body ) ) return props [ 'props' ] else : raise RiakError ( 'Error getting bucket-type properties.' ) | 251,697 | https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L313-L325 | [
"def",
"_mod_repo_in_file",
"(",
"repo",
",",
"repostr",
",",
"filepath",
")",
":",
"with",
"salt",
".",
"utils",
".",
"files",
".",
"fopen",
"(",
"filepath",
")",
"as",
"fhandle",
":",
"output",
"=",
"[",
"]",
"for",
"line",
"in",
"fhandle",
":",
"cols",
"=",
"salt",
".",
"utils",
".",
"args",
".",
"shlex_split",
"(",
"salt",
".",
"utils",
".",
"stringutils",
".",
"to_unicode",
"(",
"line",
")",
".",
"strip",
"(",
")",
")",
"if",
"repo",
"not",
"in",
"cols",
":",
"output",
".",
"append",
"(",
"line",
")",
"else",
":",
"output",
".",
"append",
"(",
"salt",
".",
"utils",
".",
"stringutils",
".",
"to_str",
"(",
"repostr",
"+",
"'\\n'",
")",
")",
"with",
"salt",
".",
"utils",
".",
"files",
".",
"fopen",
"(",
"filepath",
",",
"'w'",
")",
"as",
"fhandle",
":",
"fhandle",
".",
"writelines",
"(",
"output",
")"
] |
Set the properties on the bucket - type | def set_bucket_type_props ( self , bucket_type , props ) : self . _check_bucket_types ( bucket_type ) url = self . bucket_type_properties_path ( bucket_type . name ) headers = { 'Content-Type' : 'application/json' } content = json . dumps ( { 'props' : props } ) # Run the request... status , _ , _ = self . _request ( 'PUT' , url , headers , content ) if status != 204 : raise RiakError ( 'Error setting bucket-type properties.' ) return True | 251,698 | https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L327-L341 | [
"def",
"_recv_msg",
"(",
"self",
")",
":",
"command",
"=",
"ord",
"(",
"recv_blocking",
"(",
"self",
".",
"_soc",
",",
"1",
")",
")",
"msglen",
"=",
"recv_blocking",
"(",
"self",
".",
"_soc",
",",
"4",
")",
"msglen",
"=",
"(",
"(",
"msglen",
"[",
"0",
"]",
"<<",
"24",
")",
"+",
"(",
"msglen",
"[",
"1",
"]",
"<<",
"16",
")",
"+",
"(",
"msglen",
"[",
"2",
"]",
"<<",
"8",
")",
"+",
"msglen",
"[",
"3",
"]",
")",
"msg",
"=",
"recv_blocking",
"(",
"self",
".",
"_soc",
",",
"msglen",
")",
"return",
"command",
",",
"msg"
] |
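Bucket-type properties follow the same pattern one level up. The sketch below assumes the client exposes matching `get_properties`/`set_properties` calls on the bucket-type object and that the type was already created with `riak-admin bucket-type create`.

```python
import riak

client = riak.RiakClient(protocol='http', http_port=8098)
btype = client.bucket_type('maps')

print(btype.get_properties())
btype.set_properties({'n_val': 5})
```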
Run a MapReduce query . | def mapred ( self , inputs , query , timeout = None ) : # Construct the job, optionally set the timeout... content = self . _construct_mapred_json ( inputs , query , timeout ) # Do the request... url = self . mapred_path ( ) headers = { 'Content-Type' : 'application/json' } status , headers , body = self . _request ( 'POST' , url , headers , content ) # Make sure the expected status code came back... if status != 200 : raise RiakError ( 'Error running MapReduce operation. Headers: %s Body: %s' % ( repr ( headers ) , repr ( body ) ) ) result = json . loads ( bytes_to_str ( body ) ) return result | 251,699 | https://github.com/basho/riak-python-client/blob/91de13a16607cdf553d1a194e762734e3bec4231/riak/transports/http/transport.py#L343-L362 | [
"def",
"fetch",
"(",
"self",
",",
"vault_client",
")",
":",
"backends",
"=",
"[",
"(",
"self",
".",
"mounts",
",",
"SecretBackend",
")",
",",
"(",
"self",
".",
"auths",
",",
"AuthBackend",
")",
",",
"(",
"self",
".",
"logs",
",",
"LogBackend",
")",
"]",
"for",
"b_list",
",",
"b_class",
"in",
"backends",
":",
"backend_list",
"=",
"b_list",
"(",
")",
"if",
"backend_list",
":",
"existing",
"=",
"getattr",
"(",
"vault_client",
",",
"b_class",
".",
"list_fun",
")",
"(",
")",
"for",
"backend",
"in",
"backend_list",
":",
"backend",
".",
"fetch",
"(",
"vault_client",
",",
"existing",
")",
"for",
"rsc",
"in",
"self",
".",
"resources",
"(",
")",
":",
"if",
"issubclass",
"(",
"type",
"(",
"rsc",
")",
",",
"Secret",
")",
":",
"nc_exists",
"=",
"(",
"rsc",
".",
"mount",
"!=",
"'cubbyhole'",
"and",
"find_backend",
"(",
"rsc",
".",
"mount",
",",
"self",
".",
"_mounts",
")",
".",
"existing",
")",
"if",
"nc_exists",
"or",
"rsc",
".",
"mount",
"==",
"'cubbyhole'",
":",
"rsc",
".",
"fetch",
"(",
"vault_client",
")",
"elif",
"issubclass",
"(",
"type",
"(",
"rsc",
")",
",",
"Auth",
")",
":",
"if",
"find_backend",
"(",
"rsc",
".",
"mount",
",",
"self",
".",
"_auths",
")",
".",
"existing",
":",
"rsc",
".",
"fetch",
"(",
"vault_client",
")",
"elif",
"issubclass",
"(",
"type",
"(",
"rsc",
")",
",",
"Mount",
")",
":",
"rsc",
".",
"existing",
"=",
"find_backend",
"(",
"rsc",
".",
"mount",
",",
"self",
".",
"_mounts",
")",
".",
"existing",
"else",
":",
"rsc",
".",
"fetch",
"(",
"vault_client",
")",
"return",
"self"
] |
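The `mapred` transport method above only serializes an `inputs`/`query` pair into Riak's MapReduce job JSON and POSTs it. An illustrative pair is sketched below; the bucket name is made up, while `Riak.mapValuesJson` is one of the JavaScript helpers shipped with Riak.

```python
# Map over every object in 'mybucket' and return its JSON-decoded value.
inputs = 'mybucket'
query = [
    {
        'map': {
            'language': 'javascript',
            'name': 'Riak.mapValuesJson',
            'keep': True,
        }
    }
]
# transport.mapred(inputs, query) would POST this job to the /mapred resource.
```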