Dataset schema: repo (string, 7-55 chars), path (string, 4-127), func_name (string, 1-88), original_string (string, 75-19.8k), language (1 class: "python"), code (string, 75-19.8k, identical to original_string), code_tokens (sequence of 20-707 tokens, tokenized code), docstring (string, 3-17.3k), docstring_tokens (sequence of 3-222 tokens, tokenized docstring), sha (string, 40), url (string, 87-242), partition (1 class: "train"), idx (int64, 0-252k).

repo | path | func_name | code | language | sha | url | partition | idx
---|---|---|---|---|---|---|---|---
getsentry/semaphore | py/semaphore/utils.py | decode_str | def decode_str(s, free=False):
"""Decodes a SymbolicStr"""
try:
if s.len == 0:
return u""
return ffi.unpack(s.data, s.len).decode("utf-8", "replace")
finally:
if free:
            lib.semaphore_str_free(ffi.addressof(s)) | python | 6f260b4092261e893b4debd9a3a7a78232f46c5e | https://github.com/getsentry/semaphore/blob/6f260b4092261e893b4debd9a3a7a78232f46c5e/py/semaphore/utils.py#L69-L77 | train | 251,300
getsentry/semaphore | py/semaphore/utils.py | encode_str | def encode_str(s, mutable=False):
"""Encodes a SemaphoreStr"""
rv = ffi.new("SemaphoreStr *")
if isinstance(s, text_type):
s = s.encode("utf-8")
if mutable:
s = bytearray(s)
rv.data = ffi.from_buffer(s)
rv.len = len(s)
# we have to hold a weak reference here to ensure our string does not
# get collected before the string is used.
attached_refs[rv] = s
    return rv | python | 6f260b4092261e893b4debd9a3a7a78232f46c5e | https://github.com/getsentry/semaphore/blob/6f260b4092261e893b4debd9a3a7a78232f46c5e/py/semaphore/utils.py#L80-L92 | train | 251,301
getsentry/semaphore | py/semaphore/utils.py | decode_uuid | def decode_uuid(value):
"""Decodes the given uuid value."""
    return uuid.UUID(bytes=bytes(bytearray(ffi.unpack(value.data, 16)))) | python | 6f260b4092261e893b4debd9a3a7a78232f46c5e | https://github.com/getsentry/semaphore/blob/6f260b4092261e893b4debd9a3a7a78232f46c5e/py/semaphore/utils.py#L104-L106 | train | 251,302
getsentry/semaphore | scripts/git-precommit-hook.py | has_cargo_fmt | def has_cargo_fmt():
"""Runs a quick check to see if cargo fmt is installed."""
try:
c = subprocess.Popen(
["cargo", "fmt", "--", "--help"],
stdout=subprocess.PIPE,
stderr=subprocess.PIPE,
)
return c.wait() == 0
except OSError:
        return False | python | 6f260b4092261e893b4debd9a3a7a78232f46c5e | https://github.com/getsentry/semaphore/blob/6f260b4092261e893b4debd9a3a7a78232f46c5e/scripts/git-precommit-hook.py#L8-L18 | train | 251,303
getsentry/semaphore | scripts/git-precommit-hook.py | get_modified_files | def get_modified_files():
"""Returns a list of all modified files."""
c = subprocess.Popen(
["git", "diff-index", "--cached", "--name-only", "HEAD"], stdout=subprocess.PIPE
)
    return c.communicate()[0].splitlines() | python | 6f260b4092261e893b4debd9a3a7a78232f46c5e | https://github.com/getsentry/semaphore/blob/6f260b4092261e893b4debd9a3a7a78232f46c5e/scripts/git-precommit-hook.py#L21-L26 | train | 251,304
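For reference, the same staged-file listing can be produced with `subprocess.check_output`; this is a hedged sketch (it has to run inside a git checkout with at least one commit, and like the function above it yields bytes paths on Python 3):

```python
import subprocess

# Names of files staged for commit, one bytes entry per path.
staged_files = subprocess.check_output(
    ["git", "diff-index", "--cached", "--name-only", "HEAD"]
).splitlines()
print(staged_files)  # e.g. [b'scripts/git-precommit-hook.py'] -- bytes, not str
```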
RobinNil/file_read_backwards | file_read_backwards/buffer_work_space.py | _get_next_chunk | def _get_next_chunk(fp, previously_read_position, chunk_size):
"""Return next chunk of data that we would from the file pointer.
Args:
fp: file-like object
previously_read_position: file pointer position that we have read from
chunk_size: desired read chunk_size
Returns:
(bytestring, int): data that has been read in, the file pointer position where the data has been read from
"""
seek_position, read_size = _get_what_to_read_next(fp, previously_read_position, chunk_size)
fp.seek(seek_position)
read_content = fp.read(read_size)
read_position = seek_position
    return read_content, read_position | python | e56443095b58aae309fbc43a0943eba867dc8500 | https://github.com/RobinNil/file_read_backwards/blob/e56443095b58aae309fbc43a0943eba867dc8500/file_read_backwards/buffer_work_space.py#L95-L110 | train | 251,305
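A quick way to see what `_get_next_chunk` returns is to drive it with an in-memory file. A minimal sketch, assuming the `file_read_backwards` package is installed and the helper is importable from `file_read_backwards.buffer_work_space` as the row above indicates:

```python
import io

from file_read_backwards.buffer_work_space import _get_next_chunk

# 10 bytes with no newline near the chunk boundary, so no rewinding is needed.
fp = io.BytesIO(b"0123456789")
content, position = _get_next_chunk(fp, previously_read_position=10, chunk_size=4)
print(content, position)  # b'6789' 6 -- the last 4 bytes, read starting at offset 6
```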
RobinNil/file_read_backwards | file_read_backwards/buffer_work_space.py | _get_what_to_read_next | def _get_what_to_read_next(fp, previously_read_position, chunk_size):
"""Return information on which file pointer position to read from and how many bytes.
Args:
fp
        previously_read_position (int): The file pointer position that has been read previously
chunk_size(int): ideal io chunk_size
Returns:
(int, int): The next seek position, how many bytes to read next
"""
seek_position = max(previously_read_position - chunk_size, 0)
read_size = chunk_size
# examples: say, our new_lines are potentially "\r\n", "\n", "\r"
# find a reading point where it is not "\n", rewind further if necessary
# if we have "\r\n" and we read in "\n",
# the next iteration would treat "\r" as a different new line.
# Q: why don't I just check if it is b"\n", but use a function ?
# A: so that we can potentially expand this into generic sets of separators, later on.
while seek_position > 0:
fp.seek(seek_position)
if _is_partially_read_new_line(fp.read(1)):
seek_position -= 1
read_size += 1 # as we rewind further, let's make sure we read more to compensate
else:
break
    # take care of special case when we are back to the beginning of the file
read_size = min(previously_read_position - seek_position, read_size)
    return seek_position, read_size | python | e56443095b58aae309fbc43a0943eba867dc8500 | https://github.com/RobinNil/file_read_backwards/blob/e56443095b58aae309fbc43a0943eba867dc8500/file_read_backwards/buffer_work_space.py#L113-L143 | train | 251,306
RobinNil/file_read_backwards | file_read_backwards/buffer_work_space.py | _remove_trailing_new_line | def _remove_trailing_new_line(l):
"""Remove a single instance of new line at the end of l if it exists.
Returns:
bytestring
"""
# replace only 1 instance of newline
# match longest line first (hence the reverse=True), we want to match "\r\n" rather than "\n" if we can
for n in sorted(new_lines_bytes, key=lambda x: len(x), reverse=True):
if l.endswith(n):
remove_new_line = slice(None, -len(n))
return l[remove_new_line]
    return l | python | e56443095b58aae309fbc43a0943eba867dc8500 | https://github.com/RobinNil/file_read_backwards/blob/e56443095b58aae309fbc43a0943eba867dc8500/file_read_backwards/buffer_work_space.py#L146-L158 | train | 251,307
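The single-newline strip is easy to check in isolation. A self-contained sketch of the same logic, assuming `new_lines_bytes` is `[b"\r\n", b"\n", b"\r"]` (which the sort-by-length trick above implies):

```python
new_lines_bytes = [b"\r\n", b"\n", b"\r"]  # assumed module-level constant

def remove_trailing_new_line(line):
    # Longest separator first, so b"\r\n" wins over a bare b"\n".
    for n in sorted(new_lines_bytes, key=len, reverse=True):
        if line.endswith(n):
            return line[:-len(n)]
    return line

assert remove_trailing_new_line(b"abc\r\n") == b"abc"    # the whole "\r\n" pair is removed
assert remove_trailing_new_line(b"abc\n\n") == b"abc\n"  # only one trailing newline is stripped
assert remove_trailing_new_line(b"abc") == b"abc"        # no trailing newline: unchanged
```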
RobinNil/file_read_backwards | file_read_backwards/buffer_work_space.py | _find_furthest_new_line | def _find_furthest_new_line(read_buffer):
"""Return -1 if read_buffer does not contain new line otherwise the position of the rightmost newline.
Args:
read_buffer (bytestring)
Returns:
int: The right most position of new line character in read_buffer if found, else -1
"""
new_line_positions = [read_buffer.rfind(n) for n in new_lines_bytes]
    return max(new_line_positions) | python | e56443095b58aae309fbc43a0943eba867dc8500 | https://github.com/RobinNil/file_read_backwards/blob/e56443095b58aae309fbc43a0943eba867dc8500/file_read_backwards/buffer_work_space.py#L161-L171 | train | 251,308
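Likewise, `_find_furthest_new_line` is just a max over `rfind` results. A small sketch with the same assumed separator list:

```python
new_lines_bytes = [b"\r\n", b"\n", b"\r"]  # assumed module-level constant

def find_furthest_new_line(read_buffer):
    return max(read_buffer.rfind(n) for n in new_lines_bytes)

assert find_furthest_new_line(b"a\nb\r\nc") == 4   # rightmost newline byte is the "\n" at index 4
assert find_furthest_new_line(b"no newline here") == -1
```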
RobinNil/file_read_backwards | file_read_backwards/buffer_work_space.py | BufferWorkSpace.add_to_buffer | def add_to_buffer(self, content, read_position):
"""Add additional bytes content as read from the read_position.
Args:
content (bytes): data to be added to buffer working BufferWorkSpac.
read_position (int): where in the file pointer the data was read from.
"""
self.read_position = read_position
if self.read_buffer is None:
self.read_buffer = content
else:
        self.read_buffer = content + self.read_buffer | python | e56443095b58aae309fbc43a0943eba867dc8500 | https://github.com/RobinNil/file_read_backwards/blob/e56443095b58aae309fbc43a0943eba867dc8500/file_read_backwards/buffer_work_space.py#L29-L40 | train | 251,309
RobinNil/file_read_backwards | file_read_backwards/buffer_work_space.py | BufferWorkSpace.yieldable | def yieldable(self):
"""Return True if there is a line that the buffer can return, False otherwise."""
if self.read_buffer is None:
return False
t = _remove_trailing_new_line(self.read_buffer)
n = _find_furthest_new_line(t)
if n >= 0:
return True
# we have read in entire file and have some unprocessed lines
if self.read_position == 0 and self.read_buffer is not None:
return True
    return False | python | e56443095b58aae309fbc43a0943eba867dc8500 | https://github.com/RobinNil/file_read_backwards/blob/e56443095b58aae309fbc43a0943eba867dc8500/file_read_backwards/buffer_work_space.py#L42-L55 | train | 251,310
RobinNil/file_read_backwards | file_read_backwards/buffer_work_space.py | BufferWorkSpace.return_line | def return_line(self):
"""Return a new line if it is available.
Precondition: self.yieldable() must be True
"""
assert(self.yieldable())
t = _remove_trailing_new_line(self.read_buffer)
i = _find_furthest_new_line(t)
if i >= 0:
l = i + 1
after_new_line = slice(l, None)
up_to_include_new_line = slice(0, l)
r = t[after_new_line]
self.read_buffer = t[up_to_include_new_line]
else: # the case where we have read in entire file and at the "last" line
r = t
self.read_buffer = None
    return r | python | e56443095b58aae309fbc43a0943eba867dc8500 | https://github.com/RobinNil/file_read_backwards/blob/e56443095b58aae309fbc43a0943eba867dc8500/file_read_backwards/buffer_work_space.py#L57-L76 | train | 251,311
RobinNil/file_read_backwards | file_read_backwards/buffer_work_space.py | BufferWorkSpace.read_until_yieldable | def read_until_yieldable(self):
"""Read in additional chunks until it is yieldable."""
while not self.yieldable():
read_content, read_position = _get_next_chunk(self.fp, self.read_position, self.chunk_size)
        self.add_to_buffer(read_content, read_position) | python | e56443095b58aae309fbc43a0943eba867dc8500 | https://github.com/RobinNil/file_read_backwards/blob/e56443095b58aae309fbc43a0943eba867dc8500/file_read_backwards/buffer_work_space.py#L78-L82 | train | 251,312
RobinNil/file_read_backwards | file_read_backwards/file_read_backwards.py | FileReadBackwardsIterator.next | def next(self):
"""Returns unicode string from the last line until the beginning of file.
Gets exhausted if::
* already reached the beginning of the file on previous iteration
* the file got closed
When it gets exhausted, it closes the file handler.
"""
# Using binary mode, because some encodings such as "utf-8" use variable number of
# bytes to encode different Unicode points.
# Without using binary mode, we would probably need to understand each encoding more
# and do the seek operations to find the proper boundary before issuing read
if self.closed:
raise StopIteration
if self.__buf.has_returned_every_line():
self.close()
raise StopIteration
self.__buf.read_until_yieldable()
r = self.__buf.return_line()
    return r.decode(self.encoding) | python | e56443095b58aae309fbc43a0943eba867dc8500 | https://github.com/RobinNil/file_read_backwards/blob/e56443095b58aae309fbc43a0943eba867dc8500/file_read_backwards/file_read_backwards.py#L91-L112 | train | 251,313
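This iterator is what drives the package's public `FileReadBackwards` class. A typical usage sketch, assuming the `file_read_backwards` package is installed (the log path is only an illustration); lines come back as unicode strings, last line first, with trailing newlines already stripped:

```python
from file_read_backwards import FileReadBackwards

# Scan a potentially huge log file from the end without loading it into memory.
with FileReadBackwards("/var/log/app.log", encoding="utf-8") as frb:
    for line in frb:
        if "ERROR" in line:
            print(line)
            break  # stop at the most recent error
```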
hendrix/hendrix | examples/django_hx_chatserver/example_app/chat/views.py | home | def home(request, chat_channel_name=None):
"""
if we have a chat_channel_name kwarg,
have the response include that channel name
so the javascript knows to subscribe to that
channel...
"""
if not chat_channel_name:
chat_channel_name = 'homepage'
context = {
'address': chat_channel_name,
'history': [],
}
if ChatMessage.objects.filter(channel=chat_channel_name).exists():
context['history'] = ChatMessage.objects.filter(
channel=chat_channel_name)
# TODO add https
websocket_prefix = "ws"
websocket_port = 9000
context['websocket_prefix'] = websocket_prefix
context['websocket_port'] = websocket_port
    return render(request, 'chat.html', context) | python | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/examples/django_hx_chatserver/example_app/chat/views.py#L9-L37 | train | 251,314
hendrix/hendrix | hendrix/ux.py | hendrixLauncher | def hendrixLauncher(action, options, with_tiempo=False):
"""
Decides which version of HendrixDeploy to use and then
launches it.
"""
if options['key'] and options['cert'] and options['cache']:
from hendrix.deploy import hybrid
HendrixDeploy = hybrid.HendrixDeployHybrid
elif options['key'] and options['cert']:
from hendrix.deploy import tls
HendrixDeploy = tls.HendrixDeployTLS
elif options['cache']:
HendrixDeploy = cache.HendrixDeployCache
else:
HendrixDeploy = base.HendrixDeploy
if with_tiempo:
deploy = HendrixDeploy(action='start', options=options)
deploy.run()
else:
deploy = HendrixDeploy(action, options)
        deploy.run() | python | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/ux.py#L67-L87 | train | 251,315
hendrix/hendrix | hendrix/ux.py | logReload | def logReload(options):
"""
encompasses all the logic for reloading observer.
"""
event_handler = Reload(options)
observer = Observer()
observer.schedule(event_handler, path='.', recursive=True)
observer.start()
try:
while True:
time.sleep(1)
except KeyboardInterrupt:
observer.stop()
pid = os.getpid()
chalk.eraser()
chalk.green('\nHendrix successfully closed.')
os.kill(pid, 15)
observer.join()
    exit('\n') | python | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/ux.py#L100-L118 | train | 251,316
hendrix/hendrix | hendrix/ux.py | launch | def launch(*args, **options):
"""
launch acts on the user specified action and options by executing
    Hendrix.run
"""
action = args[0]
if options['reload']:
logReload(options)
else:
        assignDeploymentInstance(action, options) | python | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/ux.py#L121-L130 | train | 251,317
hendrix/hendrix | hendrix/ux.py | findSettingsModule | def findSettingsModule():
"Find the settings module dot path within django's manage.py file"
try:
with open('manage.py', 'r') as manage:
manage_contents = manage.read()
search = re.search(
r"([\"\'](?P<module>[a-z\.]+)[\"\'])", manage_contents
)
if search: # django version < 1.7
settings_mod = search.group("module")
else:
# in 1.7, manage.py settings declaration looks like:
# os.environ.setdefault(
# "DJANGO_SETTINGS_MODULE", "example_app.settings"
# )
search = re.search(
"\".*?\"(,\\s)??\"(?P<module>.*?)\"\\)$",
manage_contents, re.I | re.S | re.M
)
settings_mod = search.group("module")
os.environ.setdefault('DJANGO_SETTINGS_MODULE', settings_mod)
except IOError as e:
msg = (
str(e) + '\nPlease ensure that you are in the same directory '
'as django\'s "manage.py" file.'
)
raise IOError(chalk.red(msg), None, sys.exc_info()[2])
except AttributeError:
settings_mod = ''
    return settings_mod | python | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/ux.py#L133-L165 | train | 251,318
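To see what the first (pre-Django-1.7) pattern extracts, here is a standalone sketch; the `manage.py` fragment is made up for illustration. Note that the character class `[a-z\.]+` only matches lowercase module paths without underscores; when it finds nothing, the function falls back to the second, Django 1.7-style pattern:

```python
import re

manage_contents = 'os.environ.setdefault("DJANGO_SETTINGS_MODULE", "mysite.settings")'
search = re.search(r"([\"\'](?P<module>[a-z\.]+)[\"\'])", manage_contents)
print(search.group("module"))  # mysite.settings
```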
hendrix/hendrix | hendrix/ux.py | subprocessLaunch | def subprocessLaunch():
"""
This function is called by the hxw script.
It takes no arguments, and returns an instance of HendrixDeploy
"""
if not redis_available:
raise RedisException("can't launch this subprocess without tiempo/redis.")
try:
action = 'start'
options = REDIS.get('worker_args')
assignDeploymentInstance(action='start', options=options)
except Exception:
chalk.red('\n Encountered an unhandled exception while trying to %s hendrix.\n' % action, pipe=chalk.stderr)
        raise | python | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/ux.py#L227-L240 | train | 251,319
hendrix/hendrix | hendrix/ux.py | main | def main(args=None):
"The function to execute when running hx"
if args is None:
args = sys.argv[1:]
options, args = HendrixOptionParser.parse_args(args)
options = vars(options)
try:
action = args[0]
except IndexError:
HendrixOptionParser.print_help()
return
exposeProject(options)
options = djangoVsWsgi(options)
options = devFriendly(options)
redirect = noiseControl(options)
try:
launch(*args, **options)
except Exception:
chalk.red('\n Encountered an unhandled exception while trying to %s hendrix.\n' % action, pipe=chalk.stderr)
        raise | python | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/ux.py#L243-L268 | train | 251,320
hendrix/hendrix | hendrix/contrib/cache/resource.py | CacheClient.handleHeader | def handleHeader(self, key, value):
"extends handleHeader to save headers to a local response object"
key_lower = key.lower()
if key_lower == 'location':
value = self.modLocationPort(value)
self._response.headers[key_lower] = value
if key_lower != 'cache-control':
# This causes us to not pass on the 'cache-control' parameter
# to the browser
# TODO: we should have a means of giving the user the option to
# configure how they want to manage browser-side cache control
            proxy.ProxyClient.handleHeader(self, key, value) | python | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/contrib/cache/resource.py#L38-L49 | train | 251,321
hendrix/hendrix | hendrix/contrib/cache/resource.py | CacheClient.handleStatus | def handleStatus(self, version, code, message):
"extends handleStatus to instantiate a local response object"
proxy.ProxyClient.handleStatus(self, version, code, message)
# client.Response is currently just a container for needed data
    self._response = client.Response(version, code, message, {}, None) | python | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/contrib/cache/resource.py#L51-L55 | train | 251,322
hendrix/hendrix | hendrix/contrib/cache/resource.py | CacheClient.modLocationPort | def modLocationPort(self, location):
"""
    Ensures that the location port is set to the given port value
Used in `handleHeader`
"""
components = urlparse.urlparse(location)
reverse_proxy_port = self.father.getHost().port
reverse_proxy_host = self.father.getHost().host
# returns an ordered dict of urlparse.ParseResult components
_components = components._asdict()
_components['netloc'] = '%s:%d' % (
reverse_proxy_host, reverse_proxy_port
)
    return urlparse.urlunparse(_components.values()) | python | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/contrib/cache/resource.py#L57-L70 | train | 251,323
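The rewrite in `modLocationPort` is plain urlparse surgery on the `netloc` field. A standalone sketch of the same idea using the Python 3 stdlib (the code above uses the Python 2 style `urlparse` module and pulls the host and port from the Twisted request):

```python
from urllib.parse import urlparse, urlunparse

def mod_location_port(location, proxy_host, proxy_port):
    # Swap only the network location; scheme, path, query, etc. are preserved.
    components = urlparse(location)._asdict()  # ordered dict of the six URL parts
    components["netloc"] = "%s:%d" % (proxy_host, proxy_port)
    return urlunparse(components.values())

print(mod_location_port("http://backend:8000/accounts/login/", "127.0.0.1", 8080))
# http://127.0.0.1:8080/accounts/login/
```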
hendrix/hendrix | hendrix/contrib/cache/resource.py | CacheClient.handleResponsePart | def handleResponsePart(self, buffer):
"""
Sends the content to the browser and keeps a local copy of it.
buffer is just a str of the content to be shown, father is the intial
request.
"""
self.father.write(buffer)
    self.buffer.write(buffer) | python | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/contrib/cache/resource.py#L92-L99 | train | 251,324
hendrix/hendrix | hendrix/contrib/cache/resource.py | CacheProxyResource.getChild | def getChild(self, path, request):
"""
This is necessary because the parent class would call
proxy.ReverseProxyResource instead of CacheProxyResource
"""
return CacheProxyResource(
self.host, self.port, self.path + '/' + urlquote(path, safe=""),
self.reactor
    ) | python | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/contrib/cache/resource.py#L148-L156 | train | 251,325
hendrix/hendrix | hendrix/contrib/cache/resource.py | CacheProxyResource.getChildWithDefault | def getChildWithDefault(self, path, request):
"""
Retrieve a static or dynamically generated child resource from me.
"""
cached_resource = self.getCachedResource(request)
if cached_resource:
reactor.callInThread(
responseInColor,
request,
'200 OK',
cached_resource,
'Cached',
'underscore'
)
return cached_resource
# original logic
if path in self.children:
return self.children[path]
    return self.getChild(path, request) | python | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/contrib/cache/resource.py#L158-L176 | train | 251,326
hendrix/hendrix | hendrix/contrib/cache/resource.py | CacheProxyResource.render | def render(self, request):
"""
Render a request by forwarding it to the proxied server.
"""
# set up and evaluate a connection to the target server
if self.port == 80:
host = self.host
else:
host = "%s:%d" % (self.host, self.port)
request.requestHeaders.addRawHeader('host', host)
request.content.seek(0, 0)
qs = urlparse.urlparse(request.uri)[4]
if qs:
rest = self.path + '?' + qs
else:
rest = self.path
global_self = self.getGlobalSelf()
clientFactory = self.proxyClientFactoryClass(
request.method, rest, request.clientproto,
request.getAllHeaders(), request.content.read(), request,
global_self # this is new
)
self.reactor.connectTCP(self.host, self.port, clientFactory)
    return NOT_DONE_YET | python | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/contrib/cache/resource.py#L178-L204 | train | 251,327
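For comparison, a plain (non-caching) Twisted reverse proxy needs only the stock resource that `CacheProxyResource` extends. A hedged sketch, assuming a recent Twisted where the resource path is passed as bytes and a backend listening on 127.0.0.1:8000:

```python
from twisted.internet import reactor
from twisted.web import proxy, server

# Forward everything arriving on :8080 to the backend, with no response capture.
site = server.Site(proxy.ReverseProxyResource("127.0.0.1", 8000, b""))
reactor.listenTCP(8080, site)
reactor.run()
```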
hendrix/hendrix | hendrix/contrib/cache/resource.py | CacheProxyResource.getGlobalSelf | def getGlobalSelf(self):
"""
This searches the reactor for the original instance of
CacheProxyResource. This is necessary because with each call of
getChild a new instance of CacheProxyResource is created.
"""
transports = self.reactor.getReaders()
for transport in transports:
try:
resource = transport.factory.resource
if isinstance(resource, self.__class__) and resource.port == self.port:
return resource
except AttributeError:
pass
return | python | def getGlobalSelf(self):
"""
This searches the reactor for the original instance of
CacheProxyResource. This is necessary because with each call of
getChild a new instance of CacheProxyResource is created.
"""
transports = self.reactor.getReaders()
for transport in transports:
try:
resource = transport.factory.resource
if isinstance(resource, self.__class__) and resource.port == self.port:
return resource
except AttributeError:
pass
return | [
"def",
"getGlobalSelf",
"(",
"self",
")",
":",
"transports",
"=",
"self",
".",
"reactor",
".",
"getReaders",
"(",
")",
"for",
"transport",
"in",
"transports",
":",
"try",
":",
"resource",
"=",
"transport",
".",
"factory",
".",
"resource",
"if",
"isinstance",
"(",
"resource",
",",
"self",
".",
"__class__",
")",
"and",
"resource",
".",
"port",
"==",
"self",
".",
"port",
":",
"return",
"resource",
"except",
"AttributeError",
":",
"pass",
"return"
] | This searches the reactor for the original instance of
CacheProxyResource. This is necessary because with each call of
getChild a new instance of CacheProxyResource is created. | [
"This",
"searches",
"the",
"reactor",
"for",
"the",
"original",
"instance",
"of",
"CacheProxyResource",
".",
"This",
"is",
"necessary",
"because",
"with",
"each",
"call",
"of",
"getChild",
"a",
"new",
"instance",
"of",
"CacheProxyResource",
"is",
"created",
"."
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/contrib/cache/resource.py#L209-L223 | train | 251,328 |
hendrix/hendrix | hendrix/contrib/concurrency/resources.py | MessageHandlerProtocol.dataReceived | def dataReceived(self, data):
"""
Takes "data" which we assume is json encoded
If data has a subject_id attribute, we pass that to the dispatcher
as the subject_id so it will get carried through into any
return communications and be identifiable to the client
falls back to just passing the message along...
"""
try:
address = self.guid
data = json.loads(data)
threads.deferToThread(send_signal, self.dispatcher, data)
if 'hx_subscribe' in data:
return self.dispatcher.subscribe(self.transport, data)
if 'address' in data:
address = data['address']
else:
address = self.guid
self.dispatcher.send(address, data)
except Exception as e:
raise
self.dispatcher.send(
self.guid,
{'message': data, 'error': str(e)}
) | python | def dataReceived(self, data):
"""
Takes "data" which we assume is json encoded
If data has a subject_id attribute, we pass that to the dispatcher
as the subject_id so it will get carried through into any
return communications and be identifiable to the client
falls back to just passing the message along...
"""
try:
address = self.guid
data = json.loads(data)
threads.deferToThread(send_signal, self.dispatcher, data)
if 'hx_subscribe' in data:
return self.dispatcher.subscribe(self.transport, data)
if 'address' in data:
address = data['address']
else:
address = self.guid
self.dispatcher.send(address, data)
except Exception as e:
raise
self.dispatcher.send(
self.guid,
{'message': data, 'error': str(e)}
) | [
"def",
"dataReceived",
"(",
"self",
",",
"data",
")",
":",
"try",
":",
"address",
"=",
"self",
".",
"guid",
"data",
"=",
"json",
".",
"loads",
"(",
"data",
")",
"threads",
".",
"deferToThread",
"(",
"send_signal",
",",
"self",
".",
"dispatcher",
",",
"data",
")",
"if",
"'hx_subscribe'",
"in",
"data",
":",
"return",
"self",
".",
"dispatcher",
".",
"subscribe",
"(",
"self",
".",
"transport",
",",
"data",
")",
"if",
"'address'",
"in",
"data",
":",
"address",
"=",
"data",
"[",
"'address'",
"]",
"else",
":",
"address",
"=",
"self",
".",
"guid",
"self",
".",
"dispatcher",
".",
"send",
"(",
"address",
",",
"data",
")",
"except",
"Exception",
"as",
"e",
":",
"raise",
"self",
".",
"dispatcher",
".",
"send",
"(",
"self",
".",
"guid",
",",
"{",
"'message'",
":",
"data",
",",
"'error'",
":",
"str",
"(",
"e",
")",
"}",
")"
] | Takes "data" which we assume is json encoded
If data has a subject_id attribute, we pass that to the dispatcher
as the subject_id so it will get carried through into any
return communications and be identifiable to the client
falls back to just passing the message along... | [
"Takes",
"data",
"which",
"we",
"assume",
"is",
"json",
"encoded",
"If",
"data",
"has",
"a",
"subject_id",
"attribute",
"we",
"pass",
"that",
"to",
"the",
"dispatcher",
"as",
"the",
"subject_id",
"so",
"it",
"will",
"get",
"carried",
"through",
"into",
"any",
"return",
"communications",
"and",
"be",
"identifiable",
"to",
"the",
"client"
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/contrib/concurrency/resources.py#L26-L57 | train | 251,329 |
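A quick illustration of the JSON payloads dataReceived() above understands; the field names come straight from the handler, while the channel name and recipient address are invented for the sketch.

import json

# joins the "chat" channel via the dispatcher's subscribe()
subscribe_msg = json.dumps({'hx_subscribe': 'chat'})
# routed to an explicit recipient address instead of the sender's own guid
direct_msg = json.dumps({'address': 'some-recipient-guid', 'message': 'hello'})
# no address given: falls back to the connection's own guid
echo_msg = json.dumps({'message': 'hello'})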
hendrix/hendrix | hendrix/contrib/concurrency/resources.py | MessageHandlerProtocol.connectionMade | def connectionMade(self):
"""
establish the address of this new connection and add it to the list of
sockets managed by the dispatcher
reply to the transport with a "setup_connection" notice
containing the recipient's address for use by the client as a return
address for future communications
"""
self.transport.uid = str(uuid.uuid1())
self.guid = self.dispatcher.add(self.transport)
self.dispatcher.send(self.guid, {'setup_connection': self.guid}) | python | def connectionMade(self):
"""
establish the address of this new connection and add it to the list of
sockets managed by the dispatcher
reply to the transport with a "setup_connection" notice
containing the recipient's address for use by the client as a return
address for future communications
"""
self.transport.uid = str(uuid.uuid1())
self.guid = self.dispatcher.add(self.transport)
self.dispatcher.send(self.guid, {'setup_connection': self.guid}) | [
"def",
"connectionMade",
"(",
"self",
")",
":",
"self",
".",
"transport",
".",
"uid",
"=",
"str",
"(",
"uuid",
".",
"uuid1",
"(",
")",
")",
"self",
".",
"guid",
"=",
"self",
".",
"dispatcher",
".",
"add",
"(",
"self",
".",
"transport",
")",
"self",
".",
"dispatcher",
".",
"send",
"(",
"self",
".",
"guid",
",",
"{",
"'setup_connection'",
":",
"self",
".",
"guid",
"}",
")"
] | establish the address of this new connection and add it to the list of
sockets managed by the dispatcher
reply to the transport with a "setup_connection" notice
containing the recipient's address for use by the client as a return
address for future communications | [
"establish",
"the",
"address",
"of",
"this",
"new",
"connection",
"and",
"add",
"it",
"to",
"the",
"list",
"of",
"sockets",
"managed",
"by",
"the",
"dispatcher"
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/contrib/concurrency/resources.py#L59-L71 | train | 251,330 |
hendrix/hendrix | hendrix/utils/conf.py | generateInitd | def generateInitd(conf_file):
"""
Helper function to generate the text content needed to create an init.d
executable
"""
allowed_opts = [
'virtualenv', 'project_path', 'settings', 'processes',
'http_port', 'cache', 'cache_port', 'https_port', 'key', 'cert'
]
base_opts = ['--daemonize', ] # always daemonize
options = base_opts
with open(conf_file, 'r') as cfg:
conf = yaml.load(cfg)
conf_specs = set(conf.keys())
if len(conf_specs - set(allowed_opts)):
raise RuntimeError('Improperly configured.')
try:
virtualenv = conf.pop('virtualenv')
project_path = conf.pop('project_path')
except:
raise RuntimeError('Improperly configured.')
cache = False
if 'cache' in conf:
cache = conf.pop('cache')
if not cache:
options.append('--nocache')
workers = 0
if 'processes' in conf:
processes = conf.pop('processes')
workers = int(processes) - 1
if workers > 0:
options += ['--workers', str(workers)]
for key, value in conf.iteritems():
options += ['--%s' % key, str(value)]
with open(os.path.join(SHARE_PATH, 'init.d.j2'), 'r') as f:
TEMPLATE_FILE = f.read()
template = jinja2.Template(TEMPLATE_FILE)
initd_content = template.render(
{
'venv_path': virtualenv,
'project_path': project_path,
'hendrix_opts': ' '.join(options)
}
)
return initd_content | python | def generateInitd(conf_file):
"""
Helper function to generate the text content needed to create an init.d
executable
"""
allowed_opts = [
'virtualenv', 'project_path', 'settings', 'processes',
'http_port', 'cache', 'cache_port', 'https_port', 'key', 'cert'
]
base_opts = ['--daemonize', ] # always daemonize
options = base_opts
with open(conf_file, 'r') as cfg:
conf = yaml.load(cfg)
conf_specs = set(conf.keys())
if len(conf_specs - set(allowed_opts)):
raise RuntimeError('Improperly configured.')
try:
virtualenv = conf.pop('virtualenv')
project_path = conf.pop('project_path')
except:
raise RuntimeError('Improperly configured.')
cache = False
if 'cache' in conf:
cache = conf.pop('cache')
if not cache:
options.append('--nocache')
workers = 0
if 'processes' in conf:
processes = conf.pop('processes')
workers = int(processes) - 1
if workers > 0:
options += ['--workers', str(workers)]
for key, value in conf.iteritems():
options += ['--%s' % key, str(value)]
with open(os.path.join(SHARE_PATH, 'init.d.j2'), 'r') as f:
TEMPLATE_FILE = f.read()
template = jinja2.Template(TEMPLATE_FILE)
initd_content = template.render(
{
'venv_path': virtualenv,
'project_path': project_path,
'hendrix_opts': ' '.join(options)
}
)
return initd_content | [
"def",
"generateInitd",
"(",
"conf_file",
")",
":",
"allowed_opts",
"=",
"[",
"'virtualenv'",
",",
"'project_path'",
",",
"'settings'",
",",
"'processes'",
",",
"'http_port'",
",",
"'cache'",
",",
"'cache_port'",
",",
"'https_port'",
",",
"'key'",
",",
"'cert'",
"]",
"base_opts",
"=",
"[",
"'--daemonize'",
",",
"]",
"# always daemonize",
"options",
"=",
"base_opts",
"with",
"open",
"(",
"conf_file",
",",
"'r'",
")",
"as",
"cfg",
":",
"conf",
"=",
"yaml",
".",
"load",
"(",
"cfg",
")",
"conf_specs",
"=",
"set",
"(",
"conf",
".",
"keys",
"(",
")",
")",
"if",
"len",
"(",
"conf_specs",
"-",
"set",
"(",
"allowed_opts",
")",
")",
":",
"raise",
"RuntimeError",
"(",
"'Improperly configured.'",
")",
"try",
":",
"virtualenv",
"=",
"conf",
".",
"pop",
"(",
"'virtualenv'",
")",
"project_path",
"=",
"conf",
".",
"pop",
"(",
"'project_path'",
")",
"except",
":",
"raise",
"RuntimeError",
"(",
"'Improperly configured.'",
")",
"cache",
"=",
"False",
"if",
"'cache'",
"in",
"conf",
":",
"cache",
"=",
"conf",
".",
"pop",
"(",
"'cache'",
")",
"if",
"not",
"cache",
":",
"options",
".",
"append",
"(",
"'--nocache'",
")",
"workers",
"=",
"0",
"if",
"'processes'",
"in",
"conf",
":",
"processes",
"=",
"conf",
".",
"pop",
"(",
"'processes'",
")",
"workers",
"=",
"int",
"(",
"processes",
")",
"-",
"1",
"if",
"workers",
">",
"0",
":",
"options",
"+=",
"[",
"'--workers'",
",",
"str",
"(",
"workers",
")",
"]",
"for",
"key",
",",
"value",
"in",
"conf",
".",
"iteritems",
"(",
")",
":",
"options",
"+=",
"[",
"'--%s'",
"%",
"key",
",",
"str",
"(",
"value",
")",
"]",
"with",
"open",
"(",
"os",
".",
"path",
".",
"join",
"(",
"SHARE_PATH",
",",
"'init.d.j2'",
")",
",",
"'r'",
")",
"as",
"f",
":",
"TEMPLATE_FILE",
"=",
"f",
".",
"read",
"(",
")",
"template",
"=",
"jinja2",
".",
"Template",
"(",
"TEMPLATE_FILE",
")",
"initd_content",
"=",
"template",
".",
"render",
"(",
"{",
"'venv_path'",
":",
"virtualenv",
",",
"'project_path'",
":",
"project_path",
",",
"'hendrix_opts'",
":",
"' '",
".",
"join",
"(",
"options",
")",
"}",
")",
"return",
"initd_content"
] | Helper function to generate the text content needed to create an init.d
executable | [
"Helper",
"function",
"to",
"generate",
"the",
"text",
"content",
"needed",
"to",
"create",
"an",
"init",
".",
"d",
"executable"
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/utils/conf.py#L9-L61 | train | 251,331 |
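A minimal YAML conf of the shape generateInitd() above expects; every key is drawn from allowed_opts, virtualenv and project_path are the two mandatory ones, and all paths here are invented.

example_conf = '''\
virtualenv: /home/deploy/venvs/myproject
project_path: /home/deploy/myproject
settings: myproject.settings
processes: 4
http_port: 8000
cache: false
'''

with open('/tmp/hendrix.yml', 'w') as f:
    f.write(example_conf)

# from hendrix.utils.conf import generateInitd
# init_script = generateInitd('/tmp/hendrix.yml')  # rendered init.d text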
hendrix/hendrix | hendrix/facilities/response.py | LoudWSGIResponse.startResponse | def startResponse(self, status, headers, excInfo=None):
"""
extends startResponse to call speakerBox in a thread
"""
self.status = status
self.headers = headers
self.reactor.callInThread(
responseInColor, self.request, status, headers
)
return self.write | python | def startResponse(self, status, headers, excInfo=None):
"""
extends startResponse to call speakerBox in a thread
"""
self.status = status
self.headers = headers
self.reactor.callInThread(
responseInColor, self.request, status, headers
)
return self.write | [
"def",
"startResponse",
"(",
"self",
",",
"status",
",",
"headers",
",",
"excInfo",
"=",
"None",
")",
":",
"self",
".",
"status",
"=",
"status",
"self",
".",
"headers",
"=",
"headers",
"self",
".",
"reactor",
".",
"callInThread",
"(",
"responseInColor",
",",
"self",
".",
"request",
",",
"status",
",",
"headers",
")",
"return",
"self",
".",
"write"
] | extends startResponse to call speakerBox in a thread | [
"extends",
"startResponse",
"to",
"call",
"speakerBox",
"in",
"a",
"thread"
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/facilities/response.py#L41-L50 | train | 251,332 |
hendrix/hendrix | hendrix/contrib/cache/backends/__init__.py | CacheBackend.cacheContent | def cacheContent(self, request, response, buffer):
"""
Checks if the response should be cached.
Caches the content in a gzipped format given that a `cache_it` flag is
True
To be used CacheClient
"""
content = buffer.getvalue()
code = int(response.code)
cache_it = False
uri, bust = self.processURI(request.uri, PREFIX)
# Conditions for adding uri response to cache:
# * if it was successful i.e. status of in the 200s
# * requested using GET
# * not busted
if request.method == "GET" and code / 100 == 2 and not bust:
cache_control = response.headers.get('cache-control')
if cache_control:
params = dict(urlparse.parse_qsl(cache_control))
if int(params.get('max-age', '0')) > 0:
cache_it = True
if cache_it:
content = compressBuffer(content)
self.addResource(content, uri, response.headers)
buffer.close() | python | def cacheContent(self, request, response, buffer):
"""
Checks if the response should be cached.
Caches the content in a gzipped format given that a `cache_it` flag is
True
To be used CacheClient
"""
content = buffer.getvalue()
code = int(response.code)
cache_it = False
uri, bust = self.processURI(request.uri, PREFIX)
# Conditions for adding uri response to cache:
# * if it was successful i.e. status of in the 200s
# * requested using GET
# * not busted
if request.method == "GET" and code / 100 == 2 and not bust:
cache_control = response.headers.get('cache-control')
if cache_control:
params = dict(urlparse.parse_qsl(cache_control))
if int(params.get('max-age', '0')) > 0:
cache_it = True
if cache_it:
content = compressBuffer(content)
self.addResource(content, uri, response.headers)
buffer.close() | [
"def",
"cacheContent",
"(",
"self",
",",
"request",
",",
"response",
",",
"buffer",
")",
":",
"content",
"=",
"buffer",
".",
"getvalue",
"(",
")",
"code",
"=",
"int",
"(",
"response",
".",
"code",
")",
"cache_it",
"=",
"False",
"uri",
",",
"bust",
"=",
"self",
".",
"processURI",
"(",
"request",
".",
"uri",
",",
"PREFIX",
")",
"# Conditions for adding uri response to cache:",
"# * if it was successful i.e. status of in the 200s",
"# * requested using GET",
"# * not busted",
"if",
"request",
".",
"method",
"==",
"\"GET\"",
"and",
"code",
"/",
"100",
"==",
"2",
"and",
"not",
"bust",
":",
"cache_control",
"=",
"response",
".",
"headers",
".",
"get",
"(",
"'cache-control'",
")",
"if",
"cache_control",
":",
"params",
"=",
"dict",
"(",
"urlparse",
".",
"parse_qsl",
"(",
"cache_control",
")",
")",
"if",
"int",
"(",
"params",
".",
"get",
"(",
"'max-age'",
",",
"'0'",
")",
")",
">",
"0",
":",
"cache_it",
"=",
"True",
"if",
"cache_it",
":",
"content",
"=",
"compressBuffer",
"(",
"content",
")",
"self",
".",
"addResource",
"(",
"content",
",",
"uri",
",",
"response",
".",
"headers",
")",
"buffer",
".",
"close",
"(",
")"
] | Checks if the response should be cached.
Caches the content in a gzipped format given that a `cache_it` flag is
True
To be used CacheClient | [
"Checks",
"if",
"the",
"response",
"should",
"be",
"cached",
".",
"Caches",
"the",
"content",
"in",
"a",
"gzipped",
"format",
"given",
"that",
"a",
"cache_it",
"flag",
"is",
"True",
"To",
"be",
"used",
"CacheClient"
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/contrib/cache/backends/__init__.py#L70-L94 | train | 251,333 |
hendrix/hendrix | hendrix/facilities/gather.py | get_additional_services | def get_additional_services(settings_module):
"""
if HENDRIX_SERVICES is specified in settings_module,
it should be a list twisted internet services
example:
HENDRIX_SERVICES = (
('myServiceName', 'apps.offload.services.TimeService'),
)
"""
additional_services = []
if hasattr(settings_module, 'HENDRIX_SERVICES'):
for name, module_path in settings_module.HENDRIX_SERVICES:
path_to_module, service_name = module_path.rsplit('.', 1)
resource_module = importlib.import_module(path_to_module)
additional_services.append(
(name, getattr(resource_module, service_name))
)
return additional_services | python | def get_additional_services(settings_module):
"""
if HENDRIX_SERVICES is specified in settings_module,
it should be a list twisted internet services
example:
HENDRIX_SERVICES = (
('myServiceName', 'apps.offload.services.TimeService'),
)
"""
additional_services = []
if hasattr(settings_module, 'HENDRIX_SERVICES'):
for name, module_path in settings_module.HENDRIX_SERVICES:
path_to_module, service_name = module_path.rsplit('.', 1)
resource_module = importlib.import_module(path_to_module)
additional_services.append(
(name, getattr(resource_module, service_name))
)
return additional_services | [
"def",
"get_additional_services",
"(",
"settings_module",
")",
":",
"additional_services",
"=",
"[",
"]",
"if",
"hasattr",
"(",
"settings_module",
",",
"'HENDRIX_SERVICES'",
")",
":",
"for",
"name",
",",
"module_path",
"in",
"settings_module",
".",
"HENDRIX_SERVICES",
":",
"path_to_module",
",",
"service_name",
"=",
"module_path",
".",
"rsplit",
"(",
"'.'",
",",
"1",
")",
"resource_module",
"=",
"importlib",
".",
"import_module",
"(",
"path_to_module",
")",
"additional_services",
".",
"append",
"(",
"(",
"name",
",",
"getattr",
"(",
"resource_module",
",",
"service_name",
")",
")",
")",
"return",
"additional_services"
] | if HENDRIX_SERVICES is specified in settings_module,
it should be a list twisted internet services
example:
HENDRIX_SERVICES = (
('myServiceName', 'apps.offload.services.TimeService'),
) | [
"if",
"HENDRIX_SERVICES",
"is",
"specified",
"in",
"settings_module",
"it",
"should",
"be",
"a",
"list",
"twisted",
"internet",
"services"
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/facilities/gather.py#L4-L25 | train | 251,334 |
hendrix/hendrix | hendrix/facilities/gather.py | get_additional_resources | def get_additional_resources(settings_module):
"""
if HENDRIX_CHILD_RESOURCES is specified in settings_module,
it should be a list resources subclassed from hendrix.contrib.NamedResource
example:
HENDRIX_CHILD_RESOURCES = (
'apps.offload.resources.LongRunningProcessResource',
'apps.chat.resources.ChatResource',
)
"""
additional_resources = []
if hasattr(settings_module, 'HENDRIX_CHILD_RESOURCES'):
for module_path in settings_module.HENDRIX_CHILD_RESOURCES:
path_to_module, resource_name = module_path.rsplit('.', 1)
resource_module = importlib.import_module(path_to_module)
additional_resources.append(
getattr(resource_module, resource_name)
)
return additional_resources | python | def get_additional_resources(settings_module):
"""
if HENDRIX_CHILD_RESOURCES is specified in settings_module,
it should be a list resources subclassed from hendrix.contrib.NamedResource
example:
HENDRIX_CHILD_RESOURCES = (
'apps.offload.resources.LongRunningProcessResource',
'apps.chat.resources.ChatResource',
)
"""
additional_resources = []
if hasattr(settings_module, 'HENDRIX_CHILD_RESOURCES'):
for module_path in settings_module.HENDRIX_CHILD_RESOURCES:
path_to_module, resource_name = module_path.rsplit('.', 1)
resource_module = importlib.import_module(path_to_module)
additional_resources.append(
getattr(resource_module, resource_name)
)
return additional_resources | [
"def",
"get_additional_resources",
"(",
"settings_module",
")",
":",
"additional_resources",
"=",
"[",
"]",
"if",
"hasattr",
"(",
"settings_module",
",",
"'HENDRIX_CHILD_RESOURCES'",
")",
":",
"for",
"module_path",
"in",
"settings_module",
".",
"HENDRIX_CHILD_RESOURCES",
":",
"path_to_module",
",",
"resource_name",
"=",
"module_path",
".",
"rsplit",
"(",
"'.'",
",",
"1",
")",
"resource_module",
"=",
"importlib",
".",
"import_module",
"(",
"path_to_module",
")",
"additional_resources",
".",
"append",
"(",
"getattr",
"(",
"resource_module",
",",
"resource_name",
")",
")",
"return",
"additional_resources"
] | if HENDRIX_CHILD_RESOURCES is specified in settings_module,
it should be a list resources subclassed from hendrix.contrib.NamedResource
example:
HENDRIX_CHILD_RESOURCES = (
'apps.offload.resources.LongRunningProcessResource',
'apps.chat.resources.ChatResource',
) | [
"if",
"HENDRIX_CHILD_RESOURCES",
"is",
"specified",
"in",
"settings_module",
"it",
"should",
"be",
"a",
"list",
"resources",
"subclassed",
"from",
"hendrix",
".",
"contrib",
".",
"NamedResource"
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/facilities/gather.py#L28-L52 | train | 251,335 |
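Both loaders above only need dotted-path strings in the Django settings module. A sketch of the corresponding settings, reusing the example paths from the docstrings (they would have to exist in a real project):

# settings.py (sketch)
HENDRIX_SERVICES = (
    ('myServiceName', 'apps.offload.services.TimeService'),
)
HENDRIX_CHILD_RESOURCES = (
    'apps.offload.resources.LongRunningProcessResource',
    'apps.chat.resources.ChatResource',
)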
hendrix/hendrix | hendrix/deploy/base.py | HendrixDeploy.getConf | def getConf(cls, settings, options):
"updates the options dict to use config options in the settings module"
ports = ['http_port', 'https_port', 'cache_port']
for port_name in ports:
port = getattr(settings, port_name.upper(), None)
# only use the settings ports if the defaults were left unchanged
default = getattr(defaults, port_name.upper())
if port and options.get(port_name) == default:
options[port_name] = port
_opts = [
('key', 'hx_private_key'),
('cert', 'hx_certficate'),
('wsgi', 'wsgi_application')
]
for opt_name, settings_name in _opts:
opt = getattr(settings, settings_name.upper(), None)
if opt:
options[opt_name] = opt
if not options['settings']:
options['settings'] = environ['DJANGO_SETTINGS_MODULE']
return options | python | def getConf(cls, settings, options):
"updates the options dict to use config options in the settings module"
ports = ['http_port', 'https_port', 'cache_port']
for port_name in ports:
port = getattr(settings, port_name.upper(), None)
# only use the settings ports if the defaults were left unchanged
default = getattr(defaults, port_name.upper())
if port and options.get(port_name) == default:
options[port_name] = port
_opts = [
('key', 'hx_private_key'),
('cert', 'hx_certficate'),
('wsgi', 'wsgi_application')
]
for opt_name, settings_name in _opts:
opt = getattr(settings, settings_name.upper(), None)
if opt:
options[opt_name] = opt
if not options['settings']:
options['settings'] = environ['DJANGO_SETTINGS_MODULE']
return options | [
"def",
"getConf",
"(",
"cls",
",",
"settings",
",",
"options",
")",
":",
"ports",
"=",
"[",
"'http_port'",
",",
"'https_port'",
",",
"'cache_port'",
"]",
"for",
"port_name",
"in",
"ports",
":",
"port",
"=",
"getattr",
"(",
"settings",
",",
"port_name",
".",
"upper",
"(",
")",
",",
"None",
")",
"# only use the settings ports if the defaults were left unchanged",
"default",
"=",
"getattr",
"(",
"defaults",
",",
"port_name",
".",
"upper",
"(",
")",
")",
"if",
"port",
"and",
"options",
".",
"get",
"(",
"port_name",
")",
"==",
"default",
":",
"options",
"[",
"port_name",
"]",
"=",
"port",
"_opts",
"=",
"[",
"(",
"'key'",
",",
"'hx_private_key'",
")",
",",
"(",
"'cert'",
",",
"'hx_certficate'",
")",
",",
"(",
"'wsgi'",
",",
"'wsgi_application'",
")",
"]",
"for",
"opt_name",
",",
"settings_name",
"in",
"_opts",
":",
"opt",
"=",
"getattr",
"(",
"settings",
",",
"settings_name",
".",
"upper",
"(",
")",
",",
"None",
")",
"if",
"opt",
":",
"options",
"[",
"opt_name",
"]",
"=",
"opt",
"if",
"not",
"options",
"[",
"'settings'",
"]",
":",
"options",
"[",
"'settings'",
"]",
"=",
"environ",
"[",
"'DJANGO_SETTINGS_MODULE'",
"]",
"return",
"options"
] | updates the options dict to use config options in the settings module | [
"updates",
"the",
"options",
"dict",
"to",
"use",
"config",
"options",
"in",
"the",
"settings",
"module"
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/deploy/base.py#L99-L121 | train | 251,336 |
hendrix/hendrix | hendrix/deploy/base.py | HendrixDeploy.addHendrix | def addHendrix(self):
'''
Instantiates a HendrixService with this object's threadpool.
It will be added as a service later.
'''
self.hendrix = HendrixService(
self.application,
threadpool=self.getThreadPool(),
resources=self.resources,
services=self.services,
loud=self.options['loud']
)
if self.options["https_only"] is not True:
self.hendrix.spawn_new_server(self.options['http_port'], HendrixTCPService) | python | def addHendrix(self):
'''
Instantiates a HendrixService with this object's threadpool.
It will be added as a service later.
'''
self.hendrix = HendrixService(
self.application,
threadpool=self.getThreadPool(),
resources=self.resources,
services=self.services,
loud=self.options['loud']
)
if self.options["https_only"] is not True:
self.hendrix.spawn_new_server(self.options['http_port'], HendrixTCPService) | [
"def",
"addHendrix",
"(",
"self",
")",
":",
"self",
".",
"hendrix",
"=",
"HendrixService",
"(",
"self",
".",
"application",
",",
"threadpool",
"=",
"self",
".",
"getThreadPool",
"(",
")",
",",
"resources",
"=",
"self",
".",
"resources",
",",
"services",
"=",
"self",
".",
"services",
",",
"loud",
"=",
"self",
".",
"options",
"[",
"'loud'",
"]",
")",
"if",
"self",
".",
"options",
"[",
"\"https_only\"",
"]",
"is",
"not",
"True",
":",
"self",
".",
"hendrix",
".",
"spawn_new_server",
"(",
"self",
".",
"options",
"[",
"'http_port'",
"]",
",",
"HendrixTCPService",
")"
] | Instantiates a HendrixService with this object's threadpool.
It will be added as a service later. | [
"Instantiates",
"a",
"HendrixService",
"with",
"this",
"object",
"s",
"threadpool",
".",
"It",
"will",
"be",
"added",
"as",
"a",
"service",
"later",
"."
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/deploy/base.py#L144-L157 | train | 251,337 |
hendrix/hendrix | hendrix/deploy/base.py | HendrixDeploy.catalogServers | def catalogServers(self, hendrix):
"collects a list of service names serving on TCP or SSL"
for service in hendrix.services:
if isinstance(service, (TCPServer, SSLServer)):
self.servers.append(service.name) | python | def catalogServers(self, hendrix):
"collects a list of service names serving on TCP or SSL"
for service in hendrix.services:
if isinstance(service, (TCPServer, SSLServer)):
self.servers.append(service.name) | [
"def",
"catalogServers",
"(",
"self",
",",
"hendrix",
")",
":",
"for",
"service",
"in",
"hendrix",
".",
"services",
":",
"if",
"isinstance",
"(",
"service",
",",
"(",
"TCPServer",
",",
"SSLServer",
")",
")",
":",
"self",
".",
"servers",
".",
"append",
"(",
"service",
".",
"name",
")"
] | collects a list of service names serving on TCP or SSL | [
"collects",
"a",
"list",
"of",
"service",
"names",
"serving",
"on",
"TCP",
"or",
"SSL"
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/deploy/base.py#L159-L163 | train | 251,338 |
hendrix/hendrix | hendrix/deploy/base.py | HendrixDeploy.run | def run(self):
"sets up the desired services and runs the requested action"
self.addServices()
self.catalogServers(self.hendrix)
action = self.action
fd = self.options['fd']
if action.startswith('start'):
chalk.blue(self._listening_message())
getattr(self, action)(fd)
###########################
# annnnd run the reactor! #
###########################
try:
self.reactor.run()
finally:
shutil.rmtree(PID_DIR, ignore_errors=True) # cleanup tmp PID dir
elif action == 'restart':
getattr(self, action)(fd=fd)
else:
getattr(self, action)() | python | def run(self):
"sets up the desired services and runs the requested action"
self.addServices()
self.catalogServers(self.hendrix)
action = self.action
fd = self.options['fd']
if action.startswith('start'):
chalk.blue(self._listening_message())
getattr(self, action)(fd)
###########################
# annnnd run the reactor! #
###########################
try:
self.reactor.run()
finally:
shutil.rmtree(PID_DIR, ignore_errors=True) # cleanup tmp PID dir
elif action == 'restart':
getattr(self, action)(fd=fd)
else:
getattr(self, action)() | [
"def",
"run",
"(",
"self",
")",
":",
"self",
".",
"addServices",
"(",
")",
"self",
".",
"catalogServers",
"(",
"self",
".",
"hendrix",
")",
"action",
"=",
"self",
".",
"action",
"fd",
"=",
"self",
".",
"options",
"[",
"'fd'",
"]",
"if",
"action",
".",
"startswith",
"(",
"'start'",
")",
":",
"chalk",
".",
"blue",
"(",
"self",
".",
"_listening_message",
"(",
")",
")",
"getattr",
"(",
"self",
",",
"action",
")",
"(",
"fd",
")",
"###########################",
"# annnnd run the reactor! #",
"###########################",
"try",
":",
"self",
".",
"reactor",
".",
"run",
"(",
")",
"finally",
":",
"shutil",
".",
"rmtree",
"(",
"PID_DIR",
",",
"ignore_errors",
"=",
"True",
")",
"# cleanup tmp PID dir",
"elif",
"action",
"==",
"'restart'",
":",
"getattr",
"(",
"self",
",",
"action",
")",
"(",
"fd",
"=",
"fd",
")",
"else",
":",
"getattr",
"(",
"self",
",",
"action",
")",
"(",
")"
] | sets up the desired services and runs the requested action | [
"sets",
"up",
"the",
"desired",
"services",
"and",
"runs",
"the",
"requested",
"action"
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/deploy/base.py#L169-L191 | train | 251,339 |
hendrix/hendrix | hendrix/deploy/base.py | HendrixDeploy.setFDs | def setFDs(self):
"""
Iterator for file descriptors.
Separated from launchworkers for clarity and readability.

"""
# 0 corresponds to stdin, 1 to stdout, 2 to stderr
self.childFDs = {0: 0, 1: 1, 2: 2}
self.fds = {}
for name in self.servers:
self.port = self.hendrix.get_port(name)
fd = self.port.fileno()
self.childFDs[fd] = fd
self.fds[name] = fd | python | def setFDs(self):
"""
Iterator for file descriptors.
Separated from launchworkers for clarity and readability.
"""
# 0 corresponds to stdin, 1 to stdout, 2 to stderr
self.childFDs = {0: 0, 1: 1, 2: 2}
self.fds = {}
for name in self.servers:
self.port = self.hendrix.get_port(name)
fd = self.port.fileno()
self.childFDs[fd] = fd
self.fds[name] = fd | [
"def",
"setFDs",
"(",
"self",
")",
":",
"# 0 corresponds to stdin, 1 to stdout, 2 to stderr",
"self",
".",
"childFDs",
"=",
"{",
"0",
":",
"0",
",",
"1",
":",
"1",
",",
"2",
":",
"2",
"}",
"self",
".",
"fds",
"=",
"{",
"}",
"for",
"name",
"in",
"self",
".",
"servers",
":",
"self",
".",
"port",
"=",
"self",
".",
"hendrix",
".",
"get_port",
"(",
"name",
")",
"fd",
"=",
"self",
".",
"port",
".",
"fileno",
"(",
")",
"self",
".",
"childFDs",
"[",
"fd",
"]",
"=",
"fd",
"self",
".",
"fds",
"[",
"name",
"]",
"=",
"fd"
] | Iterator for file descriptors.
Separated from launchworkers for clarity and readability. | [
"Iterator",
"for",
"file",
"descriptors",
".",
"Seperated",
"from",
"launchworkers",
"for",
"clarity",
"and",
"readability",
"."
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/deploy/base.py#L240-L252 | train | 251,340 |
hendrix/hendrix | hendrix/deploy/base.py | HendrixDeploy.addSubprocess | def addSubprocess(self, fds, name, factory):
"""
Public method for _addSubprocess.
Wraps reactor.adoptStreamConnection in
a simple DeferredLock to guarantee
workers play well together.
"""
self._lock.run(self._addSubprocess, self, fds, name, factory) | python | def addSubprocess(self, fds, name, factory):
"""
Public method for _addSubprocess.
Wraps reactor.adoptStreamConnection in
a simple DeferredLock to guarantee
workers play well together.
"""
self._lock.run(self._addSubprocess, self, fds, name, factory) | [
"def",
"addSubprocess",
"(",
"self",
",",
"fds",
",",
"name",
",",
"factory",
")",
":",
"self",
".",
"_lock",
".",
"run",
"(",
"self",
".",
"_addSubprocess",
",",
"self",
",",
"fds",
",",
"name",
",",
"factory",
")"
] | Public method for _addSubprocess.
Wraps reactor.adoptStreamConnection in
a simple DeferredLock to guarantee
workers play well together. | [
"Public",
"method",
"for",
"_addSubprocess",
".",
"Wraps",
"reactor",
".",
"adoptStreamConnection",
"in",
"a",
"simple",
"DeferredLock",
"to",
"guarantee",
"workers",
"play",
"well",
"together",
"."
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/deploy/base.py#L273-L280 | train | 251,341 |
hendrix/hendrix | hendrix/deploy/base.py | HendrixDeploy.disownService | def disownService(self, name):
"""
disowns a service on hendrix by name
returns a factory for use in the adoptStreamPort part of setting up
multiple processes
"""
_service = self.hendrix.getServiceNamed(name)
_service.disownServiceParent()
return _service.factory | python | def disownService(self, name):
"""
disowns a service on hendrix by name
returns a factory for use in the adoptStreamPort part of setting up
multiple processes
"""
_service = self.hendrix.getServiceNamed(name)
_service.disownServiceParent()
return _service.factory | [
"def",
"disownService",
"(",
"self",
",",
"name",
")",
":",
"_service",
"=",
"self",
".",
"hendrix",
".",
"getServiceNamed",
"(",
"name",
")",
"_service",
".",
"disownServiceParent",
"(",
")",
"return",
"_service",
".",
"factory"
] | disowns a service on hendrix by name
returns a factory for use in the adoptStreamPort part of setting up
multiple processes | [
"disowns",
"a",
"service",
"on",
"hendirix",
"by",
"name",
"returns",
"a",
"factory",
"for",
"use",
"in",
"the",
"adoptStreamPort",
"part",
"of",
"setting",
"up",
"multiple",
"processes"
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/deploy/base.py#L307-L315 | train | 251,342 |
hendrix/hendrix | hendrix/utils/__init__.py | get_pid | def get_pid(options):
"""returns The default location of the pid file for process management"""
namespace = options['settings'] if options['settings'] else options['wsgi']
return os.path.join('{}', '{}_{}.pid').format(PID_DIR, options['http_port'], namespace.replace('.', '_')) | python | def get_pid(options):
"""returns The default location of the pid file for process management"""
namespace = options['settings'] if options['settings'] else options['wsgi']
return os.path.join('{}', '{}_{}.pid').format(PID_DIR, options['http_port'], namespace.replace('.', '_')) | [
"def",
"get_pid",
"(",
"options",
")",
":",
"namespace",
"=",
"options",
"[",
"'settings'",
"]",
"if",
"options",
"[",
"'settings'",
"]",
"else",
"options",
"[",
"'wsgi'",
"]",
"return",
"os",
".",
"path",
".",
"join",
"(",
"'{}'",
",",
"'{}_{}.pid'",
")",
".",
"format",
"(",
"PID_DIR",
",",
"options",
"[",
"'http_port'",
"]",
",",
"namespace",
".",
"replace",
"(",
"'.'",
",",
"'_'",
")",
")"
] | returns The default location of the pid file for process management | [
"returns",
"The",
"default",
"location",
"of",
"the",
"pid",
"file",
"for",
"process",
"management"
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/utils/__init__.py#L21-L24 | train | 251,343 |
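Mirroring get_pid() above with concrete values to show the shape of the pid path; the real PID_DIR constant lives in hendrix.utils, so the directory below is only a stand-in.

import os

PID_DIR = '/tmp/hendrix'  # stand-in for the real constant
options = {'settings': 'myproject.settings', 'wsgi': None, 'http_port': 8000}
namespace = options['settings'] if options['settings'] else options['wsgi']
pid_file = os.path.join('{}', '{}_{}.pid').format(
    PID_DIR, options['http_port'], namespace.replace('.', '_'))
# pid_file == '/tmp/hendrix/8000_myproject_settings.pid'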
hendrix/hendrix | hendrix/utils/__init__.py | responseInColor | def responseInColor(request, status, headers, prefix='Response', opts=None):
"Prints the response info in color"
code, message = status.split(None, 1)
message = '%s [%s] => Request %s %s %s on pid %d' % (
prefix,
code,
str(request.host),
request.method,
request.path,
os.getpid()
)
signal = int(code) / 100
if signal == 2:
chalk.green(message, opts=opts)
elif signal == 3:
chalk.blue(message, opts=opts)
else:
chalk.red(message, opts=opts) | python | def responseInColor(request, status, headers, prefix='Response', opts=None):
"Prints the response info in color"
code, message = status.split(None, 1)
message = '%s [%s] => Request %s %s %s on pid %d' % (
prefix,
code,
str(request.host),
request.method,
request.path,
os.getpid()
)
signal = int(code) / 100
if signal == 2:
chalk.green(message, opts=opts)
elif signal == 3:
chalk.blue(message, opts=opts)
else:
chalk.red(message, opts=opts) | [
"def",
"responseInColor",
"(",
"request",
",",
"status",
",",
"headers",
",",
"prefix",
"=",
"'Response'",
",",
"opts",
"=",
"None",
")",
":",
"code",
",",
"message",
"=",
"status",
".",
"split",
"(",
"None",
",",
"1",
")",
"message",
"=",
"'%s [%s] => Request %s %s %s on pid %d'",
"%",
"(",
"prefix",
",",
"code",
",",
"str",
"(",
"request",
".",
"host",
")",
",",
"request",
".",
"method",
",",
"request",
".",
"path",
",",
"os",
".",
"getpid",
"(",
")",
")",
"signal",
"=",
"int",
"(",
"code",
")",
"/",
"100",
"if",
"signal",
"==",
"2",
":",
"chalk",
".",
"green",
"(",
"message",
",",
"opts",
"=",
"opts",
")",
"elif",
"signal",
"==",
"3",
":",
"chalk",
".",
"blue",
"(",
"message",
",",
"opts",
"=",
"opts",
")",
"else",
":",
"chalk",
".",
"red",
"(",
"message",
",",
"opts",
"=",
"opts",
")"
] | Prints the response info in color | [
"Prints",
"the",
"response",
"info",
"in",
"color"
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/utils/__init__.py#L27-L44 | train | 251,344 |
hendrix/hendrix | hendrix/deploy/cache.py | HendrixDeployCache.addLocalCacheService | def addLocalCacheService(self):
"adds a CacheService to the instatiated HendrixService"
_cache = self.getCacheService()
_cache.setName('cache_proxy')
_cache.setServiceParent(self.hendrix) | python | def addLocalCacheService(self):
"adds a CacheService to the instatiated HendrixService"
_cache = self.getCacheService()
_cache.setName('cache_proxy')
_cache.setServiceParent(self.hendrix) | [
"def",
"addLocalCacheService",
"(",
"self",
")",
":",
"_cache",
"=",
"self",
".",
"getCacheService",
"(",
")",
"_cache",
".",
"setName",
"(",
"'cache_proxy'",
")",
"_cache",
".",
"setServiceParent",
"(",
"self",
".",
"hendrix",
")"
] | adds a CacheService to the instantiated HendrixService | [
"adds",
"a",
"CacheService",
"to",
"the",
"instatiated",
"HendrixService"
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/deploy/cache.py#L24-L28 | train | 251,345 |
hendrix/hendrix | hendrix/deploy/cache.py | HendrixDeployCache.addGlobalServices | def addGlobalServices(self):
"""
This is where we put service that we don't want to be duplicated on
worker subprocesses
"""
if self.options.get('global_cache') and self.options.get('cache'):
# only add the cache service here if the global_cache and cache
# options were set to True
_cache = self.getCacheService()
_cache.startService() | python | def addGlobalServices(self):
"""
This is where we put service that we don't want to be duplicated on
worker subprocesses
"""
if self.options.get('global_cache') and self.options.get('cache'):
# only add the cache service here if the global_cache and cache
# options were set to True
_cache = self.getCacheService()
_cache.startService() | [
"def",
"addGlobalServices",
"(",
"self",
")",
":",
"if",
"self",
".",
"options",
".",
"get",
"(",
"'global_cache'",
")",
"and",
"self",
".",
"options",
".",
"get",
"(",
"'cache'",
")",
":",
"# only add the cache service here if the global_cache and cache",
"# options were set to True",
"_cache",
"=",
"self",
".",
"getCacheService",
"(",
")",
"_cache",
".",
"startService",
"(",
")"
] | This is where we put service that we don't want to be duplicated on
worker subprocesses | [
"This",
"is",
"where",
"we",
"put",
"service",
"that",
"we",
"don",
"t",
"want",
"to",
"be",
"duplicated",
"on",
"worker",
"subprocesses"
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/deploy/cache.py#L38-L47 | train | 251,346 |
hendrix/hendrix | hendrix/facilities/resources.py | HendrixResource.putNamedChild | def putNamedChild(self, res):
"""
putNamedChild takes either an instance of hendrix.contrib.NamedResource
or any resource.Resource with a "namespace" attribute as a means of
allowing application level control of resource namespacing.
if a child is already found at an existing path,
resources with paths that are children of those physical paths
will be added as children of those resources
"""
try:
EmptyResource = resource.Resource
namespace = res.namespace
parts = namespace.strip('/').split('/')
# initialise parent and children
parent = self
children = self.children
# loop through all of the path parts except for the last one
for name in parts[:-1]:
child = children.get(name)
if not child:
# if the child does not exist then create an empty one
# and associate it to the parent
child = EmptyResource()
parent.putChild(name, child)
# update parent and children for the next iteration
parent = child
children = parent.children
name = parts[-1] # get the path part that we care about
if children.get(name):
self.logger.warn(
'A resource already exists at this path. Check '
'your resources list to ensure each path is '
'unique. The previous resource will be overridden.'
)
parent.putChild(name, res)
except AttributeError:
# raise an attribute error if the resource `res` doesn't contain
# the attribute `namespace`
msg = (
'%r improperly configured. additional_resources instances must'
' have a namespace attribute'
) % resource
raise AttributeError(msg, None, sys.exc_info()[2]) | python | def putNamedChild(self, res):
"""
putNamedChild takes either an instance of hendrix.contrib.NamedResource
or any resource.Resource with a "namespace" attribute as a means of
allowing application level control of resource namespacing.
if a child is already found at an existing path,
resources with paths that are children of those physical paths
will be added as children of those resources
"""
try:
EmptyResource = resource.Resource
namespace = res.namespace
parts = namespace.strip('/').split('/')
# initialise parent and children
parent = self
children = self.children
# loop through all of the path parts except for the last one
for name in parts[:-1]:
child = children.get(name)
if not child:
# if the child does not exist then create an empty one
# and associate it to the parent
child = EmptyResource()
parent.putChild(name, child)
# update parent and children for the next iteration
parent = child
children = parent.children
name = parts[-1] # get the path part that we care about
if children.get(name):
self.logger.warn(
'A resource already exists at this path. Check '
'your resources list to ensure each path is '
'unique. The previous resource will be overridden.'
)
parent.putChild(name, res)
except AttributeError:
# raise an attribute error if the resource `res` doesn't contain
# the attribute `namespace`
msg = (
'%r improperly configured. additional_resources instances must'
' have a namespace attribute'
) % resource
raise AttributeError(msg, None, sys.exc_info()[2]) | [
"def",
"putNamedChild",
"(",
"self",
",",
"res",
")",
":",
"try",
":",
"EmptyResource",
"=",
"resource",
".",
"Resource",
"namespace",
"=",
"res",
".",
"namespace",
"parts",
"=",
"namespace",
".",
"strip",
"(",
"'/'",
")",
".",
"split",
"(",
"'/'",
")",
"# initialise parent and children",
"parent",
"=",
"self",
"children",
"=",
"self",
".",
"children",
"# loop through all of the path parts except for the last one",
"for",
"name",
"in",
"parts",
"[",
":",
"-",
"1",
"]",
":",
"child",
"=",
"children",
".",
"get",
"(",
"name",
")",
"if",
"not",
"child",
":",
"# if the child does not exist then create an empty one",
"# and associate it to the parent",
"child",
"=",
"EmptyResource",
"(",
")",
"parent",
".",
"putChild",
"(",
"name",
",",
"child",
")",
"# update parent and children for the next iteration",
"parent",
"=",
"child",
"children",
"=",
"parent",
".",
"children",
"name",
"=",
"parts",
"[",
"-",
"1",
"]",
"# get the path part that we care about",
"if",
"children",
".",
"get",
"(",
"name",
")",
":",
"self",
".",
"logger",
".",
"warn",
"(",
"'A resource already exists at this path. Check '",
"'your resources list to ensure each path is '",
"'unique. The previous resource will be overridden.'",
")",
"parent",
".",
"putChild",
"(",
"name",
",",
"res",
")",
"except",
"AttributeError",
":",
"# raise an attribute error if the resource `res` doesn't contain",
"# the attribute `namespace`",
"msg",
"=",
"(",
"'%r improperly configured. additional_resources instances must'",
"' have a namespace attribute'",
")",
"%",
"resource",
"raise",
"AttributeError",
"(",
"msg",
",",
"None",
",",
"sys",
".",
"exc_info",
"(",
")",
"[",
"2",
"]",
")"
] | putNamedChild takes either an instance of hendrix.contrib.NamedResource
or any resource.Resource with a "namespace" attribute as a means of
allowing application level control of resource namespacing.
if a child is already found at an existing path,
resources with paths that are children of those physical paths
will be added as children of those resources | [
"putNamedChild",
"takes",
"either",
"an",
"instance",
"of",
"hendrix",
".",
"contrib",
".",
"NamedResource",
"or",
"any",
"resource",
".",
"Resource",
"with",
"a",
"namespace",
"attribute",
"as",
"a",
"means",
"of",
"allowing",
"application",
"level",
"control",
"of",
"resource",
"namespacing",
"."
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/facilities/resources.py#L59-L105 | train | 251,347 |
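putNamedChild() above only requires a twisted Resource carrying a namespace attribute; a hedged sketch of such a resource follows (the path and behaviour are invented):

from twisted.web import resource

class PingResource(resource.Resource):
    isLeaf = True
    namespace = 'api/ping'   # nested under /api/ping by putNamedChild()

    def render_GET(self, request):
        return b'pong'

# hendrix_resource.putNamedChild(PingResource())  # hendrix_resource: a HendrixResource instance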
hendrix/hendrix | hendrix/contrib/concurrency/messaging.py | send_json_message | def send_json_message(address, message, **kwargs):
"""
a shortcut for message sending
"""
data = {
'message': message,
}
if not kwargs.get('subject_id'):
data['subject_id'] = address
data.update(kwargs)
hxdispatcher.send(address, data) | python | def send_json_message(address, message, **kwargs):
"""
a shortcut for message sending
"""
data = {
'message': message,
}
if not kwargs.get('subject_id'):
data['subject_id'] = address
data.update(kwargs)
hxdispatcher.send(address, data) | [
"def",
"send_json_message",
"(",
"address",
",",
"message",
",",
"*",
"*",
"kwargs",
")",
":",
"data",
"=",
"{",
"'message'",
":",
"message",
",",
"}",
"if",
"not",
"kwargs",
".",
"get",
"(",
"'subject_id'",
")",
":",
"data",
"[",
"'subject_id'",
"]",
"=",
"address",
"data",
".",
"update",
"(",
"kwargs",
")",
"hxdispatcher",
".",
"send",
"(",
"address",
",",
"data",
")"
] | a shortcut for message sending | [
"a",
"shortcut",
"for",
"message",
"sending"
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/contrib/concurrency/messaging.py#L125-L139 | train | 251,348 |
hendrix/hendrix | hendrix/contrib/concurrency/messaging.py | send_callback_json_message | def send_callback_json_message(value, *args, **kwargs):
"""
useful for sending messages from callbacks as it puts the
result of the callback in the dict for serialization
"""
if value:
kwargs['result'] = value
send_json_message(args[0], args[1], **kwargs)
return value | python | def send_callback_json_message(value, *args, **kwargs):
"""
useful for sending messages from callbacks as it puts the
result of the callback in the dict for serialization
"""
if value:
kwargs['result'] = value
send_json_message(args[0], args[1], **kwargs)
return value | [
"def",
"send_callback_json_message",
"(",
"value",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"value",
":",
"kwargs",
"[",
"'result'",
"]",
"=",
"value",
"send_json_message",
"(",
"args",
"[",
"0",
"]",
",",
"args",
"[",
"1",
"]",
",",
"*",
"*",
"kwargs",
")",
"return",
"value"
] | useful for sending messages from callbacks as it puts the
result of the callback in the dict for serialization | [
"useful",
"for",
"sending",
"messages",
"from",
"callbacks",
"as",
"it",
"puts",
"the",
"result",
"of",
"the",
"callback",
"in",
"the",
"dict",
"for",
"serialization"
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/contrib/concurrency/messaging.py#L142-L153 | train | 251,349 |
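A sketch of how the two helpers above might be used from application code; the recipient address is invented and assumes some client has already registered under it (otherwise the send is a silent no-op).

from twisted.internet import defer
from hendrix.contrib.concurrency.messaging import (
    send_json_message, send_callback_json_message)

# fire-and-forget: goes to whoever is registered under that address
send_json_message('some-recipient-guid', 'job started')

# as a deferred callback: the callback's result is attached under 'result'
d = defer.succeed({'rows': 42})
d.addCallback(send_callback_json_message, 'some-recipient-guid', 'job finished')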
hendrix/hendrix | hendrix/contrib/concurrency/messaging.py | RecipientManager.send | def send(self, message): # usually a json string...
"""
sends whatever it is to each transport
"""
for transport in self.transports.values():
transport.protocol.sendMessage(message) | python | def send(self, message): # usually a json string...
"""
sends whatever it is to each transport
"""
for transport in self.transports.values():
transport.protocol.sendMessage(message) | [
"def",
"send",
"(",
"self",
",",
"message",
")",
":",
"# usually a json string...",
"for",
"transport",
"in",
"self",
".",
"transports",
".",
"values",
"(",
")",
":",
"transport",
".",
"protocol",
".",
"sendMessage",
"(",
"message",
")"
] | sends whatever it is to each transport | [
"sends",
"whatever",
"it",
"is",
"to",
"each",
"transport"
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/contrib/concurrency/messaging.py#L34-L39 | train | 251,350 |
hendrix/hendrix | hendrix/contrib/concurrency/messaging.py | RecipientManager.remove | def remove(self, transport):
"""
removes a transport if a member of this group
"""
if transport.uid in self.transports:
del (self.transports[transport.uid]) | python | def remove(self, transport):
"""
removes a transport if a member of this group
"""
if transport.uid in self.transports:
del (self.transports[transport.uid]) | [
"def",
"remove",
"(",
"self",
",",
"transport",
")",
":",
"if",
"transport",
".",
"uid",
"in",
"self",
".",
"transports",
":",
"del",
"(",
"self",
".",
"transports",
"[",
"transport",
".",
"uid",
"]",
")"
] | removes a transport if a member of this group | [
"removes",
"a",
"transport",
"if",
"a",
"member",
"of",
"this",
"group"
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/contrib/concurrency/messaging.py#L41-L46 | train | 251,351 |
hendrix/hendrix | hendrix/contrib/concurrency/messaging.py | MessageDispatcher.add | def add(self, transport, address=None):
"""
add a new recipient to be addressable by this MessageDispatcher
generate a new uuid address if one is not specified
"""
if not address:
address = str(uuid.uuid1())
if address in self.recipients:
self.recipients[address].add(transport)
else:
self.recipients[address] = RecipientManager(transport, address)
return address | python | def add(self, transport, address=None):
"""
add a new recipient to be addressable by this MessageDispatcher
generate a new uuid address if one is not specified
"""
if not address:
address = str(uuid.uuid1())
if address in self.recipients:
self.recipients[address].add(transport)
else:
self.recipients[address] = RecipientManager(transport, address)
return address | [
"def",
"add",
"(",
"self",
",",
"transport",
",",
"address",
"=",
"None",
")",
":",
"if",
"not",
"address",
":",
"address",
"=",
"str",
"(",
"uuid",
".",
"uuid1",
"(",
")",
")",
"if",
"address",
"in",
"self",
".",
"recipients",
":",
"self",
".",
"recipients",
"[",
"address",
"]",
".",
"add",
"(",
"transport",
")",
"else",
":",
"self",
".",
"recipients",
"[",
"address",
"]",
"=",
"RecipientManager",
"(",
"transport",
",",
"address",
")",
"return",
"address"
] | add a new recipient to be addressable by this MessageDispatcher
generate a new uuid address if one is not specified | [
"add",
"a",
"new",
"recipient",
"to",
"be",
"addressable",
"by",
"this",
"MessageDispatcher",
"generate",
"a",
"new",
"uuid",
"address",
"if",
"one",
"is",
"not",
"specified"
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/contrib/concurrency/messaging.py#L69-L83 | train | 251,352 |
hendrix/hendrix | hendrix/contrib/concurrency/messaging.py | MessageDispatcher.remove | def remove(self, transport):
"""
removes a transport from all channels to which it belongs.
"""
recipients = copy.copy(self.recipients)
for address, recManager in recipients.items():
recManager.remove(transport)
if not len(recManager.transports):
del self.recipients[address] | python | def remove(self, transport):
"""
removes a transport from all channels to which it belongs.
"""
recipients = copy.copy(self.recipients)
for address, recManager in recipients.items():
recManager.remove(transport)
if not len(recManager.transports):
del self.recipients[address] | [
"def",
"remove",
"(",
"self",
",",
"transport",
")",
":",
"recipients",
"=",
"copy",
".",
"copy",
"(",
"self",
".",
"recipients",
")",
"for",
"address",
",",
"recManager",
"in",
"recipients",
".",
"items",
"(",
")",
":",
"recManager",
".",
"remove",
"(",
"transport",
")",
"if",
"not",
"len",
"(",
"recManager",
".",
"transports",
")",
":",
"del",
"self",
".",
"recipients",
"[",
"address",
"]"
] | removes a transport from all channels to which it belongs. | [
"removes",
"a",
"transport",
"from",
"all",
"channels",
"to",
"which",
"it",
"belongs",
"."
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/contrib/concurrency/messaging.py#L85-L93 | train | 251,353 |
hendrix/hendrix | hendrix/contrib/concurrency/messaging.py | MessageDispatcher.send | def send(self, address, data_dict):
"""
address can either be a string or a list of strings
data_dict gets sent along as is and could contain anything
"""
if type(address) == list:
recipients = [self.recipients.get(rec) for rec in address]
else:
recipients = [self.recipients.get(address)]
if recipients:
for recipient in recipients:
if recipient:
recipient.send(json.dumps(data_dict).encode()) | python | def send(self, address, data_dict):
"""
address can either be a string or a list of strings
data_dict gets sent along as is and could contain anything
"""
if type(address) == list:
recipients = [self.recipients.get(rec) for rec in address]
else:
recipients = [self.recipients.get(address)]
if recipients:
for recipient in recipients:
if recipient:
recipient.send(json.dumps(data_dict).encode()) | [
"def",
"send",
"(",
"self",
",",
"address",
",",
"data_dict",
")",
":",
"if",
"type",
"(",
"address",
")",
"==",
"list",
":",
"recipients",
"=",
"[",
"self",
".",
"recipients",
".",
"get",
"(",
"rec",
")",
"for",
"rec",
"in",
"address",
"]",
"else",
":",
"recipients",
"=",
"[",
"self",
".",
"recipients",
".",
"get",
"(",
"address",
")",
"]",
"if",
"recipients",
":",
"for",
"recipient",
"in",
"recipients",
":",
"if",
"recipient",
":",
"recipient",
".",
"send",
"(",
"json",
".",
"dumps",
"(",
"data_dict",
")",
".",
"encode",
"(",
")",
")"
] | address can either be a string or a list of strings
data_dict gets sent along as is and could contain anything | [
"address",
"can",
"either",
"be",
"a",
"string",
"or",
"a",
"list",
"of",
"strings"
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/contrib/concurrency/messaging.py#L95-L110 | train | 251,354 |
hendrix/hendrix | hendrix/contrib/concurrency/messaging.py | MessageDispatcher.subscribe | def subscribe(self, transport, data):
"""
adds a transport to a channel
"""
self.add(transport, address=data.get('hx_subscribe').encode())
self.send(
data['hx_subscribe'],
{'message': "%r is listening" % transport}
) | python | def subscribe(self, transport, data):
"""
adds a transport to a channel
"""
self.add(transport, address=data.get('hx_subscribe').encode())
self.send(
data['hx_subscribe'],
{'message': "%r is listening" % transport}
) | [
"def",
"subscribe",
"(",
"self",
",",
"transport",
",",
"data",
")",
":",
"self",
".",
"add",
"(",
"transport",
",",
"address",
"=",
"data",
".",
"get",
"(",
"'hx_subscribe'",
")",
".",
"encode",
"(",
")",
")",
"self",
".",
"send",
"(",
"data",
"[",
"'hx_subscribe'",
"]",
",",
"{",
"'message'",
":",
"\"%r is listening\"",
"%",
"transport",
"}",
")"
] | adds a transport to a channel | [
"adds",
"a",
"transport",
"to",
"a",
"channel"
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/contrib/concurrency/messaging.py#L112-L122 | train | 251,355 |
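Tying the dispatcher methods above together with a throwaway transport; this assumes, as RecipientManager.remove() suggests, that transports are tracked by their uid attribute, and a real deployment would use the transports hendrix wires up itself.

import uuid
from hendrix.contrib.concurrency.messaging import hxdispatcher

class FakeProtocol(object):
    def sendMessage(self, payload):
        print(payload)

class FakeTransport(object):
    def __init__(self):
        self.uid = str(uuid.uuid4())
        self.protocol = FakeProtocol()

transport = FakeTransport()
guid = hxdispatcher.add(transport)                   # auto-generated return address
hxdispatcher.send(guid, {'message': 'hello'})        # delivered via transport.protocol.sendMessage
hxdispatcher.subscribe(transport, {'hx_subscribe': u'chat'})  # also joins the "chat" channel
hxdispatcher.remove(transport)                       # drops it from every channel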
hendrix/hendrix | hendrix/options.py | cleanOptions | def cleanOptions(options):
"""
    Takes an options dict and returns a tuple containing the reload boolean and
    the parsed list of cleaned options as would be expected to be passed to hx
"""
_reload = options.pop('reload')
dev = options.pop('dev')
opts = []
store_true = [
'--nocache', '--global_cache', '--quiet', '--loud'
]
store_false = []
for key, value in options.items():
key = '--' + key
if (key in store_true and value) or (key in store_false and not value):
opts += [key, ]
elif value:
opts += [key, str(value)]
return _reload, opts | python | def cleanOptions(options):
"""
    Takes an options dict and returns a tuple containing the reload boolean and
    the parsed list of cleaned options as would be expected to be passed to hx
"""
_reload = options.pop('reload')
dev = options.pop('dev')
opts = []
store_true = [
'--nocache', '--global_cache', '--quiet', '--loud'
]
store_false = []
for key, value in options.items():
key = '--' + key
if (key in store_true and value) or (key in store_false and not value):
opts += [key, ]
elif value:
opts += [key, str(value)]
return _reload, opts | [
"def",
"cleanOptions",
"(",
"options",
")",
":",
"_reload",
"=",
"options",
".",
"pop",
"(",
"'reload'",
")",
"dev",
"=",
"options",
".",
"pop",
"(",
"'dev'",
")",
"opts",
"=",
"[",
"]",
"store_true",
"=",
"[",
"'--nocache'",
",",
"'--global_cache'",
",",
"'--quiet'",
",",
"'--loud'",
"]",
"store_false",
"=",
"[",
"]",
"for",
"key",
",",
"value",
"in",
"options",
".",
"items",
"(",
")",
":",
"key",
"=",
"'--'",
"+",
"key",
"if",
"(",
"key",
"in",
"store_true",
"and",
"value",
")",
"or",
"(",
"key",
"in",
"store_false",
"and",
"not",
"value",
")",
":",
"opts",
"+=",
"[",
"key",
",",
"]",
"elif",
"value",
":",
"opts",
"+=",
"[",
"key",
",",
"str",
"(",
"value",
")",
"]",
"return",
"_reload",
",",
"opts"
] | Takes an options dict and returns a tuple containing the reload boolean and
    the parsed list of cleaned options as would be expected to be passed to hx | [
"Takes",
"an",
"options",
"dict",
"and",
"returns",
"a",
"tuple",
"containing",
"the",
"daemonize",
"boolean",
"the",
"reload",
"boolean",
"and",
"the",
"parsed",
"list",
"of",
"cleaned",
"options",
"as",
"would",
"be",
"expected",
"to",
"be",
"passed",
"to",
"hx"
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/options.py#L7-L26 | train | 251,356 |
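A quick usage sketch for cleanOptions (it assumes the hendrix package is importable; the option names below are illustrative): boolean flags listed in store_true become bare switches, other truthy values become flag/value pairs, and falsy values are dropped.

from hendrix.options import cleanOptions

opts = {
    'reload': True,
    'dev': False,
    'http_port': 8000,
    'quiet': True,
    'workers': 0,
}
_reload, cleaned = cleanOptions(opts)
print(_reload)  # True
print(cleaned)  # ['--http_port', '8000', '--quiet']  (order follows dict insertion order)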
hendrix/hendrix | hendrix/options.py | options | def options(argv=[]):
"""
A helper function that returns a dictionary of the default key-values pairs
"""
parser = HendrixOptionParser
parsed_args = parser.parse_args(argv)
return vars(parsed_args[0]) | python | def options(argv=[]):
"""
A helper function that returns a dictionary of the default key-values pairs
"""
parser = HendrixOptionParser
parsed_args = parser.parse_args(argv)
return vars(parsed_args[0]) | [
"def",
"options",
"(",
"argv",
"=",
"[",
"]",
")",
":",
"parser",
"=",
"HendrixOptionParser",
"parsed_args",
"=",
"parser",
".",
"parse_args",
"(",
"argv",
")",
"return",
"vars",
"(",
"parsed_args",
"[",
"0",
"]",
")"
] | A helper function that returns a dictionary of the default key-values pairs | [
"A",
"helper",
"function",
"that",
"returns",
"a",
"dictionary",
"of",
"the",
"default",
"key",
"-",
"values",
"pairs"
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/options.py#L195-L201 | train | 251,357 |
hendrix/hendrix | hendrix/experience/hey_joe.py | _ParticipantRegistry.remove | def remove(self, participant):
"""
        Unsubscribe this participant from all topics to which it is subscribed.
"""
for topic, participants in list(self._participants_by_topic.items()):
self.unsubscribe(participant, topic)
            # It's possible that we just nixed the last subscriber.
if not participants: # IE, nobody is still listening at this topic.
del self._participants_by_topic[topic] | python | def remove(self, participant):
"""
        Unsubscribe this participant from all topics to which it is subscribed.
"""
for topic, participants in list(self._participants_by_topic.items()):
self.unsubscribe(participant, topic)
            # It's possible that we just nixed the last subscriber.
if not participants: # IE, nobody is still listening at this topic.
del self._participants_by_topic[topic] | [
"def",
"remove",
"(",
"self",
",",
"participant",
")",
":",
"for",
"topic",
",",
"participants",
"in",
"list",
"(",
"self",
".",
"_participants_by_topic",
".",
"items",
"(",
")",
")",
":",
"self",
".",
"unsubscribe",
"(",
"participant",
",",
"topic",
")",
"# It's possible that we just nixe the last subscriber.",
"if",
"not",
"participants",
":",
"# IE, nobody is still listening at this topic.",
"del",
"self",
".",
"_participants_by_topic",
"[",
"topic",
"]"
] | Unsubscribe this participant from all topics to which it is subscribed. | [
"Unsubscribe",
"this",
"participant",
"from",
"all",
"topic",
"to",
"which",
"it",
"is",
"subscribed",
"."
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/experience/hey_joe.py#L67-L75 | train | 251,358 |
hendrix/hendrix | hendrix/deploy/tls.py | HendrixDeployTLS.addSSLService | def addSSLService(self):
"adds a SSLService to the instaitated HendrixService"
https_port = self.options['https_port']
self.tls_service = HendrixTCPServiceWithTLS(https_port, self.hendrix.site, self.key, self.cert,
self.context_factory, self.context_factory_kwargs)
self.tls_service.setServiceParent(self.hendrix) | python | def addSSLService(self):
"adds a SSLService to the instaitated HendrixService"
https_port = self.options['https_port']
self.tls_service = HendrixTCPServiceWithTLS(https_port, self.hendrix.site, self.key, self.cert,
self.context_factory, self.context_factory_kwargs)
self.tls_service.setServiceParent(self.hendrix) | [
"def",
"addSSLService",
"(",
"self",
")",
":",
"https_port",
"=",
"self",
".",
"options",
"[",
"'https_port'",
"]",
"self",
".",
"tls_service",
"=",
"HendrixTCPServiceWithTLS",
"(",
"https_port",
",",
"self",
".",
"hendrix",
".",
"site",
",",
"self",
".",
"key",
",",
"self",
".",
"cert",
",",
"self",
".",
"context_factory",
",",
"self",
".",
"context_factory_kwargs",
")",
"self",
".",
"tls_service",
".",
"setServiceParent",
"(",
"self",
".",
"hendrix",
")"
] | adds a SSLService to the instantiated HendrixService | [
"adds",
"a",
"SSLService",
"to",
"the",
"instaitated",
"HendrixService"
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/deploy/tls.py#L61-L66 | train | 251,359 |
hendrix/hendrix | hendrix/contrib/cache/backends/memory_cache.py | MemoryCacheBackend.addResource | def addResource(self, content, uri, headers):
"""
        Adds a hendrix.contrib.cache.resource.CachedResource to the
ReverseProxy cache connection
"""
self.cache[uri] = CachedResource(content, headers) | python | def addResource(self, content, uri, headers):
"""
        Adds a hendrix.contrib.cache.resource.CachedResource to the
ReverseProxy cache connection
"""
self.cache[uri] = CachedResource(content, headers) | [
"def",
"addResource",
"(",
"self",
",",
"content",
",",
"uri",
",",
"headers",
")",
":",
"self",
".",
"cache",
"[",
"uri",
"]",
"=",
"CachedResource",
"(",
"content",
",",
"headers",
")"
] | Adds a hendrix.contrib.cache.resource.CachedResource to the
ReverseProxy cache connection | [
"Adds",
"the",
"a",
"hendrix",
".",
"contrib",
".",
"cache",
".",
"resource",
".",
"CachedResource",
"to",
"the",
"ReverseProxy",
"cache",
"connection"
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/contrib/cache/backends/memory_cache.py#L15-L20 | train | 251,360 |
hendrix/hendrix | hendrix/contrib/cache/__init__.py | decompressBuffer | def decompressBuffer(buffer):
"complements the compressBuffer function in CacheClient"
zbuf = cStringIO.StringIO(buffer)
zfile = gzip.GzipFile(fileobj=zbuf)
deflated = zfile.read()
zfile.close()
return deflated | python | def decompressBuffer(buffer):
"complements the compressBuffer function in CacheClient"
zbuf = cStringIO.StringIO(buffer)
zfile = gzip.GzipFile(fileobj=zbuf)
deflated = zfile.read()
zfile.close()
return deflated | [
"def",
"decompressBuffer",
"(",
"buffer",
")",
":",
"zbuf",
"=",
"cStringIO",
".",
"StringIO",
"(",
"buffer",
")",
"zfile",
"=",
"gzip",
".",
"GzipFile",
"(",
"fileobj",
"=",
"zbuf",
")",
"deflated",
"=",
"zfile",
".",
"read",
"(",
")",
"zfile",
".",
"close",
"(",
")",
"return",
"deflated"
] | complements the compressBuffer function in CacheClient | [
"complements",
"the",
"compressBuffer",
"function",
"in",
"CacheClient"
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/contrib/cache/__init__.py#L30-L36 | train | 251,361 |
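decompressBuffer above is the Python 2 half of a gzip round trip (its counterpart compressBuffer lives in the cache client). A self-contained Python 3 equivalent using only the standard library looks like this:

import gzip
import io

def compress_buffer(data: bytes) -> bytes:
    zbuf = io.BytesIO()
    with gzip.GzipFile(fileobj=zbuf, mode="wb") as zfile:
        zfile.write(data)
    return zbuf.getvalue()

def decompress_buffer(blob: bytes) -> bytes:
    with gzip.GzipFile(fileobj=io.BytesIO(blob)) as zfile:
        return zfile.read()

payload = b"cached response body"
assert decompress_buffer(compress_buffer(payload)) == payload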
hendrix/hendrix | hendrix/contrib/cache/__init__.py | CachedResource.getMaxAge | def getMaxAge(self):
"get the max-age in seconds from the saved headers data"
max_age = 0
cache_control = self.headers.get('cache-control')
if cache_control:
params = dict(urlparse.parse_qsl(cache_control))
max_age = int(params.get('max-age', '0'))
return max_age | python | def getMaxAge(self):
"get the max-age in seconds from the saved headers data"
max_age = 0
cache_control = self.headers.get('cache-control')
if cache_control:
params = dict(urlparse.parse_qsl(cache_control))
max_age = int(params.get('max-age', '0'))
return max_age | [
"def",
"getMaxAge",
"(",
"self",
")",
":",
"max_age",
"=",
"0",
"cache_control",
"=",
"self",
".",
"headers",
".",
"get",
"(",
"'cache-control'",
")",
"if",
"cache_control",
":",
"params",
"=",
"dict",
"(",
"urlparse",
".",
"parse_qsl",
"(",
"cache_control",
")",
")",
"max_age",
"=",
"int",
"(",
"params",
".",
"get",
"(",
"'max-age'",
",",
"'0'",
")",
")",
"return",
"max_age"
] | get the max-age in seconds from the saved headers data | [
"get",
"the",
"max",
"-",
"age",
"in",
"seconds",
"from",
"the",
"saved",
"headers",
"data"
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/contrib/cache/__init__.py#L51-L58 | train | 251,362 |
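getMaxAge leans on urllib-style query parsing to read the max-age directive. A tiny stand-alone illustration (Python 3 names) follows; note that a compound header such as "public, max-age=300" would not split on the comma with this approach, so it only works for a bare max-age directive.

from urllib.parse import parse_qsl

cache_control = "max-age=300"
params = dict(parse_qsl(cache_control))
max_age = int(params.get("max-age", "0"))
print(max_age)  # 300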
hendrix/hendrix | hendrix/contrib/cache/__init__.py | CachedResource.getLastModified | def getLastModified(self):
"returns the GMT last-modified datetime or None"
last_modified = self.headers.get('last-modified')
if last_modified:
last_modified = self.convertTimeString(last_modified)
return last_modified | python | def getLastModified(self):
"returns the GMT last-modified datetime or None"
last_modified = self.headers.get('last-modified')
if last_modified:
last_modified = self.convertTimeString(last_modified)
return last_modified | [
"def",
"getLastModified",
"(",
"self",
")",
":",
"last_modified",
"=",
"self",
".",
"headers",
".",
"get",
"(",
"'last-modified'",
")",
"if",
"last_modified",
":",
"last_modified",
"=",
"self",
".",
"convertTimeString",
"(",
"last_modified",
")",
"return",
"last_modified"
] | returns the GMT last-modified datetime or None | [
"returns",
"the",
"GMT",
"last",
"-",
"modified",
"datetime",
"or",
"None"
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/contrib/cache/__init__.py#L68-L73 | train | 251,363 |
hendrix/hendrix | hendrix/contrib/cache/__init__.py | CachedResource.getDate | def getDate(self):
"returns the GMT response datetime or None"
date = self.headers.get('date')
if date:
date = self.convertTimeString(date)
return date | python | def getDate(self):
"returns the GMT response datetime or None"
date = self.headers.get('date')
if date:
date = self.convertTimeString(date)
return date | [
"def",
"getDate",
"(",
"self",
")",
":",
"date",
"=",
"self",
".",
"headers",
".",
"get",
"(",
"'date'",
")",
"if",
"date",
":",
"date",
"=",
"self",
".",
"convertTimeString",
"(",
"date",
")",
"return",
"date"
] | returns the GMT response datetime or None | [
"returns",
"the",
"GMT",
"response",
"datetime",
"or",
"None"
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/contrib/cache/__init__.py#L75-L80 | train | 251,364 |
hendrix/hendrix | hendrix/contrib/cache/__init__.py | CachedResource.isFresh | def isFresh(self):
"returns True if cached object is still fresh"
max_age = self.getMaxAge()
date = self.getDate()
is_fresh = False
if max_age and date:
delta_time = datetime.now() - date
is_fresh = delta_time.total_seconds() < max_age
return is_fresh | python | def isFresh(self):
"returns True if cached object is still fresh"
max_age = self.getMaxAge()
date = self.getDate()
is_fresh = False
if max_age and date:
delta_time = datetime.now() - date
is_fresh = delta_time.total_seconds() < max_age
return is_fresh | [
"def",
"isFresh",
"(",
"self",
")",
":",
"max_age",
"=",
"self",
".",
"getMaxAge",
"(",
")",
"date",
"=",
"self",
".",
"getDate",
"(",
")",
"is_fresh",
"=",
"False",
"if",
"max_age",
"and",
"date",
":",
"delta_time",
"=",
"datetime",
".",
"now",
"(",
")",
"-",
"date",
"is_fresh",
"=",
"delta_time",
".",
"total_seconds",
"(",
")",
"<",
"max_age",
"return",
"is_fresh"
] | returns True if cached object is still fresh | [
"returns",
"True",
"if",
"cached",
"object",
"is",
"still",
"fresh"
] | 175af011a7e5822b772bfec0e11a46466bb8688d | https://github.com/hendrix/hendrix/blob/175af011a7e5822b772bfec0e11a46466bb8688d/hendrix/contrib/cache/__init__.py#L82-L90 | train | 251,365 |
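The freshness rule used by isFresh is simply "age is still under max-age". A self-contained sketch of the same check, with an explicit clock so it can be tested deterministically:

from datetime import datetime, timedelta

def is_fresh(date_header, max_age, now):
    """Return True while (now - Date header) is smaller than max-age seconds."""
    if not max_age or date_header is None:
        return False
    return (now - date_header).total_seconds() < max_age

served_at = datetime(2024, 1, 1, 12, 0, 0)
print(is_fresh(served_at, 300, served_at + timedelta(seconds=120)))  # True
print(is_fresh(served_at, 300, served_at + timedelta(seconds=900)))  # False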
aio-libs/aiohttp-cors | aiohttp_cors/cors_config.py | _CorsConfigImpl._on_response_prepare | async def _on_response_prepare(self,
request: web.Request,
response: web.StreamResponse):
"""Non-preflight CORS request response processor.
If request is done on CORS-enabled route, process request parameters
and set appropriate CORS response headers.
"""
if (not self._router_adapter.is_cors_enabled_on_request(request) or
self._router_adapter.is_preflight_request(request)):
# Either not CORS enabled route, or preflight request which is
# handled in its own handler.
return
# Processing response of non-preflight CORS-enabled request.
config = self._router_adapter.get_non_preflight_request_config(request)
# Handle according to part 6.1 of the CORS specification.
origin = request.headers.get(hdrs.ORIGIN)
if origin is None:
# Terminate CORS according to CORS 6.1.1.
return
options = config.get(origin, config.get("*"))
if options is None:
# Terminate CORS according to CORS 6.1.2.
return
assert hdrs.ACCESS_CONTROL_ALLOW_ORIGIN not in response.headers
assert hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS not in response.headers
assert hdrs.ACCESS_CONTROL_EXPOSE_HEADERS not in response.headers
# Process according to CORS 6.1.4.
# Set exposed headers (server headers exposed to client) before
# setting any other headers.
if options.expose_headers == "*":
# Expose all headers that are set in response.
exposed_headers = \
frozenset(response.headers.keys()) - _SIMPLE_RESPONSE_HEADERS
response.headers[hdrs.ACCESS_CONTROL_EXPOSE_HEADERS] = \
",".join(exposed_headers)
elif options.expose_headers:
# Expose predefined list of headers.
response.headers[hdrs.ACCESS_CONTROL_EXPOSE_HEADERS] = \
",".join(options.expose_headers)
# Process according to CORS 6.1.3.
# Set allowed origin.
response.headers[hdrs.ACCESS_CONTROL_ALLOW_ORIGIN] = origin
if options.allow_credentials:
# Set allowed credentials.
response.headers[hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS] = _TRUE | python | async def _on_response_prepare(self,
request: web.Request,
response: web.StreamResponse):
"""Non-preflight CORS request response processor.
If request is done on CORS-enabled route, process request parameters
and set appropriate CORS response headers.
"""
if (not self._router_adapter.is_cors_enabled_on_request(request) or
self._router_adapter.is_preflight_request(request)):
# Either not CORS enabled route, or preflight request which is
# handled in its own handler.
return
# Processing response of non-preflight CORS-enabled request.
config = self._router_adapter.get_non_preflight_request_config(request)
# Handle according to part 6.1 of the CORS specification.
origin = request.headers.get(hdrs.ORIGIN)
if origin is None:
# Terminate CORS according to CORS 6.1.1.
return
options = config.get(origin, config.get("*"))
if options is None:
# Terminate CORS according to CORS 6.1.2.
return
assert hdrs.ACCESS_CONTROL_ALLOW_ORIGIN not in response.headers
assert hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS not in response.headers
assert hdrs.ACCESS_CONTROL_EXPOSE_HEADERS not in response.headers
# Process according to CORS 6.1.4.
# Set exposed headers (server headers exposed to client) before
# setting any other headers.
if options.expose_headers == "*":
# Expose all headers that are set in response.
exposed_headers = \
frozenset(response.headers.keys()) - _SIMPLE_RESPONSE_HEADERS
response.headers[hdrs.ACCESS_CONTROL_EXPOSE_HEADERS] = \
",".join(exposed_headers)
elif options.expose_headers:
# Expose predefined list of headers.
response.headers[hdrs.ACCESS_CONTROL_EXPOSE_HEADERS] = \
",".join(options.expose_headers)
# Process according to CORS 6.1.3.
# Set allowed origin.
response.headers[hdrs.ACCESS_CONTROL_ALLOW_ORIGIN] = origin
if options.allow_credentials:
# Set allowed credentials.
response.headers[hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS] = _TRUE | [
"async",
"def",
"_on_response_prepare",
"(",
"self",
",",
"request",
":",
"web",
".",
"Request",
",",
"response",
":",
"web",
".",
"StreamResponse",
")",
":",
"if",
"(",
"not",
"self",
".",
"_router_adapter",
".",
"is_cors_enabled_on_request",
"(",
"request",
")",
"or",
"self",
".",
"_router_adapter",
".",
"is_preflight_request",
"(",
"request",
")",
")",
":",
"# Either not CORS enabled route, or preflight request which is",
"# handled in its own handler.",
"return",
"# Processing response of non-preflight CORS-enabled request.",
"config",
"=",
"self",
".",
"_router_adapter",
".",
"get_non_preflight_request_config",
"(",
"request",
")",
"# Handle according to part 6.1 of the CORS specification.",
"origin",
"=",
"request",
".",
"headers",
".",
"get",
"(",
"hdrs",
".",
"ORIGIN",
")",
"if",
"origin",
"is",
"None",
":",
"# Terminate CORS according to CORS 6.1.1.",
"return",
"options",
"=",
"config",
".",
"get",
"(",
"origin",
",",
"config",
".",
"get",
"(",
"\"*\"",
")",
")",
"if",
"options",
"is",
"None",
":",
"# Terminate CORS according to CORS 6.1.2.",
"return",
"assert",
"hdrs",
".",
"ACCESS_CONTROL_ALLOW_ORIGIN",
"not",
"in",
"response",
".",
"headers",
"assert",
"hdrs",
".",
"ACCESS_CONTROL_ALLOW_CREDENTIALS",
"not",
"in",
"response",
".",
"headers",
"assert",
"hdrs",
".",
"ACCESS_CONTROL_EXPOSE_HEADERS",
"not",
"in",
"response",
".",
"headers",
"# Process according to CORS 6.1.4.",
"# Set exposed headers (server headers exposed to client) before",
"# setting any other headers.",
"if",
"options",
".",
"expose_headers",
"==",
"\"*\"",
":",
"# Expose all headers that are set in response.",
"exposed_headers",
"=",
"frozenset",
"(",
"response",
".",
"headers",
".",
"keys",
"(",
")",
")",
"-",
"_SIMPLE_RESPONSE_HEADERS",
"response",
".",
"headers",
"[",
"hdrs",
".",
"ACCESS_CONTROL_EXPOSE_HEADERS",
"]",
"=",
"\",\"",
".",
"join",
"(",
"exposed_headers",
")",
"elif",
"options",
".",
"expose_headers",
":",
"# Expose predefined list of headers.",
"response",
".",
"headers",
"[",
"hdrs",
".",
"ACCESS_CONTROL_EXPOSE_HEADERS",
"]",
"=",
"\",\"",
".",
"join",
"(",
"options",
".",
"expose_headers",
")",
"# Process according to CORS 6.1.3.",
"# Set allowed origin.",
"response",
".",
"headers",
"[",
"hdrs",
".",
"ACCESS_CONTROL_ALLOW_ORIGIN",
"]",
"=",
"origin",
"if",
"options",
".",
"allow_credentials",
":",
"# Set allowed credentials.",
"response",
".",
"headers",
"[",
"hdrs",
".",
"ACCESS_CONTROL_ALLOW_CREDENTIALS",
"]",
"=",
"_TRUE"
] | Non-preflight CORS request response processor.
If request is done on CORS-enabled route, process request parameters
and set appropriate CORS response headers. | [
"Non",
"-",
"preflight",
"CORS",
"request",
"response",
"processor",
"."
] | 14affbd95c88c675eb513c1d295ede1897930f94 | https://github.com/aio-libs/aiohttp-cors/blob/14affbd95c88c675eb513c1d295ede1897930f94/aiohttp_cors/cors_config.py#L141-L195 | train | 251,366 |
aio-libs/aiohttp-cors | aiohttp_cors/resource_options.py | _is_proper_sequence | def _is_proper_sequence(seq):
"""Returns is seq is sequence and not string."""
return (isinstance(seq, collections.abc.Sequence) and
not isinstance(seq, str)) | python | def _is_proper_sequence(seq):
"""Returns is seq is sequence and not string."""
return (isinstance(seq, collections.abc.Sequence) and
not isinstance(seq, str)) | [
"def",
"_is_proper_sequence",
"(",
"seq",
")",
":",
"return",
"(",
"isinstance",
"(",
"seq",
",",
"collections",
".",
"abc",
".",
"Sequence",
")",
"and",
"not",
"isinstance",
"(",
"seq",
",",
"str",
")",
")"
] | Returns True if seq is a sequence and not a string. | [
"Returns",
"is",
"seq",
"is",
"sequence",
"and",
"not",
"string",
"."
] | 14affbd95c88c675eb513c1d295ede1897930f94 | https://github.com/aio-libs/aiohttp-cors/blob/14affbd95c88c675eb513c1d295ede1897930f94/aiohttp_cors/resource_options.py#L25-L28 | train | 251,367 |
aio-libs/aiohttp-cors | aiohttp_cors/__init__.py | setup | def setup(app: web.Application, *,
defaults: Mapping[str, Union[ResourceOptions,
Mapping[str, Any]]]=None) -> CorsConfig:
"""Setup CORS processing for the application.
To enable CORS for a resource you need to explicitly add route for
that resource using `CorsConfig.add()` method::
app = aiohttp.web.Application()
cors = aiohttp_cors.setup(app)
cors.add(
app.router.add_route("GET", "/resource", handler),
{
"*": aiohttp_cors.ResourceOptions(
allow_credentials=True,
expose_headers="*",
allow_headers="*"),
})
:param app:
The application for which CORS will be configured.
:param defaults:
Default settings for origins.
)
"""
cors = CorsConfig(app, defaults=defaults)
app[APP_CONFIG_KEY] = cors
return cors | python | def setup(app: web.Application, *,
defaults: Mapping[str, Union[ResourceOptions,
Mapping[str, Any]]]=None) -> CorsConfig:
"""Setup CORS processing for the application.
To enable CORS for a resource you need to explicitly add route for
that resource using `CorsConfig.add()` method::
app = aiohttp.web.Application()
cors = aiohttp_cors.setup(app)
cors.add(
app.router.add_route("GET", "/resource", handler),
{
"*": aiohttp_cors.ResourceOptions(
allow_credentials=True,
expose_headers="*",
allow_headers="*"),
})
:param app:
The application for which CORS will be configured.
:param defaults:
Default settings for origins.
)
"""
cors = CorsConfig(app, defaults=defaults)
app[APP_CONFIG_KEY] = cors
return cors | [
"def",
"setup",
"(",
"app",
":",
"web",
".",
"Application",
",",
"*",
",",
"defaults",
":",
"Mapping",
"[",
"str",
",",
"Union",
"[",
"ResourceOptions",
",",
"Mapping",
"[",
"str",
",",
"Any",
"]",
"]",
"]",
"=",
"None",
")",
"->",
"CorsConfig",
":",
"cors",
"=",
"CorsConfig",
"(",
"app",
",",
"defaults",
"=",
"defaults",
")",
"app",
"[",
"APP_CONFIG_KEY",
"]",
"=",
"cors",
"return",
"cors"
] | Setup CORS processing for the application.
To enable CORS for a resource you need to explicitly add route for
that resource using `CorsConfig.add()` method::
app = aiohttp.web.Application()
cors = aiohttp_cors.setup(app)
cors.add(
app.router.add_route("GET", "/resource", handler),
{
"*": aiohttp_cors.ResourceOptions(
allow_credentials=True,
expose_headers="*",
allow_headers="*"),
})
:param app:
The application for which CORS will be configured.
:param defaults:
Default settings for origins.
) | [
"Setup",
"CORS",
"processing",
"for",
"the",
"application",
"."
] | 14affbd95c88c675eb513c1d295ede1897930f94 | https://github.com/aio-libs/aiohttp-cors/blob/14affbd95c88c675eb513c1d295ede1897930f94/aiohttp_cors/__init__.py#L40-L67 | train | 251,368 |
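A slightly fuller, runnable variant of the snippet embedded in the docstring above (it assumes aiohttp and aiohttp_cors are installed; the origin and port are placeholders). Routes registered through cors.add() pick up the preflight handling and response headers shown in the surrounding records.

from aiohttp import web
import aiohttp_cors

async def handler(request):
    return web.json_response({"ok": True})

def make_app():
    app = web.Application()
    cors = aiohttp_cors.setup(app, defaults={
        "https://client.example.org": aiohttp_cors.ResourceOptions(
            allow_credentials=True,
            expose_headers="*",
            allow_headers="*",
        ),
    })
    # Enable CORS on a concrete route; with no per-route config the defaults apply.
    cors.add(app.router.add_route("GET", "/resource", handler))
    return app

if __name__ == "__main__":
    web.run_app(make_app(), port=8080)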
aio-libs/aiohttp-cors | aiohttp_cors/preflight_handler.py | _PreflightHandler._parse_request_method | def _parse_request_method(request: web.Request):
"""Parse Access-Control-Request-Method header of the preflight request
"""
method = request.headers.get(hdrs.ACCESS_CONTROL_REQUEST_METHOD)
if method is None:
raise web.HTTPForbidden(
text="CORS preflight request failed: "
"'Access-Control-Request-Method' header is not specified")
# FIXME: validate method string (ABNF: method = token), if parsing
# fails, raise HTTPForbidden.
return method | python | def _parse_request_method(request: web.Request):
"""Parse Access-Control-Request-Method header of the preflight request
"""
method = request.headers.get(hdrs.ACCESS_CONTROL_REQUEST_METHOD)
if method is None:
raise web.HTTPForbidden(
text="CORS preflight request failed: "
"'Access-Control-Request-Method' header is not specified")
# FIXME: validate method string (ABNF: method = token), if parsing
# fails, raise HTTPForbidden.
return method | [
"def",
"_parse_request_method",
"(",
"request",
":",
"web",
".",
"Request",
")",
":",
"method",
"=",
"request",
".",
"headers",
".",
"get",
"(",
"hdrs",
".",
"ACCESS_CONTROL_REQUEST_METHOD",
")",
"if",
"method",
"is",
"None",
":",
"raise",
"web",
".",
"HTTPForbidden",
"(",
"text",
"=",
"\"CORS preflight request failed: \"",
"\"'Access-Control-Request-Method' header is not specified\"",
")",
"# FIXME: validate method string (ABNF: method = token), if parsing",
"# fails, raise HTTPForbidden.",
"return",
"method"
] | Parse Access-Control-Request-Method header of the preflight request | [
"Parse",
"Access",
"-",
"Control",
"-",
"Request",
"-",
"Method",
"header",
"of",
"the",
"preflight",
"request"
] | 14affbd95c88c675eb513c1d295ede1897930f94 | https://github.com/aio-libs/aiohttp-cors/blob/14affbd95c88c675eb513c1d295ede1897930f94/aiohttp_cors/preflight_handler.py#L10-L22 | train | 251,369 |
aio-libs/aiohttp-cors | aiohttp_cors/preflight_handler.py | _PreflightHandler._parse_request_headers | def _parse_request_headers(request: web.Request):
"""Parse Access-Control-Request-Headers header or the preflight request
Returns set of headers in upper case.
"""
headers = request.headers.get(hdrs.ACCESS_CONTROL_REQUEST_HEADERS)
if headers is None:
return frozenset()
# FIXME: validate each header string, if parsing fails, raise
# HTTPForbidden.
# FIXME: check, that headers split and stripped correctly (according
# to ABNF).
headers = (h.strip(" \t").upper() for h in headers.split(","))
# pylint: disable=bad-builtin
return frozenset(filter(None, headers)) | python | def _parse_request_headers(request: web.Request):
"""Parse Access-Control-Request-Headers header or the preflight request
Returns set of headers in upper case.
"""
headers = request.headers.get(hdrs.ACCESS_CONTROL_REQUEST_HEADERS)
if headers is None:
return frozenset()
# FIXME: validate each header string, if parsing fails, raise
# HTTPForbidden.
# FIXME: check, that headers split and stripped correctly (according
# to ABNF).
headers = (h.strip(" \t").upper() for h in headers.split(","))
# pylint: disable=bad-builtin
return frozenset(filter(None, headers)) | [
"def",
"_parse_request_headers",
"(",
"request",
":",
"web",
".",
"Request",
")",
":",
"headers",
"=",
"request",
".",
"headers",
".",
"get",
"(",
"hdrs",
".",
"ACCESS_CONTROL_REQUEST_HEADERS",
")",
"if",
"headers",
"is",
"None",
":",
"return",
"frozenset",
"(",
")",
"# FIXME: validate each header string, if parsing fails, raise",
"# HTTPForbidden.",
"# FIXME: check, that headers split and stripped correctly (according",
"# to ABNF).",
"headers",
"=",
"(",
"h",
".",
"strip",
"(",
"\" \\t\"",
")",
".",
"upper",
"(",
")",
"for",
"h",
"in",
"headers",
".",
"split",
"(",
"\",\"",
")",
")",
"# pylint: disable=bad-builtin",
"return",
"frozenset",
"(",
"filter",
"(",
"None",
",",
"headers",
")",
")"
] | Parse Access-Control-Request-Headers header of the preflight request
Returns set of headers in upper case. | [
"Parse",
"Access",
"-",
"Control",
"-",
"Request",
"-",
"Headers",
"header",
"or",
"the",
"preflight",
"request"
] | 14affbd95c88c675eb513c1d295ede1897930f94 | https://github.com/aio-libs/aiohttp-cors/blob/14affbd95c88c675eb513c1d295ede1897930f94/aiohttp_cors/preflight_handler.py#L25-L40 | train | 251,370 |
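The header-list parsing above boils down to: split on commas, strip spaces and tabs, upper-case, and drop empty items. The same steps in isolation:

raw = "X-Requested-With, content-type , ,Authorization"
headers = frozenset(
    h for h in (part.strip(" \t").upper() for part in raw.split(",")) if h
)
print(sorted(headers))  # ['AUTHORIZATION', 'CONTENT-TYPE', 'X-REQUESTED-WITH']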
aio-libs/aiohttp-cors | aiohttp_cors/preflight_handler.py | _PreflightHandler._preflight_handler | async def _preflight_handler(self, request: web.Request):
"""CORS preflight request handler"""
# Handle according to part 6.2 of the CORS specification.
origin = request.headers.get(hdrs.ORIGIN)
if origin is None:
# Terminate CORS according to CORS 6.2.1.
raise web.HTTPForbidden(
text="CORS preflight request failed: "
"origin header is not specified in the request")
# CORS 6.2.3. Doing it out of order is not an error.
request_method = self._parse_request_method(request)
# CORS 6.2.5. Doing it out of order is not an error.
try:
config = \
await self._get_config(request, origin, request_method)
except KeyError:
raise web.HTTPForbidden(
text="CORS preflight request failed: "
"request method {!r} is not allowed "
"for {!r} origin".format(request_method, origin))
if not config:
# No allowed origins for the route.
# Terminate CORS according to CORS 6.2.1.
raise web.HTTPForbidden(
text="CORS preflight request failed: "
"no origins are allowed")
options = config.get(origin, config.get("*"))
if options is None:
# No configuration for the origin - deny.
# Terminate CORS according to CORS 6.2.2.
raise web.HTTPForbidden(
text="CORS preflight request failed: "
"origin '{}' is not allowed".format(origin))
# CORS 6.2.4
request_headers = self._parse_request_headers(request)
# CORS 6.2.6
if options.allow_headers == "*":
pass
else:
disallowed_headers = request_headers - options.allow_headers
if disallowed_headers:
raise web.HTTPForbidden(
text="CORS preflight request failed: "
"headers are not allowed: {}".format(
", ".join(disallowed_headers)))
# Ok, CORS actual request with specified in the preflight request
# parameters is allowed.
# Set appropriate headers and return 200 response.
response = web.Response()
# CORS 6.2.7
response.headers[hdrs.ACCESS_CONTROL_ALLOW_ORIGIN] = origin
if options.allow_credentials:
# Set allowed credentials.
response.headers[hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS] = _TRUE
# CORS 6.2.8
if options.max_age is not None:
response.headers[hdrs.ACCESS_CONTROL_MAX_AGE] = \
str(options.max_age)
# CORS 6.2.9
# TODO: more optimal for client preflight request cache would be to
# respond with ALL allowed methods.
response.headers[hdrs.ACCESS_CONTROL_ALLOW_METHODS] = request_method
# CORS 6.2.10
if request_headers:
# Note: case of the headers in the request is changed, but this
# shouldn't be a problem, since the headers should be compared in
# the case-insensitive way.
response.headers[hdrs.ACCESS_CONTROL_ALLOW_HEADERS] = \
",".join(request_headers)
return response | python | async def _preflight_handler(self, request: web.Request):
"""CORS preflight request handler"""
# Handle according to part 6.2 of the CORS specification.
origin = request.headers.get(hdrs.ORIGIN)
if origin is None:
# Terminate CORS according to CORS 6.2.1.
raise web.HTTPForbidden(
text="CORS preflight request failed: "
"origin header is not specified in the request")
# CORS 6.2.3. Doing it out of order is not an error.
request_method = self._parse_request_method(request)
# CORS 6.2.5. Doing it out of order is not an error.
try:
config = \
await self._get_config(request, origin, request_method)
except KeyError:
raise web.HTTPForbidden(
text="CORS preflight request failed: "
"request method {!r} is not allowed "
"for {!r} origin".format(request_method, origin))
if not config:
# No allowed origins for the route.
# Terminate CORS according to CORS 6.2.1.
raise web.HTTPForbidden(
text="CORS preflight request failed: "
"no origins are allowed")
options = config.get(origin, config.get("*"))
if options is None:
# No configuration for the origin - deny.
# Terminate CORS according to CORS 6.2.2.
raise web.HTTPForbidden(
text="CORS preflight request failed: "
"origin '{}' is not allowed".format(origin))
# CORS 6.2.4
request_headers = self._parse_request_headers(request)
# CORS 6.2.6
if options.allow_headers == "*":
pass
else:
disallowed_headers = request_headers - options.allow_headers
if disallowed_headers:
raise web.HTTPForbidden(
text="CORS preflight request failed: "
"headers are not allowed: {}".format(
", ".join(disallowed_headers)))
# Ok, CORS actual request with specified in the preflight request
# parameters is allowed.
# Set appropriate headers and return 200 response.
response = web.Response()
# CORS 6.2.7
response.headers[hdrs.ACCESS_CONTROL_ALLOW_ORIGIN] = origin
if options.allow_credentials:
# Set allowed credentials.
response.headers[hdrs.ACCESS_CONTROL_ALLOW_CREDENTIALS] = _TRUE
# CORS 6.2.8
if options.max_age is not None:
response.headers[hdrs.ACCESS_CONTROL_MAX_AGE] = \
str(options.max_age)
# CORS 6.2.9
# TODO: more optimal for client preflight request cache would be to
# respond with ALL allowed methods.
response.headers[hdrs.ACCESS_CONTROL_ALLOW_METHODS] = request_method
# CORS 6.2.10
if request_headers:
# Note: case of the headers in the request is changed, but this
# shouldn't be a problem, since the headers should be compared in
# the case-insensitive way.
response.headers[hdrs.ACCESS_CONTROL_ALLOW_HEADERS] = \
",".join(request_headers)
return response | [
"async",
"def",
"_preflight_handler",
"(",
"self",
",",
"request",
":",
"web",
".",
"Request",
")",
":",
"# Handle according to part 6.2 of the CORS specification.",
"origin",
"=",
"request",
".",
"headers",
".",
"get",
"(",
"hdrs",
".",
"ORIGIN",
")",
"if",
"origin",
"is",
"None",
":",
"# Terminate CORS according to CORS 6.2.1.",
"raise",
"web",
".",
"HTTPForbidden",
"(",
"text",
"=",
"\"CORS preflight request failed: \"",
"\"origin header is not specified in the request\"",
")",
"# CORS 6.2.3. Doing it out of order is not an error.",
"request_method",
"=",
"self",
".",
"_parse_request_method",
"(",
"request",
")",
"# CORS 6.2.5. Doing it out of order is not an error.",
"try",
":",
"config",
"=",
"await",
"self",
".",
"_get_config",
"(",
"request",
",",
"origin",
",",
"request_method",
")",
"except",
"KeyError",
":",
"raise",
"web",
".",
"HTTPForbidden",
"(",
"text",
"=",
"\"CORS preflight request failed: \"",
"\"request method {!r} is not allowed \"",
"\"for {!r} origin\"",
".",
"format",
"(",
"request_method",
",",
"origin",
")",
")",
"if",
"not",
"config",
":",
"# No allowed origins for the route.",
"# Terminate CORS according to CORS 6.2.1.",
"raise",
"web",
".",
"HTTPForbidden",
"(",
"text",
"=",
"\"CORS preflight request failed: \"",
"\"no origins are allowed\"",
")",
"options",
"=",
"config",
".",
"get",
"(",
"origin",
",",
"config",
".",
"get",
"(",
"\"*\"",
")",
")",
"if",
"options",
"is",
"None",
":",
"# No configuration for the origin - deny.",
"# Terminate CORS according to CORS 6.2.2.",
"raise",
"web",
".",
"HTTPForbidden",
"(",
"text",
"=",
"\"CORS preflight request failed: \"",
"\"origin '{}' is not allowed\"",
".",
"format",
"(",
"origin",
")",
")",
"# CORS 6.2.4",
"request_headers",
"=",
"self",
".",
"_parse_request_headers",
"(",
"request",
")",
"# CORS 6.2.6",
"if",
"options",
".",
"allow_headers",
"==",
"\"*\"",
":",
"pass",
"else",
":",
"disallowed_headers",
"=",
"request_headers",
"-",
"options",
".",
"allow_headers",
"if",
"disallowed_headers",
":",
"raise",
"web",
".",
"HTTPForbidden",
"(",
"text",
"=",
"\"CORS preflight request failed: \"",
"\"headers are not allowed: {}\"",
".",
"format",
"(",
"\", \"",
".",
"join",
"(",
"disallowed_headers",
")",
")",
")",
"# Ok, CORS actual request with specified in the preflight request",
"# parameters is allowed.",
"# Set appropriate headers and return 200 response.",
"response",
"=",
"web",
".",
"Response",
"(",
")",
"# CORS 6.2.7",
"response",
".",
"headers",
"[",
"hdrs",
".",
"ACCESS_CONTROL_ALLOW_ORIGIN",
"]",
"=",
"origin",
"if",
"options",
".",
"allow_credentials",
":",
"# Set allowed credentials.",
"response",
".",
"headers",
"[",
"hdrs",
".",
"ACCESS_CONTROL_ALLOW_CREDENTIALS",
"]",
"=",
"_TRUE",
"# CORS 6.2.8",
"if",
"options",
".",
"max_age",
"is",
"not",
"None",
":",
"response",
".",
"headers",
"[",
"hdrs",
".",
"ACCESS_CONTROL_MAX_AGE",
"]",
"=",
"str",
"(",
"options",
".",
"max_age",
")",
"# CORS 6.2.9",
"# TODO: more optimal for client preflight request cache would be to",
"# respond with ALL allowed methods.",
"response",
".",
"headers",
"[",
"hdrs",
".",
"ACCESS_CONTROL_ALLOW_METHODS",
"]",
"=",
"request_method",
"# CORS 6.2.10",
"if",
"request_headers",
":",
"# Note: case of the headers in the request is changed, but this",
"# shouldn't be a problem, since the headers should be compared in",
"# the case-insensitive way.",
"response",
".",
"headers",
"[",
"hdrs",
".",
"ACCESS_CONTROL_ALLOW_HEADERS",
"]",
"=",
"\",\"",
".",
"join",
"(",
"request_headers",
")",
"return",
"response"
] | CORS preflight request handler | [
"CORS",
"preflight",
"request",
"handler"
] | 14affbd95c88c675eb513c1d295ede1897930f94 | https://github.com/aio-libs/aiohttp-cors/blob/14affbd95c88c675eb513c1d295ede1897930f94/aiohttp_cors/preflight_handler.py#L45-L130 | train | 251,371 |
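From the client side, the preflight handled above is just an OPTIONS request carrying Origin and Access-Control-Request-* headers. A sketch of issuing one with aiohttp (assuming aiohttp is installed and a CORS-enabled server from the earlier setup() example is listening locally):

import asyncio
import aiohttp

async def preflight():
    async with aiohttp.ClientSession() as session:
        async with session.options(
            "http://localhost:8080/resource",
            headers={
                "Origin": "https://client.example.org",
                "Access-Control-Request-Method": "GET",
                "Access-Control-Request-Headers": "X-Requested-With",
            },
        ) as resp:
            print(resp.status)
            print(resp.headers.get("Access-Control-Allow-Origin"))
            print(resp.headers.get("Access-Control-Allow-Methods"))

asyncio.run(preflight())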
aio-libs/aiohttp-cors | aiohttp_cors/urldispatcher_router_adapter.py | ResourcesUrlDispatcherRouterAdapter.add_preflight_handler | def add_preflight_handler(
self,
routing_entity: Union[web.Resource, web.StaticResource,
web.ResourceRoute],
handler):
"""Add OPTIONS handler for all routes defined by `routing_entity`.
Does nothing if CORS handler already handles routing entity.
Should fail if there are conflicting user-defined OPTIONS handlers.
"""
if isinstance(routing_entity, web.Resource):
resource = routing_entity
# Add preflight handler for Resource, if not yet added.
if resource in self._resources_with_preflight_handlers:
# Preflight handler already added for this resource.
return
for route_obj in resource:
if route_obj.method == hdrs.METH_OPTIONS:
if route_obj.handler is handler:
return # already added
else:
raise ValueError(
"{!r} already has OPTIONS handler {!r}"
.format(resource, route_obj.handler))
elif route_obj.method == hdrs.METH_ANY:
if _is_web_view(route_obj):
self._preflight_routes.add(route_obj)
self._resources_with_preflight_handlers.add(resource)
return
else:
raise ValueError("{!r} already has a '*' handler "
"for all methods".format(resource))
preflight_route = resource.add_route(hdrs.METH_OPTIONS, handler)
self._preflight_routes.add(preflight_route)
self._resources_with_preflight_handlers.add(resource)
elif isinstance(routing_entity, web.StaticResource):
resource = routing_entity
# Add preflight handler for Resource, if not yet added.
if resource in self._resources_with_preflight_handlers:
# Preflight handler already added for this resource.
return
resource.set_options_route(handler)
preflight_route = resource._routes[hdrs.METH_OPTIONS]
self._preflight_routes.add(preflight_route)
self._resources_with_preflight_handlers.add(resource)
elif isinstance(routing_entity, web.ResourceRoute):
route = routing_entity
if not self.is_cors_for_resource(route.resource):
self.add_preflight_handler(route.resource, handler)
else:
raise ValueError(
"Resource or ResourceRoute expected, got {!r}".format(
routing_entity)) | python | def add_preflight_handler(
self,
routing_entity: Union[web.Resource, web.StaticResource,
web.ResourceRoute],
handler):
"""Add OPTIONS handler for all routes defined by `routing_entity`.
Does nothing if CORS handler already handles routing entity.
Should fail if there are conflicting user-defined OPTIONS handlers.
"""
if isinstance(routing_entity, web.Resource):
resource = routing_entity
# Add preflight handler for Resource, if not yet added.
if resource in self._resources_with_preflight_handlers:
# Preflight handler already added for this resource.
return
for route_obj in resource:
if route_obj.method == hdrs.METH_OPTIONS:
if route_obj.handler is handler:
return # already added
else:
raise ValueError(
"{!r} already has OPTIONS handler {!r}"
.format(resource, route_obj.handler))
elif route_obj.method == hdrs.METH_ANY:
if _is_web_view(route_obj):
self._preflight_routes.add(route_obj)
self._resources_with_preflight_handlers.add(resource)
return
else:
raise ValueError("{!r} already has a '*' handler "
"for all methods".format(resource))
preflight_route = resource.add_route(hdrs.METH_OPTIONS, handler)
self._preflight_routes.add(preflight_route)
self._resources_with_preflight_handlers.add(resource)
elif isinstance(routing_entity, web.StaticResource):
resource = routing_entity
# Add preflight handler for Resource, if not yet added.
if resource in self._resources_with_preflight_handlers:
# Preflight handler already added for this resource.
return
resource.set_options_route(handler)
preflight_route = resource._routes[hdrs.METH_OPTIONS]
self._preflight_routes.add(preflight_route)
self._resources_with_preflight_handlers.add(resource)
elif isinstance(routing_entity, web.ResourceRoute):
route = routing_entity
if not self.is_cors_for_resource(route.resource):
self.add_preflight_handler(route.resource, handler)
else:
raise ValueError(
"Resource or ResourceRoute expected, got {!r}".format(
routing_entity)) | [
"def",
"add_preflight_handler",
"(",
"self",
",",
"routing_entity",
":",
"Union",
"[",
"web",
".",
"Resource",
",",
"web",
".",
"StaticResource",
",",
"web",
".",
"ResourceRoute",
"]",
",",
"handler",
")",
":",
"if",
"isinstance",
"(",
"routing_entity",
",",
"web",
".",
"Resource",
")",
":",
"resource",
"=",
"routing_entity",
"# Add preflight handler for Resource, if not yet added.",
"if",
"resource",
"in",
"self",
".",
"_resources_with_preflight_handlers",
":",
"# Preflight handler already added for this resource.",
"return",
"for",
"route_obj",
"in",
"resource",
":",
"if",
"route_obj",
".",
"method",
"==",
"hdrs",
".",
"METH_OPTIONS",
":",
"if",
"route_obj",
".",
"handler",
"is",
"handler",
":",
"return",
"# already added",
"else",
":",
"raise",
"ValueError",
"(",
"\"{!r} already has OPTIONS handler {!r}\"",
".",
"format",
"(",
"resource",
",",
"route_obj",
".",
"handler",
")",
")",
"elif",
"route_obj",
".",
"method",
"==",
"hdrs",
".",
"METH_ANY",
":",
"if",
"_is_web_view",
"(",
"route_obj",
")",
":",
"self",
".",
"_preflight_routes",
".",
"add",
"(",
"route_obj",
")",
"self",
".",
"_resources_with_preflight_handlers",
".",
"add",
"(",
"resource",
")",
"return",
"else",
":",
"raise",
"ValueError",
"(",
"\"{!r} already has a '*' handler \"",
"\"for all methods\"",
".",
"format",
"(",
"resource",
")",
")",
"preflight_route",
"=",
"resource",
".",
"add_route",
"(",
"hdrs",
".",
"METH_OPTIONS",
",",
"handler",
")",
"self",
".",
"_preflight_routes",
".",
"add",
"(",
"preflight_route",
")",
"self",
".",
"_resources_with_preflight_handlers",
".",
"add",
"(",
"resource",
")",
"elif",
"isinstance",
"(",
"routing_entity",
",",
"web",
".",
"StaticResource",
")",
":",
"resource",
"=",
"routing_entity",
"# Add preflight handler for Resource, if not yet added.",
"if",
"resource",
"in",
"self",
".",
"_resources_with_preflight_handlers",
":",
"# Preflight handler already added for this resource.",
"return",
"resource",
".",
"set_options_route",
"(",
"handler",
")",
"preflight_route",
"=",
"resource",
".",
"_routes",
"[",
"hdrs",
".",
"METH_OPTIONS",
"]",
"self",
".",
"_preflight_routes",
".",
"add",
"(",
"preflight_route",
")",
"self",
".",
"_resources_with_preflight_handlers",
".",
"add",
"(",
"resource",
")",
"elif",
"isinstance",
"(",
"routing_entity",
",",
"web",
".",
"ResourceRoute",
")",
":",
"route",
"=",
"routing_entity",
"if",
"not",
"self",
".",
"is_cors_for_resource",
"(",
"route",
".",
"resource",
")",
":",
"self",
".",
"add_preflight_handler",
"(",
"route",
".",
"resource",
",",
"handler",
")",
"else",
":",
"raise",
"ValueError",
"(",
"\"Resource or ResourceRoute expected, got {!r}\"",
".",
"format",
"(",
"routing_entity",
")",
")"
] | Add OPTIONS handler for all routes defined by `routing_entity`.
Does nothing if CORS handler already handles routing entity.
Should fail if there are conflicting user-defined OPTIONS handlers. | [
"Add",
"OPTIONS",
"handler",
"for",
"all",
"routes",
"defined",
"by",
"routing_entity",
"."
] | 14affbd95c88c675eb513c1d295ede1897930f94 | https://github.com/aio-libs/aiohttp-cors/blob/14affbd95c88c675eb513c1d295ede1897930f94/aiohttp_cors/urldispatcher_router_adapter.py#L137-L200 | train | 251,372 |
aio-libs/aiohttp-cors | aiohttp_cors/urldispatcher_router_adapter.py | ResourcesUrlDispatcherRouterAdapter.is_preflight_request | def is_preflight_request(self, request: web.Request) -> bool:
"""Is `request` is a CORS preflight request."""
route = self._request_route(request)
if _is_web_view(route, strict=False):
return request.method == 'OPTIONS'
return route in self._preflight_routes | python | def is_preflight_request(self, request: web.Request) -> bool:
"""Is `request` is a CORS preflight request."""
route = self._request_route(request)
if _is_web_view(route, strict=False):
return request.method == 'OPTIONS'
return route in self._preflight_routes | [
"def",
"is_preflight_request",
"(",
"self",
",",
"request",
":",
"web",
".",
"Request",
")",
"->",
"bool",
":",
"route",
"=",
"self",
".",
"_request_route",
"(",
"request",
")",
"if",
"_is_web_view",
"(",
"route",
",",
"strict",
"=",
"False",
")",
":",
"return",
"request",
".",
"method",
"==",
"'OPTIONS'",
"return",
"route",
"in",
"self",
".",
"_preflight_routes"
] | Return True if `request` is a CORS preflight request. | [
"Is",
"request",
"is",
"a",
"CORS",
"preflight",
"request",
"."
] | 14affbd95c88c675eb513c1d295ede1897930f94 | https://github.com/aio-libs/aiohttp-cors/blob/14affbd95c88c675eb513c1d295ede1897930f94/aiohttp_cors/urldispatcher_router_adapter.py#L214-L219 | train | 251,373 |
aio-libs/aiohttp-cors | aiohttp_cors/urldispatcher_router_adapter.py | ResourcesUrlDispatcherRouterAdapter.is_cors_enabled_on_request | def is_cors_enabled_on_request(self, request: web.Request) -> bool:
"""Is `request` is a request for CORS-enabled resource."""
return self._request_resource(request) in self._resource_config | python | def is_cors_enabled_on_request(self, request: web.Request) -> bool:
"""Is `request` is a request for CORS-enabled resource."""
return self._request_resource(request) in self._resource_config | [
"def",
"is_cors_enabled_on_request",
"(",
"self",
",",
"request",
":",
"web",
".",
"Request",
")",
"->",
"bool",
":",
"return",
"self",
".",
"_request_resource",
"(",
"request",
")",
"in",
"self",
".",
"_resource_config"
] | Return True if `request` is a request for a CORS-enabled resource. | [
"Is",
"request",
"is",
"a",
"request",
"for",
"CORS",
"-",
"enabled",
"resource",
"."
] | 14affbd95c88c675eb513c1d295ede1897930f94 | https://github.com/aio-libs/aiohttp-cors/blob/14affbd95c88c675eb513c1d295ede1897930f94/aiohttp_cors/urldispatcher_router_adapter.py#L221-L224 | train | 251,374 |
aio-libs/aiohttp-cors | aiohttp_cors/urldispatcher_router_adapter.py | ResourcesUrlDispatcherRouterAdapter.set_config_for_routing_entity | def set_config_for_routing_entity(
self,
routing_entity: Union[web.Resource, web.StaticResource,
web.ResourceRoute],
config):
"""Record configuration for resource or it's route."""
if isinstance(routing_entity, (web.Resource, web.StaticResource)):
resource = routing_entity
# Add resource configuration or fail if it's already added.
if resource in self._resource_config:
raise ValueError(
"CORS is already configured for {!r} resource.".format(
resource))
self._resource_config[resource] = _ResourceConfig(
default_config=config)
elif isinstance(routing_entity, web.ResourceRoute):
route = routing_entity
# Add resource's route configuration or fail if it's already added.
if route.resource not in self._resource_config:
self.set_config_for_routing_entity(route.resource, config)
if route.resource not in self._resource_config:
raise ValueError(
"Can't setup CORS for {!r} request, "
"CORS must be enabled for route's resource first.".format(
route))
resource_config = self._resource_config[route.resource]
if route.method in resource_config.method_config:
raise ValueError(
"Can't setup CORS for {!r} route: CORS already "
"configured on resource {!r} for {} method".format(
route, route.resource, route.method))
resource_config.method_config[route.method] = config
else:
raise ValueError(
"Resource or ResourceRoute expected, got {!r}".format(
routing_entity)) | python | def set_config_for_routing_entity(
self,
routing_entity: Union[web.Resource, web.StaticResource,
web.ResourceRoute],
config):
"""Record configuration for resource or it's route."""
if isinstance(routing_entity, (web.Resource, web.StaticResource)):
resource = routing_entity
# Add resource configuration or fail if it's already added.
if resource in self._resource_config:
raise ValueError(
"CORS is already configured for {!r} resource.".format(
resource))
self._resource_config[resource] = _ResourceConfig(
default_config=config)
elif isinstance(routing_entity, web.ResourceRoute):
route = routing_entity
# Add resource's route configuration or fail if it's already added.
if route.resource not in self._resource_config:
self.set_config_for_routing_entity(route.resource, config)
if route.resource not in self._resource_config:
raise ValueError(
"Can't setup CORS for {!r} request, "
"CORS must be enabled for route's resource first.".format(
route))
resource_config = self._resource_config[route.resource]
if route.method in resource_config.method_config:
raise ValueError(
"Can't setup CORS for {!r} route: CORS already "
"configured on resource {!r} for {} method".format(
route, route.resource, route.method))
resource_config.method_config[route.method] = config
else:
raise ValueError(
"Resource or ResourceRoute expected, got {!r}".format(
routing_entity)) | [
"def",
"set_config_for_routing_entity",
"(",
"self",
",",
"routing_entity",
":",
"Union",
"[",
"web",
".",
"Resource",
",",
"web",
".",
"StaticResource",
",",
"web",
".",
"ResourceRoute",
"]",
",",
"config",
")",
":",
"if",
"isinstance",
"(",
"routing_entity",
",",
"(",
"web",
".",
"Resource",
",",
"web",
".",
"StaticResource",
")",
")",
":",
"resource",
"=",
"routing_entity",
"# Add resource configuration or fail if it's already added.",
"if",
"resource",
"in",
"self",
".",
"_resource_config",
":",
"raise",
"ValueError",
"(",
"\"CORS is already configured for {!r} resource.\"",
".",
"format",
"(",
"resource",
")",
")",
"self",
".",
"_resource_config",
"[",
"resource",
"]",
"=",
"_ResourceConfig",
"(",
"default_config",
"=",
"config",
")",
"elif",
"isinstance",
"(",
"routing_entity",
",",
"web",
".",
"ResourceRoute",
")",
":",
"route",
"=",
"routing_entity",
"# Add resource's route configuration or fail if it's already added.",
"if",
"route",
".",
"resource",
"not",
"in",
"self",
".",
"_resource_config",
":",
"self",
".",
"set_config_for_routing_entity",
"(",
"route",
".",
"resource",
",",
"config",
")",
"if",
"route",
".",
"resource",
"not",
"in",
"self",
".",
"_resource_config",
":",
"raise",
"ValueError",
"(",
"\"Can't setup CORS for {!r} request, \"",
"\"CORS must be enabled for route's resource first.\"",
".",
"format",
"(",
"route",
")",
")",
"resource_config",
"=",
"self",
".",
"_resource_config",
"[",
"route",
".",
"resource",
"]",
"if",
"route",
".",
"method",
"in",
"resource_config",
".",
"method_config",
":",
"raise",
"ValueError",
"(",
"\"Can't setup CORS for {!r} route: CORS already \"",
"\"configured on resource {!r} for {} method\"",
".",
"format",
"(",
"route",
",",
"route",
".",
"resource",
",",
"route",
".",
"method",
")",
")",
"resource_config",
".",
"method_config",
"[",
"route",
".",
"method",
"]",
"=",
"config",
"else",
":",
"raise",
"ValueError",
"(",
"\"Resource or ResourceRoute expected, got {!r}\"",
".",
"format",
"(",
"routing_entity",
")",
")"
] | Record configuration for resource or its route. | [
"Record",
"configuration",
"for",
"resource",
"or",
"it",
"s",
"route",
"."
] | 14affbd95c88c675eb513c1d295ede1897930f94 | https://github.com/aio-libs/aiohttp-cors/blob/14affbd95c88c675eb513c1d295ede1897930f94/aiohttp_cors/urldispatcher_router_adapter.py#L226-L271 | train | 251,375 |
aio-libs/aiohttp-cors | aiohttp_cors/urldispatcher_router_adapter.py | ResourcesUrlDispatcherRouterAdapter.get_non_preflight_request_config | def get_non_preflight_request_config(self, request: web.Request):
"""Get stored CORS configuration for routing entity that handles
specified request."""
assert self.is_cors_enabled_on_request(request)
resource = self._request_resource(request)
resource_config = self._resource_config[resource]
# Take Route config (if any) with defaults from Resource CORS
# configuration and global defaults.
route = request.match_info.route
if _is_web_view(route, strict=False):
method_config = request.match_info.handler.get_request_config(
request, request.method)
else:
method_config = resource_config.method_config.get(request.method,
{})
defaulted_config = collections.ChainMap(
method_config,
resource_config.default_config,
self._default_config)
return defaulted_config | python | def get_non_preflight_request_config(self, request: web.Request):
"""Get stored CORS configuration for routing entity that handles
specified request."""
assert self.is_cors_enabled_on_request(request)
resource = self._request_resource(request)
resource_config = self._resource_config[resource]
# Take Route config (if any) with defaults from Resource CORS
# configuration and global defaults.
route = request.match_info.route
if _is_web_view(route, strict=False):
method_config = request.match_info.handler.get_request_config(
request, request.method)
else:
method_config = resource_config.method_config.get(request.method,
{})
defaulted_config = collections.ChainMap(
method_config,
resource_config.default_config,
self._default_config)
return defaulted_config | [
"def",
"get_non_preflight_request_config",
"(",
"self",
",",
"request",
":",
"web",
".",
"Request",
")",
":",
"assert",
"self",
".",
"is_cors_enabled_on_request",
"(",
"request",
")",
"resource",
"=",
"self",
".",
"_request_resource",
"(",
"request",
")",
"resource_config",
"=",
"self",
".",
"_resource_config",
"[",
"resource",
"]",
"# Take Route config (if any) with defaults from Resource CORS",
"# configuration and global defaults.",
"route",
"=",
"request",
".",
"match_info",
".",
"route",
"if",
"_is_web_view",
"(",
"route",
",",
"strict",
"=",
"False",
")",
":",
"method_config",
"=",
"request",
".",
"match_info",
".",
"handler",
".",
"get_request_config",
"(",
"request",
",",
"request",
".",
"method",
")",
"else",
":",
"method_config",
"=",
"resource_config",
".",
"method_config",
".",
"get",
"(",
"request",
".",
"method",
",",
"{",
"}",
")",
"defaulted_config",
"=",
"collections",
".",
"ChainMap",
"(",
"method_config",
",",
"resource_config",
".",
"default_config",
",",
"self",
".",
"_default_config",
")",
"return",
"defaulted_config"
] | Get stored CORS configuration for routing entity that handles
specified request. | [
"Get",
"stored",
"CORS",
"configuration",
"for",
"routing",
"entity",
"that",
"handles",
"specified",
"request",
"."
] | 14affbd95c88c675eb513c1d295ede1897930f94 | https://github.com/aio-libs/aiohttp-cors/blob/14affbd95c88c675eb513c1d295ede1897930f94/aiohttp_cors/urldispatcher_router_adapter.py#L302-L324 | train | 251,376 |
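The defaulted_config built above relies on ChainMap's left-to-right lookup: per-method settings win, then the resource-level defaults, then the global defaults. In isolation:

import collections

method_config = {"allow_credentials": True}
resource_default = {"allow_credentials": False, "max_age": 600}
global_default = {"allow_credentials": False, "max_age": 0, "expose_headers": ()}

effective = collections.ChainMap(method_config, resource_default, global_default)
print(effective["allow_credentials"])  # True  (method layer)
print(effective["max_age"])            # 600   (resource layer)
print(effective["expose_headers"])     # ()    (global layer)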
HewlettPackard/python-hpOneView | hpOneView/resources/facilities/datacenters.py | Datacenters.add | def add(self, information, timeout=-1):
"""
Adds a data center resource based upon the attributes specified.
Args:
information: Data center information
timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation
in OneView; it just stops waiting for its completion.
Returns:
dict: Added data center.
"""
return self._client.create(information, timeout=timeout) | python | def add(self, information, timeout=-1):
"""
Adds a data center resource based upon the attributes specified.
Args:
information: Data center information
timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation
in OneView; it just stops waiting for its completion.
Returns:
dict: Added data center.
"""
return self._client.create(information, timeout=timeout) | [
"def",
"add",
"(",
"self",
",",
"information",
",",
"timeout",
"=",
"-",
"1",
")",
":",
"return",
"self",
".",
"_client",
".",
"create",
"(",
"information",
",",
"timeout",
"=",
"timeout",
")"
] | Adds a data center resource based upon the attributes specified.
Args:
information: Data center information
timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation
in OneView; it just stops waiting for its completion.
Returns:
dict: Added data center. | [
"Adds",
"a",
"data",
"center",
"resource",
"based",
"upon",
"the",
"attributes",
"specified",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/facilities/datacenters.py#L138-L150 | train | 251,377 |
HewlettPackard/python-hpOneView | hpOneView/resources/facilities/datacenters.py | Datacenters.update | def update(self, resource, timeout=-1):
"""
Updates the specified data center resource.
Args:
resource (dict): Object to update.
timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation
in OneView; it just stops waiting for its completion.
Returns:
dict: Updated data center.
"""
return self._client.update(resource, timeout=timeout) | python | def update(self, resource, timeout=-1):
"""
Updates the specified data center resource.
Args:
resource (dict): Object to update.
timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation
in OneView; it just stops waiting for its completion.
Returns:
dict: Updated data center.
"""
return self._client.update(resource, timeout=timeout) | [
"def",
"update",
"(",
"self",
",",
"resource",
",",
"timeout",
"=",
"-",
"1",
")",
":",
"return",
"self",
".",
"_client",
".",
"update",
"(",
"resource",
",",
"timeout",
"=",
"timeout",
")"
] | Updates the specified data center resource.
Args:
resource (dict): Object to update.
timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation
in OneView; it just stops waiting for its completion.
Returns:
dict: Updated data center. | [
"Updates",
"the",
"specified",
"data",
"center",
"resource",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/facilities/datacenters.py#L152-L164 | train | 251,378 |
HewlettPackard/python-hpOneView | hpOneView/resources/facilities/datacenters.py | Datacenters.remove_all | def remove_all(self, filter, force=False, timeout=-1):
"""
Deletes the set of datacenters according to the specified parameters. A filter is required to identify the set
of resources to be deleted.
Args:
filter:
A general filter/query string to narrow the list of items that will be removed.
force:
If set to true, the operation completes despite any problems with
network connectivity or errors on the resource itself. The default is false.
timeout:
Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation
in OneView; it just stops waiting for its completion.
Returns:
bool: operation success
"""
return self._client.delete_all(filter=filter, force=force, timeout=timeout) | python | def remove_all(self, filter, force=False, timeout=-1):
"""
Deletes the set of datacenters according to the specified parameters. A filter is required to identify the set
of resources to be deleted.
Args:
filter:
A general filter/query string to narrow the list of items that will be removed.
force:
If set to true, the operation completes despite any problems with
network connectivity or errors on the resource itself. The default is false.
timeout:
Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation
in OneView; it just stops waiting for its completion.
Returns:
bool: operation success
"""
return self._client.delete_all(filter=filter, force=force, timeout=timeout) | [
"def",
"remove_all",
"(",
"self",
",",
"filter",
",",
"force",
"=",
"False",
",",
"timeout",
"=",
"-",
"1",
")",
":",
"return",
"self",
".",
"_client",
".",
"delete_all",
"(",
"filter",
"=",
"filter",
",",
"force",
"=",
"force",
",",
"timeout",
"=",
"timeout",
")"
] | Deletes the set of datacenters according to the specified parameters. A filter is required to identify the set
of resources to be deleted.
Args:
filter:
A general filter/query string to narrow the list of items that will be removed.
force:
If set to true, the operation completes despite any problems with
network connectivity or errors on the resource itself. The default is false.
timeout:
Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation
in OneView; it just stops waiting for its completion.
Returns:
bool: operation success | [
"Deletes",
"the",
"set",
"of",
"datacenters",
"according",
"to",
"the",
"specified",
"parameters",
".",
"A",
"filter",
"is",
"required",
"to",
"identify",
"the",
"set",
"of",
"resources",
"to",
"be",
"deleted",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/facilities/datacenters.py#L166-L184 | train | 251,379 |
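A hedged sketch of the filter-based bulk removal above. The filter expression and the client wiring are assumptions; only the remove_all(filter, force=False, timeout=-1) signature is taken from the record.

from hpOneView.oneview_client import OneViewClient

client = OneViewClient({"ip": "oneview.example.com",
                        "credentials": {"userName": "administrator", "password": "secret"}})

# Illustrative filter; check the appliance REST documentation for the exact grammar.
removed = client.datacenters.remove_all(filter="name matches 'MyDatacenter%'",
                                        force=False, timeout=120)
print(removed)  # True when the matching resources were deleted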
HewlettPackard/python-hpOneView | hpOneView/resources/storage/drive_enclosures.py | DriveEnclosures.get_by | def get_by(self, field, value):
"""
Gets all drive enclosures that match the filter.
The search is case-insensitive.
Args:
field: field name to filter.
value: value to filter.
Returns:
list: A list of drive enclosures.
"""
return self._client.get_by(field=field, value=value) | python | def get_by(self, field, value):
"""
Gets all drive enclosures that match the filter.
The search is case-insensitive.
Args:
field: field name to filter.
value: value to filter.
Returns:
list: A list of drive enclosures.
"""
return self._client.get_by(field=field, value=value) | [
"def",
"get_by",
"(",
"self",
",",
"field",
",",
"value",
")",
":",
"return",
"self",
".",
"_client",
".",
"get_by",
"(",
"field",
"=",
"field",
",",
"value",
"=",
"value",
")"
] | Gets all drive enclosures that match the filter.
The search is case-insensitive.
Args:
field: field name to filter.
value: value to filter.
Returns:
list: A list of drive enclosures. | [
"Gets",
"all",
"drive",
"enclosures",
"that",
"match",
"the",
"filter",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/storage/drive_enclosures.py#L91-L104 | train | 251,380 |
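A short sketch of the case-insensitive lookup above; the resource name used for the match is made up, and the drive_enclosures client attribute is an assumption.

from hpOneView.oneview_client import OneViewClient

client = OneViewClient({"ip": "oneview.example.com",
                        "credentials": {"userName": "administrator", "password": "secret"}})

matches = client.drive_enclosures.get_by("name", "0000A66102, bay 1")  # illustrative name
for enclosure in matches:
    print(enclosure["name"], enclosure["uri"])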
HewlettPackard/python-hpOneView | hpOneView/resources/storage/drive_enclosures.py | DriveEnclosures.refresh_state | def refresh_state(self, id_or_uri, configuration, timeout=-1):
"""
Refreshes a drive enclosure.
Args:
id_or_uri: Can be either the resource ID or the resource URI.
configuration: Configuration
timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation
in OneView; it just stops waiting for its completion.
Returns:
dict: Drive Enclosure
"""
uri = self._client.build_uri(id_or_uri) + self.REFRESH_STATE_PATH
return self._client.update(resource=configuration, uri=uri, timeout=timeout) | python | def refresh_state(self, id_or_uri, configuration, timeout=-1):
"""
Refreshes a drive enclosure.
Args:
id_or_uri: Can be either the resource ID or the resource URI.
configuration: Configuration
timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation
in OneView; it just stops waiting for its completion.
Returns:
dict: Drive Enclosure
"""
uri = self._client.build_uri(id_or_uri) + self.REFRESH_STATE_PATH
return self._client.update(resource=configuration, uri=uri, timeout=timeout) | [
"def",
"refresh_state",
"(",
"self",
",",
"id_or_uri",
",",
"configuration",
",",
"timeout",
"=",
"-",
"1",
")",
":",
"uri",
"=",
"self",
".",
"_client",
".",
"build_uri",
"(",
"id_or_uri",
")",
"+",
"self",
".",
"REFRESH_STATE_PATH",
"return",
"self",
".",
"_client",
".",
"update",
"(",
"resource",
"=",
"configuration",
",",
"uri",
"=",
"uri",
",",
"timeout",
"=",
"timeout",
")"
] | Refreshes a drive enclosure.
Args:
id_or_uri: Can be either the resource ID or the resource URI.
configuration: Configuration
timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation
in OneView; it just stops waiting for its completion.
Returns:
dict: Drive Enclosure | [
"Refreshes",
"a",
"drive",
"enclosure",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/storage/drive_enclosures.py#L119-L133 | train | 251,381 |
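A sketch of triggering a refresh with the method above. The refreshState value and the enclosure ID are assumptions; only the refresh_state(id_or_uri, configuration, timeout=-1) signature is documented in the record.

from hpOneView.oneview_client import OneViewClient

client = OneViewClient({"ip": "oneview.example.com",
                        "credentials": {"userName": "administrator", "password": "secret"}})

configuration = {"refreshState": "RefreshPending"}  # assumed payload key/value
drive_enclosure = client.drive_enclosures.refresh_state(
    "ad28cf21-8b15-4f92-bdcf-51cb2042db32",  # hypothetical drive enclosure ID
    configuration,
    timeout=600)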
HewlettPackard/python-hpOneView | hpOneView/resources/uncategorized/os_deployment_servers.py | OsDeploymentServers.get_all | def get_all(self, start=0, count=-1, filter='', fields='', query='', sort='', view=''):
"""
Gets a list of Deployment Servers based on optional sorting and filtering, and constrained by start and count
parameters.
Args:
start:
The first item to return, using 0-based indexing.
If not specified, the default is 0 - start with the first available item.
count:
The number of resources to return. A count of -1 requests all items.
The actual number of items in the response might differ from the requested
count if the sum of start and count exceeds the total number of items.
filter (list or str):
A general filter/query string to narrow the list of items returned. The
default is no filter; all resources are returned.
fields:
Specifies which fields should be returned in the result set.
query:
A general query string to narrow the list of resources returned. The default
is no query - all resources are returned.
sort:
The sort order of the returned data set. By default, the sort order is based
on create time with the oldest entry first.
view:
Return a specific subset of the attributes of the resource or collection, by
specifying the name of a predefined view. The default view is expand - show all
attributes of the resource and all elements of collections of resources.
Returns:
list: Os Deployment Servers
"""
return self._client.get_all(start, count, filter=filter, sort=sort, query=query, fields=fields, view=view) | python | def get_all(self, start=0, count=-1, filter='', fields='', query='', sort='', view=''):
"""
Gets a list of Deployment Servers based on optional sorting and filtering, and constrained by start and count
parameters.
Args:
start:
The first item to return, using 0-based indexing.
If not specified, the default is 0 - start with the first available item.
count:
The number of resources to return. A count of -1 requests all items.
The actual number of items in the response might differ from the requested
count if the sum of start and count exceeds the total number of items.
filter (list or str):
A general filter/query string to narrow the list of items returned. The
default is no filter; all resources are returned.
fields:
Specifies which fields should be returned in the result set.
query:
A general query string to narrow the list of resources returned. The default
is no query - all resources are returned.
sort:
The sort order of the returned data set. By default, the sort order is based
on create time with the oldest entry first.
view:
Return a specific subset of the attributes of the resource or collection, by
specifying the name of a predefined view. The default view is expand - show all
attributes of the resource and all elements of collections of resources.
Returns:
list: Os Deployment Servers
"""
return self._client.get_all(start, count, filter=filter, sort=sort, query=query, fields=fields, view=view) | [
"def",
"get_all",
"(",
"self",
",",
"start",
"=",
"0",
",",
"count",
"=",
"-",
"1",
",",
"filter",
"=",
"''",
",",
"fields",
"=",
"''",
",",
"query",
"=",
"''",
",",
"sort",
"=",
"''",
",",
"view",
"=",
"''",
")",
":",
"return",
"self",
".",
"_client",
".",
"get_all",
"(",
"start",
",",
"count",
",",
"filter",
"=",
"filter",
",",
"sort",
"=",
"sort",
",",
"query",
"=",
"query",
",",
"fields",
"=",
"fields",
",",
"view",
"=",
"view",
")"
] | Gets a list of Deployment Servers based on optional sorting and filtering, and constrained by start and count
parameters.
Args:
start:
The first item to return, using 0-based indexing.
If not specified, the default is 0 - start with the first available item.
count:
The number of resources to return. A count of -1 requests all items.
The actual number of items in the response might differ from the requested
count if the sum of start and count exceeds the total number of items.
filter (list or str):
A general filter/query string to narrow the list of items returned. The
default is no filter; all resources are returned.
fields:
Specifies which fields should be returned in the result set.
query:
A general query string to narrow the list of resources returned. The default
is no query - all resources are returned.
sort:
The sort order of the returned data set. By default, the sort order is based
on create time with the oldest entry first.
view:
Return a specific subset of the attributes of the resource or collection, by
specifying the name of a predefined view. The default view is expand - show all
attributes of the resource and all elements of collections of resources.
Returns:
list: Os Deployment Servers | [
"Gets",
"a",
"list",
"of",
"Deployment",
"Servers",
"based",
"on",
"optional",
"sorting",
"and",
"filtering",
"and",
"constrained",
"by",
"start",
"and",
"count",
"parameters",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/uncategorized/os_deployment_servers.py#L43-L75 | train | 251,382 |
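A paging and sorting sketch for the listing call above. The filter and sort expressions are assumptions about the appliance query grammar; the parameter names match the documented signature.

from hpOneView.oneview_client import OneViewClient

client = OneViewClient({"ip": "oneview.example.com",
                        "credentials": {"userName": "administrator", "password": "secret"}})

# First ten deployment servers, newest first.
servers = client.os_deployment_servers.get_all(start=0, count=10,
                                               filter="state='Connected'",
                                               sort="created:descending")
for server in servers:
    print(server["name"])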
HewlettPackard/python-hpOneView | hpOneView/resources/uncategorized/os_deployment_servers.py | OsDeploymentServers.delete | def delete(self, resource, force=False, timeout=-1):
"""
Deletes a Deployment Server object based on its UUID or URI.
Args:
resource (dict):
Object to delete.
force:
If set to true, the operation completes despite any problems with
network connectivity or errors on the resource itself. The default is false.
timeout:
Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation
in OneView; it just stops waiting for its completion.
Returns:
bool: Indicates if the volume was successfully deleted.
"""
return self._client.delete(resource, force=force, timeout=timeout) | python | def delete(self, resource, force=False, timeout=-1):
"""
Deletes a Deployment Server object based on its UUID or URI.
Args:
resource (dict):
Object to delete.
force:
If set to true, the operation completes despite any problems with
network connectivity or errors on the resource itself. The default is false.
timeout:
Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation
in OneView; it just stops waiting for its completion.
Returns:
bool: Indicates if the volume was successfully deleted.
"""
return self._client.delete(resource, force=force, timeout=timeout) | [
"def",
"delete",
"(",
"self",
",",
"resource",
",",
"force",
"=",
"False",
",",
"timeout",
"=",
"-",
"1",
")",
":",
"return",
"self",
".",
"_client",
".",
"delete",
"(",
"resource",
",",
"force",
"=",
"force",
",",
"timeout",
"=",
"timeout",
")"
] | Deletes a Deployment Server object based on its UUID or URI.
Args:
resource (dict):
Object to delete.
force:
If set to true, the operation completes despite any problems with
network connectivity or errors on the resource itself. The default is false.
timeout:
Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation
in OneView; it just stops waiting for its completion.
Returns:
bool: Indicates if the volume was successfully deleted. | [
"Deletes",
"a",
"Deployment",
"Server",
"object",
"based",
"on",
"its",
"UUID",
"or",
"URI",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/uncategorized/os_deployment_servers.py#L153-L171 | train | 251,383 |
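A sketch of deleting a Deployment Server with the call above. The resource dict (and its URI) is hypothetical; in practice it would come from a previous lookup.

from hpOneView.oneview_client import OneViewClient

client = OneViewClient({"ip": "oneview.example.com",
                        "credentials": {"userName": "administrator", "password": "secret"}})

# Hypothetical resource; normally retrieved via get_all() or a lookup helper.
os_deployment_server = {"uri": "/rest/deployment-servers/aca554e2-09e1-4b1f-9075-e2a9a04b6d28"}
deleted = client.os_deployment_servers.delete(os_deployment_server, force=False, timeout=300)
print(deleted)  # True if the resource was removed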
HewlettPackard/python-hpOneView | hpOneView/resources/uncategorized/os_deployment_servers.py | OsDeploymentServers.get_appliances | def get_appliances(self, start=0, count=-1, filter='', fields='', query='', sort='', view=''):
"""
Gets a list of all the Image Streamer resources based on optional sorting and filtering, and constrained
by start and count parameters.
Args:
start:
The first item to return, using 0-based indexing.
If not specified, the default is 0 - start with the first available item.
count:
The number of resources to return. A count of -1 requests all items.
The actual number of items in the response might differ from the requested
count if the sum of start and count exceeds the total number of items.
filter (list or str):
A general filter/query string to narrow the list of items returned. The
default is no filter; all resources are returned.
fields:
Specifies which fields should be returned in the result set.
query:
A general query string to narrow the list of resources returned. The default
is no query - all resources are returned.
sort:
The sort order of the returned data set. By default, the sort order is based
on create time with the oldest entry first.
view:
Return a specific subset of the attributes of the resource or collection, by
specifying the name of a predefined view. The default view is expand - show all
attributes of the resource and all elements of collections of resources.
Returns:
list: Image Streamer resources associated with the Deployment Servers.
"""
uri = self.URI + '/image-streamer-appliances'
return self._client.get_all(start, count, filter=filter, sort=sort, query=query, fields=fields, view=view,
uri=uri) | python | def get_appliances(self, start=0, count=-1, filter='', fields='', query='', sort='', view=''):
"""
Gets a list of all the Image Streamer resources based on optional sorting and filtering, and constrained
by start and count parameters.
Args:
start:
The first item to return, using 0-based indexing.
If not specified, the default is 0 - start with the first available item.
count:
The number of resources to return. A count of -1 requests all items.
The actual number of items in the response might differ from the requested
count if the sum of start and count exceeds the total number of items.
filter (list or str):
A general filter/query string to narrow the list of items returned. The
default is no filter; all resources are returned.
fields:
Specifies which fields should be returned in the result set.
query:
A general query string to narrow the list of resources returned. The default
is no query - all resources are returned.
sort:
The sort order of the returned data set. By default, the sort order is based
on create time with the oldest entry first.
view:
Return a specific subset of the attributes of the resource or collection, by
specifying the name of a predefined view. The default view is expand - show all
attributes of the resource and all elements of collections of resources.
Returns:
list: Image Streamer resources associated with the Deployment Servers.
"""
uri = self.URI + '/image-streamer-appliances'
return self._client.get_all(start, count, filter=filter, sort=sort, query=query, fields=fields, view=view,
uri=uri) | [
"def",
"get_appliances",
"(",
"self",
",",
"start",
"=",
"0",
",",
"count",
"=",
"-",
"1",
",",
"filter",
"=",
"''",
",",
"fields",
"=",
"''",
",",
"query",
"=",
"''",
",",
"sort",
"=",
"''",
",",
"view",
"=",
"''",
")",
":",
"uri",
"=",
"self",
".",
"URI",
"+",
"'/image-streamer-appliances'",
"return",
"self",
".",
"_client",
".",
"get_all",
"(",
"start",
",",
"count",
",",
"filter",
"=",
"filter",
",",
"sort",
"=",
"sort",
",",
"query",
"=",
"query",
",",
"fields",
"=",
"fields",
",",
"view",
"=",
"view",
",",
"uri",
"=",
"uri",
")"
] | Gets a list of all the Image Streamer resources based on optional sorting and filtering, and constrained
by start and count parameters.
Args:
start:
The first item to return, using 0-based indexing.
If not specified, the default is 0 - start with the first available item.
count:
The number of resources to return. A count of -1 requests all items.
The actual number of items in the response might differ from the requested
count if the sum of start and count exceeds the total number of items.
filter (list or str):
A general filter/query string to narrow the list of items returned. The
default is no filter; all resources are returned.
fields:
Specifies which fields should be returned in the result set.
query:
A general query string to narrow the list of resources returned. The default
is no query - all resources are returned.
sort:
The sort order of the returned data set. By default, the sort order is based
on create time with the oldest entry first.
view:
Return a specific subset of the attributes of the resource or collection, by
specifying the name of a predefined view. The default view is expand - show all
attributes of the resource and all elements of collections of resources.
Returns:
list: Image Streamer resources associated with the Deployment Servers. | [
"Gets",
"a",
"list",
"of",
"all",
"the",
"Image",
"Streamer",
"resources",
"based",
"on",
"optional",
"sorting",
"and",
"filtering",
"and",
"constrained",
"by",
"start",
"and",
"count",
"parameters",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/uncategorized/os_deployment_servers.py#L183-L217 | train | 251,384 |
HewlettPackard/python-hpOneView | hpOneView/resources/uncategorized/os_deployment_servers.py | OsDeploymentServers.get_appliance | def get_appliance(self, id_or_uri, fields=''):
"""
Gets the particular Image Streamer resource based on its ID or URI.
Args:
id_or_uri:
Can be either the Os Deployment Server ID or the URI
fields:
Specifies which fields should be returned in the result.
Returns:
dict: Image Streamer resource.
"""
uri = self.URI + '/image-streamer-appliances/' + extract_id_from_uri(id_or_uri)
if fields:
uri += '?fields=' + fields
return self._client.get(uri) | python | def get_appliance(self, id_or_uri, fields=''):
"""
Gets the particular Image Streamer resource based on its ID or URI.
Args:
id_or_uri:
Can be either the Os Deployment Server ID or the URI
fields:
Specifies which fields should be returned in the result.
Returns:
dict: Image Streamer resource.
"""
uri = self.URI + '/image-streamer-appliances/' + extract_id_from_uri(id_or_uri)
if fields:
uri += '?fields=' + fields
return self._client.get(uri) | [
"def",
"get_appliance",
"(",
"self",
",",
"id_or_uri",
",",
"fields",
"=",
"''",
")",
":",
"uri",
"=",
"self",
".",
"URI",
"+",
"'/image-streamer-appliances/'",
"+",
"extract_id_from_uri",
"(",
"id_or_uri",
")",
"if",
"fields",
":",
"uri",
"+=",
"'?fields='",
"+",
"fields",
"return",
"self",
".",
"_client",
".",
"get",
"(",
"uri",
")"
] | Gets the particular Image Streamer resource based on its ID or URI.
Args:
id_or_uri:
Can be either the Os Deployment Server ID or the URI
fields:
Specifies which fields should be returned in the result.
Returns:
dict: Image Streamer resource. | [
"Gets",
"the",
"particular",
"Image",
"Streamer",
"resource",
"based",
"on",
"its",
"ID",
"or",
"URI",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/uncategorized/os_deployment_servers.py#L219-L236 | train | 251,385 |
HewlettPackard/python-hpOneView | hpOneView/resources/uncategorized/os_deployment_servers.py | OsDeploymentServers.get_appliance_by_name | def get_appliance_by_name(self, appliance_name):
"""
Gets the particular Image Streamer resource based on its name.
Args:
appliance_name:
The Image Streamer resource name.
Returns:
dict: Image Streamer resource.
"""
appliances = self.get_appliances()
if appliances:
for appliance in appliances:
if appliance['name'] == appliance_name:
return appliance
return None | python | def get_appliance_by_name(self, appliance_name):
"""
Gets the particular Image Streamer resource based on its name.
Args:
appliance_name:
The Image Streamer resource name.
Returns:
dict: Image Streamer resource.
"""
appliances = self.get_appliances()
if appliances:
for appliance in appliances:
if appliance['name'] == appliance_name:
return appliance
return None | [
"def",
"get_appliance_by_name",
"(",
"self",
",",
"appliance_name",
")",
":",
"appliances",
"=",
"self",
".",
"get_appliances",
"(",
")",
"if",
"appliances",
":",
"for",
"appliance",
"in",
"appliances",
":",
"if",
"appliance",
"[",
"'name'",
"]",
"==",
"appliance_name",
":",
"return",
"appliance",
"return",
"None"
] | Gets the particular Image Streamer resource based on its name.
Args:
appliance_name:
The Image Streamer resource name.
Returns:
dict: Image Streamer resource. | [
"Gets",
"the",
"particular",
"Image",
"Streamer",
"resource",
"based",
"on",
"its",
"name",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/uncategorized/os_deployment_servers.py#L238-L255 | train | 251,386 |
HewlettPackard/python-hpOneView | hpOneView/resources/storage/storage_systems.py | StorageSystems.get_managed_ports | def get_managed_ports(self, id_or_uri, port_id_or_uri=''):
"""
Gets all ports or a specific managed target port for the specified storage system.
Args:
id_or_uri: Can be either the storage system id or the storage system uri.
port_id_or_uri: Can be either the port id or the port uri.
Returns:
dict: Managed ports.
"""
if port_id_or_uri:
uri = self._client.build_uri(port_id_or_uri)
if "/managedPorts" not in uri:
uri = self._client.build_uri(id_or_uri) + "/managedPorts" + "/" + port_id_or_uri
else:
uri = self._client.build_uri(id_or_uri) + "/managedPorts"
return self._client.get_collection(uri) | python | def get_managed_ports(self, id_or_uri, port_id_or_uri=''):
"""
Gets all ports or a specific managed target port for the specified storage system.
Args:
id_or_uri: Can be either the storage system id or the storage system uri.
port_id_or_uri: Can be either the port id or the port uri.
Returns:
dict: Managed ports.
"""
if port_id_or_uri:
uri = self._client.build_uri(port_id_or_uri)
if "/managedPorts" not in uri:
uri = self._client.build_uri(id_or_uri) + "/managedPorts" + "/" + port_id_or_uri
else:
uri = self._client.build_uri(id_or_uri) + "/managedPorts"
return self._client.get_collection(uri) | [
"def",
"get_managed_ports",
"(",
"self",
",",
"id_or_uri",
",",
"port_id_or_uri",
"=",
"''",
")",
":",
"if",
"port_id_or_uri",
":",
"uri",
"=",
"self",
".",
"_client",
".",
"build_uri",
"(",
"port_id_or_uri",
")",
"if",
"\"/managedPorts\"",
"not",
"in",
"uri",
":",
"uri",
"=",
"self",
".",
"_client",
".",
"build_uri",
"(",
"id_or_uri",
")",
"+",
"\"/managedPorts\"",
"+",
"\"/\"",
"+",
"port_id_or_uri",
"else",
":",
"uri",
"=",
"self",
".",
"_client",
".",
"build_uri",
"(",
"id_or_uri",
")",
"+",
"\"/managedPorts\"",
"return",
"self",
".",
"_client",
".",
"get_collection",
"(",
"uri",
")"
] | Gets all ports or a specific managed target port for the specified storage system.
Args:
id_or_uri: Can be either the storage system id or the storage system uri.
port_id_or_uri: Can be either the port id or the port uri.
Returns:
dict: Managed ports. | [
"Gets",
"all",
"ports",
"or",
"a",
"specific",
"managed",
"target",
"port",
"for",
"the",
"specified",
"storage",
"system",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/storage/storage_systems.py#L163-L182 | train | 251,387 |
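A sketch of both call shapes supported by get_managed_ports above: all ports of a storage system, or one specific port. The IDs are made up.

from hpOneView.oneview_client import OneViewClient

client = OneViewClient({"ip": "oneview.example.com",
                        "credentials": {"userName": "administrator", "password": "secret"}})

storage_system_id = "TXQ1000307"  # hypothetical storage system ID

# All managed target ports of the storage system.
ports = client.storage_systems.get_managed_ports(storage_system_id)

# A single managed port, addressed by its own ID.
one_port = client.storage_systems.get_managed_ports(
    storage_system_id, "C862833E-907C-4124-8841-BDC75444CF76")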
HewlettPackard/python-hpOneView | hpOneView/resources/storage/storage_systems.py | StorageSystems.get_by_ip_hostname | def get_by_ip_hostname(self, ip_hostname):
"""
Retrieve a storage system by its IP.
Works only with API version <= 300.
Args:
ip_hostname: Storage system IP or hostname.
Returns:
dict
"""
resources = self._client.get_all()
resources_filtered = [x for x in resources if x['credentials']['ip_hostname'] == ip_hostname]
if resources_filtered:
return resources_filtered[0]
else:
return None | python | def get_by_ip_hostname(self, ip_hostname):
"""
Retrieve a storage system by its IP.
Works only with API version <= 300.
Args:
ip_hostname: Storage system IP or hostname.
Returns:
dict
"""
resources = self._client.get_all()
resources_filtered = [x for x in resources if x['credentials']['ip_hostname'] == ip_hostname]
if resources_filtered:
return resources_filtered[0]
else:
return None | [
"def",
"get_by_ip_hostname",
"(",
"self",
",",
"ip_hostname",
")",
":",
"resources",
"=",
"self",
".",
"_client",
".",
"get_all",
"(",
")",
"resources_filtered",
"=",
"[",
"x",
"for",
"x",
"in",
"resources",
"if",
"x",
"[",
"'credentials'",
"]",
"[",
"'ip_hostname'",
"]",
"==",
"ip_hostname",
"]",
"if",
"resources_filtered",
":",
"return",
"resources_filtered",
"[",
"0",
"]",
"else",
":",
"return",
"None"
] | Retrieve a storage system by its IP.
Works only with API version <= 300.
Args:
ip_hostname: Storage system IP or hostname.
Returns:
dict | [
"Retrieve",
"a",
"storage",
"system",
"by",
"its",
"IP",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/storage/storage_systems.py#L211-L230 | train | 251,388 |
HewlettPackard/python-hpOneView | hpOneView/resources/storage/storage_systems.py | StorageSystems.get_by_hostname | def get_by_hostname(self, hostname):
"""
Retrieve a storage system by its hostname.
Works only with API version 500 onwards.
Args:
hostname: Storage system hostname.
Returns:
dict
"""
resources = self._client.get_all()
resources_filtered = [x for x in resources if x['hostname'] == hostname]
if resources_filtered:
return resources_filtered[0]
else:
return None | python | def get_by_hostname(self, hostname):
"""
Retrieve a storage system by its hostname.
Works only with API version 500 onwards.
Args:
hostname: Storage system hostname.
Returns:
dict
"""
resources = self._client.get_all()
resources_filtered = [x for x in resources if x['hostname'] == hostname]
if resources_filtered:
return resources_filtered[0]
else:
return None | [
"def",
"get_by_hostname",
"(",
"self",
",",
"hostname",
")",
":",
"resources",
"=",
"self",
".",
"_client",
".",
"get_all",
"(",
")",
"resources_filtered",
"=",
"[",
"x",
"for",
"x",
"in",
"resources",
"if",
"x",
"[",
"'hostname'",
"]",
"==",
"hostname",
"]",
"if",
"resources_filtered",
":",
"return",
"resources_filtered",
"[",
"0",
"]",
"else",
":",
"return",
"None"
] | Retrieve a storage system by its hostname.
Works only with API version 500 onwards.
Args:
hostname: Storage system hostname.
Returns:
dict | [
"Retrieve",
"a",
"storage",
"system",
"by",
"its",
"hostname",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/storage/storage_systems.py#L232-L251 | train | 251,389 |
HewlettPackard/python-hpOneView | hpOneView/resources/storage/storage_systems.py | StorageSystems.get_reachable_ports | def get_reachable_ports(self, id_or_uri, start=0, count=-1, filter='', query='', sort='', networks=[]):
"""
Gets the storage ports that are connected on the specified networks
based on the storage system port's expected network connectivity.
Returns:
list: Reachable Storage Port List.
"""
uri = self._client.build_uri(id_or_uri) + "/reachable-ports"
if networks:
elements = "\'"
for n in networks:
elements += n + ','
elements = elements[:-1] + "\'"
uri = uri + "?networks=" + elements
return self._client.get(self._client.build_query_uri(start=start, count=count, filter=filter, query=query,
sort=sort, uri=uri)) | python | def get_reachable_ports(self, id_or_uri, start=0, count=-1, filter='', query='', sort='', networks=[]):
"""
Gets the storage ports that are connected on the specified networks
based on the storage system port's expected network connectivity.
Returns:
list: Reachable Storage Port List.
"""
uri = self._client.build_uri(id_or_uri) + "/reachable-ports"
if networks:
elements = "\'"
for n in networks:
elements += n + ','
elements = elements[:-1] + "\'"
uri = uri + "?networks=" + elements
return self._client.get(self._client.build_query_uri(start=start, count=count, filter=filter, query=query,
sort=sort, uri=uri)) | [
"def",
"get_reachable_ports",
"(",
"self",
",",
"id_or_uri",
",",
"start",
"=",
"0",
",",
"count",
"=",
"-",
"1",
",",
"filter",
"=",
"''",
",",
"query",
"=",
"''",
",",
"sort",
"=",
"''",
",",
"networks",
"=",
"[",
"]",
")",
":",
"uri",
"=",
"self",
".",
"_client",
".",
"build_uri",
"(",
"id_or_uri",
")",
"+",
"\"/reachable-ports\"",
"if",
"networks",
":",
"elements",
"=",
"\"\\'\"",
"for",
"n",
"in",
"networks",
":",
"elements",
"+=",
"n",
"+",
"','",
"elements",
"=",
"elements",
"[",
":",
"-",
"1",
"]",
"+",
"\"\\'\"",
"uri",
"=",
"uri",
"+",
"\"?networks=\"",
"+",
"elements",
"return",
"self",
".",
"_client",
".",
"get",
"(",
"self",
".",
"_client",
".",
"build_query_uri",
"(",
"start",
"=",
"start",
",",
"count",
"=",
"count",
",",
"filter",
"=",
"filter",
",",
"query",
"=",
"query",
",",
"sort",
"=",
"sort",
",",
"uri",
"=",
"uri",
")",
")"
] | Gets the storage ports that are connected on the specified networks
based on the storage system port's expected network connectivity.
Returns:
list: Reachable Storage Port List. | [
"Gets",
"the",
"storage",
"ports",
"that",
"are",
"connected",
"on",
"the",
"specified",
"networks",
"based",
"on",
"the",
"storage",
"system",
"port",
"s",
"expected",
"network",
"connectivity",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/storage/storage_systems.py#L253-L271 | train | 251,390 |
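A sketch of restricting the reachable-ports query above to specific networks. The FC network URI and storage system ID are placeholders.

from hpOneView.oneview_client import OneViewClient

client = OneViewClient({"ip": "oneview.example.com",
                        "credentials": {"userName": "administrator", "password": "secret"}})

networks = ["/rest/fc-networks/f8a56a86-58b9-4b6f-9b26-4f0e0fd4bf5e"]  # hypothetical URI
reachable = client.storage_systems.get_reachable_ports("TXQ1000307", networks=networks)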
HewlettPackard/python-hpOneView | hpOneView/resources/storage/storage_systems.py | StorageSystems.get_templates | def get_templates(self, id_or_uri, start=0, count=-1, filter='', query='', sort=''):
"""
Gets a list of volume templates. Returns a list of storage templates belonging to the storage system.
Returns:
list: Storage Template List.
"""
uri = self._client.build_uri(id_or_uri) + "/templates"
return self._client.get(self._client.build_query_uri(start=start, count=count, filter=filter,
query=query, sort=sort, uri=uri)) | python | def get_templates(self, id_or_uri, start=0, count=-1, filter='', query='', sort=''):
"""
Gets a list of volume templates. Returns a list of storage templates belonging to the storage system.
Returns:
list: Storage Template List.
"""
uri = self._client.build_uri(id_or_uri) + "/templates"
return self._client.get(self._client.build_query_uri(start=start, count=count, filter=filter,
query=query, sort=sort, uri=uri)) | [
"def",
"get_templates",
"(",
"self",
",",
"id_or_uri",
",",
"start",
"=",
"0",
",",
"count",
"=",
"-",
"1",
",",
"filter",
"=",
"''",
",",
"query",
"=",
"''",
",",
"sort",
"=",
"''",
")",
":",
"uri",
"=",
"self",
".",
"_client",
".",
"build_uri",
"(",
"id_or_uri",
")",
"+",
"\"/templates\"",
"return",
"self",
".",
"_client",
".",
"get",
"(",
"self",
".",
"_client",
".",
"build_query_uri",
"(",
"start",
"=",
"start",
",",
"count",
"=",
"count",
",",
"filter",
"=",
"filter",
",",
"query",
"=",
"query",
",",
"sort",
"=",
"sort",
",",
"uri",
"=",
"uri",
")",
")"
] | Gets a list of volume templates. Returns a list of storage templates belonging to the storage system.
Returns:
list: Storage Template List. | [
"Gets",
"a",
"list",
"of",
"volume",
"templates",
".",
"Returns",
"a",
"list",
"of",
"storage",
"templates",
"belonging",
"to",
"the",
"storage",
"system",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/storage/storage_systems.py#L273-L282 | train | 251,391 |
HewlettPackard/python-hpOneView | hpOneView/resources/networking/fabrics.py | Fabrics.get_reserved_vlan_range | def get_reserved_vlan_range(self, id_or_uri):
"""
Gets the reserved vlan ID range for the fabric.
Note:
This method is only available on HPE Synergy.
Args:
id_or_uri: ID or URI of fabric.
Returns:
dict: vlan-pool
"""
uri = self._client.build_uri(id_or_uri) + "/reserved-vlan-range"
return self._client.get(uri) | python | def get_reserved_vlan_range(self, id_or_uri):
"""
Gets the reserved vlan ID range for the fabric.
Note:
This method is only available on HPE Synergy.
Args:
id_or_uri: ID or URI of fabric.
Returns:
dict: vlan-pool
"""
uri = self._client.build_uri(id_or_uri) + "/reserved-vlan-range"
return self._client.get(uri) | [
"def",
"get_reserved_vlan_range",
"(",
"self",
",",
"id_or_uri",
")",
":",
"uri",
"=",
"self",
".",
"_client",
".",
"build_uri",
"(",
"id_or_uri",
")",
"+",
"\"/reserved-vlan-range\"",
"return",
"self",
".",
"_client",
".",
"get",
"(",
"uri",
")"
] | Gets the reserved vlan ID range for the fabric.
Note:
This method is only available on HPE Synergy.
Args:
id_or_uri: ID or URI of fabric.
Returns:
dict: vlan-pool | [
"Gets",
"the",
"reserved",
"vlan",
"ID",
"range",
"for",
"the",
"fabric",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/networking/fabrics.py#L107-L121 | train | 251,392 |
HewlettPackard/python-hpOneView | hpOneView/resources/networking/fabrics.py | Fabrics.update_reserved_vlan_range | def update_reserved_vlan_range(self, id_or_uri, vlan_pool, force=False):
"""
Updates the reserved vlan ID range for the fabric.
Note:
This method is only available on HPE Synergy.
Args:
id_or_uri: ID or URI of fabric.
vlan_pool (dict): vlan-pool data to update.
force: If set to true, the operation completes despite any problems with network connectivity or errors
on the resource itself. The default is false.
Returns:
dict: The fabric
"""
uri = self._client.build_uri(id_or_uri) + "/reserved-vlan-range"
return self._client.update(resource=vlan_pool, uri=uri, force=force, default_values=self.DEFAULT_VALUES) | python | def update_reserved_vlan_range(self, id_or_uri, vlan_pool, force=False):
"""
Updates the reserved vlan ID range for the fabric.
Note:
This method is only available on HPE Synergy.
Args:
id_or_uri: ID or URI of fabric.
vlan_pool (dict): vlan-pool data to update.
force: If set to true, the operation completes despite any problems with network connectivity or errors
on the resource itself. The default is false.
Returns:
dict: The fabric
"""
uri = self._client.build_uri(id_or_uri) + "/reserved-vlan-range"
return self._client.update(resource=vlan_pool, uri=uri, force=force, default_values=self.DEFAULT_VALUES) | [
"def",
"update_reserved_vlan_range",
"(",
"self",
",",
"id_or_uri",
",",
"vlan_pool",
",",
"force",
"=",
"False",
")",
":",
"uri",
"=",
"self",
".",
"_client",
".",
"build_uri",
"(",
"id_or_uri",
")",
"+",
"\"/reserved-vlan-range\"",
"return",
"self",
".",
"_client",
".",
"update",
"(",
"resource",
"=",
"vlan_pool",
",",
"uri",
"=",
"uri",
",",
"force",
"=",
"force",
",",
"default_values",
"=",
"self",
".",
"DEFAULT_VALUES",
")"
] | Updates the reserved vlan ID range for the fabric.
Note:
This method is only available on HPE Synergy.
Args:
id_or_uri: ID or URI of fabric.
vlan_pool (dict): vlan-pool data to update.
force: If set to true, the operation completes despite any problems with network connectivity or errors
on the resource itself. The default is false.
Returns:
dict: The fabric | [
"Updates",
"the",
"reserved",
"vlan",
"ID",
"range",
"for",
"the",
"fabric",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/networking/fabrics.py#L123-L140 | train | 251,393 |
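A sketch pairing the getter and setter for the reserved VLAN range documented in the two records above. The payload keys in vlan_pool and the fabric ID are assumptions; verify them against the API reference for your appliance version.

from hpOneView.oneview_client import OneViewClient

client = OneViewClient({"ip": "oneview.example.com",
                        "credentials": {"userName": "administrator", "password": "secret"}})

fabric_id = "a7896ce7-c11d-4658-829d-142bc66a85e4"  # hypothetical fabric ID

current_range = client.fabrics.get_reserved_vlan_range(fabric_id)
vlan_pool = {"start": 300, "length": 62}  # assumed payload shape
updated_fabric = client.fabrics.update_reserved_vlan_range(fabric_id, vlan_pool)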
HewlettPackard/python-hpOneView | hpOneView/resources/security/roles.py | Roles.get | def get(self, name_or_uri):
"""
Get the role by its URI or Name.
Args:
name_or_uri:
Can be either the Name or the URI.
Returns:
dict: Role
"""
name_or_uri = quote(name_or_uri)
return self._client.get(name_or_uri) | python | def get(self, name_or_uri):
"""
Get the role by its URI or Name.
Args:
name_or_uri:
Can be either the Name or the URI.
Returns:
dict: Role
"""
name_or_uri = quote(name_or_uri)
return self._client.get(name_or_uri) | [
"def",
"get",
"(",
"self",
",",
"name_or_uri",
")",
":",
"name_or_uri",
"=",
"quote",
"(",
"name_or_uri",
")",
"return",
"self",
".",
"_client",
".",
"get",
"(",
"name_or_uri",
")"
] | Get the role by its URI or Name.
Args:
name_or_uri:
Can be either the Name or the URI.
Returns:
dict: Role | [
"Get",
"the",
"role",
"by",
"its",
"URI",
"or",
"Name",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/security/roles.py#L74-L86 | train | 251,394 |
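A sketch of fetching a role by name with the call above; because the name is URL-quoted internally, names containing spaces are safe to pass. The roles attribute on the client is an assumption.

from hpOneView.oneview_client import OneViewClient

client = OneViewClient({"ip": "oneview.example.com",
                        "credentials": {"userName": "administrator", "password": "secret"}})

role = client.roles.get("Infrastructure administrator")
print(role["uri"])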
HewlettPackard/python-hpOneView | hpOneView/resources/storage/sas_logical_jbods.py | SasLogicalJbods.get_drives | def get_drives(self, id_or_uri):
"""
Gets the list of drives allocated to this SAS logical JBOD.
Args:
id_or_uri: Can be either the SAS logical JBOD ID or the SAS logical JBOD URI.
Returns:
list: A list of Drives
"""
uri = self._client.build_uri(id_or_uri=id_or_uri) + self.DRIVES_PATH
return self._client.get(id_or_uri=uri) | python | def get_drives(self, id_or_uri):
"""
Gets the list of drives allocated to this SAS logical JBOD.
Args:
id_or_uri: Can be either the SAS logical JBOD ID or the SAS logical JBOD URI.
Returns:
list: A list of Drives
"""
uri = self._client.build_uri(id_or_uri=id_or_uri) + self.DRIVES_PATH
return self._client.get(id_or_uri=uri) | [
"def",
"get_drives",
"(",
"self",
",",
"id_or_uri",
")",
":",
"uri",
"=",
"self",
".",
"_client",
".",
"build_uri",
"(",
"id_or_uri",
"=",
"id_or_uri",
")",
"+",
"self",
".",
"DRIVES_PATH",
"return",
"self",
".",
"_client",
".",
"get",
"(",
"id_or_uri",
"=",
"uri",
")"
] | Gets the list of drives allocated to this SAS logical JBOD.
Args:
id_or_uri: Can be either the SAS logical JBOD ID or the SAS logical JBOD URI.
Returns:
list: A list of Drives | [
"Gets",
"the",
"list",
"of",
"drives",
"allocated",
"to",
"this",
"SAS",
"logical",
"JBOD",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/storage/sas_logical_jbods.py#L104-L115 | train | 251,395 |
HewlettPackard/python-hpOneView | hpOneView/resources/servers/id_pools_ranges.py | IdPoolsRanges.enable | def enable(self, information, id_or_uri, timeout=-1):
"""
Enables or disables a range.
Args:
information (dict): Information to update.
id_or_uri: ID or URI of range.
timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation
in OneView; it just stops waiting for its completion.
Returns:
dict: Updated resource.
"""
uri = self._client.build_uri(id_or_uri)
return self._client.update(information, uri, timeout=timeout) | python | def enable(self, information, id_or_uri, timeout=-1):
"""
Enables or disables a range.
Args:
information (dict): Information to update.
id_or_uri: ID or URI of range.
timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation
in OneView; it just stops waiting for its completion.
Returns:
dict: Updated resource.
"""
uri = self._client.build_uri(id_or_uri)
return self._client.update(information, uri, timeout=timeout) | [
"def",
"enable",
"(",
"self",
",",
"information",
",",
"id_or_uri",
",",
"timeout",
"=",
"-",
"1",
")",
":",
"uri",
"=",
"self",
".",
"_client",
".",
"build_uri",
"(",
"id_or_uri",
")",
"return",
"self",
".",
"_client",
".",
"update",
"(",
"information",
",",
"uri",
",",
"timeout",
"=",
"timeout",
")"
] | Enables or disables a range.
Args:
information (dict): Information to update.
id_or_uri: ID or URI of range.
timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation
in OneView; it just stops waiting for its completion.
Returns:
dict: Updated resource. | [
"Enables",
"or",
"disables",
"a",
"range",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/servers/id_pools_ranges.py#L86-L102 | train | 251,396 |
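A sketch of enabling an ID pool range with the call above. The id_pools_vmac_ranges attribute, the payload keys, and the range ID are assumptions; only the enable(information, id_or_uri, timeout=-1) signature comes from the record.

from hpOneView.oneview_client import OneViewClient

client = OneViewClient({"ip": "oneview.example.com",
                        "credentials": {"userName": "administrator", "password": "secret"}})

vmac_range_id = "62cb4abc-2d56-4cbd-8398-2a6c62c7e76a"  # hypothetical range ID
updated_range = client.id_pools_vmac_ranges.enable({"type": "Range", "enabled": True},
                                                   vmac_range_id)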
HewlettPackard/python-hpOneView | hpOneView/resources/servers/id_pools_ranges.py | IdPoolsRanges.get_allocated_fragments | def get_allocated_fragments(self, id_or_uri, count=-1, start=0):
"""
Gets all fragments that have been allocated in range.
Args:
id_or_uri:
ID or URI of range.
count:
The number of resources to return. A count of -1 requests all items. The actual number of items in
the response may differ from the requested count if the sum of start and count exceed the total number
of items.
start:
The first item to return, using 0-based indexing. If not specified, the default is 0 - start with the
first available item.
Returns:
list: A list with the allocated fragments.
"""
uri = self._client.build_uri(id_or_uri) + "/allocated-fragments?start={0}&count={1}".format(start, count)
return self._client.get_collection(uri) | python | def get_allocated_fragments(self, id_or_uri, count=-1, start=0):
"""
Gets all fragments that have been allocated in range.
Args:
id_or_uri:
ID or URI of range.
count:
The number of resources to return. A count of -1 requests all items. The actual number of items in
the response may differ from the requested count if the sum of start and count exceed the total number
of items.
start:
The first item to return, using 0-based indexing. If not specified, the default is 0 - start with the
first available item.
Returns:
list: A list with the allocated fragments.
"""
uri = self._client.build_uri(id_or_uri) + "/allocated-fragments?start={0}&count={1}".format(start, count)
return self._client.get_collection(uri) | [
"def",
"get_allocated_fragments",
"(",
"self",
",",
"id_or_uri",
",",
"count",
"=",
"-",
"1",
",",
"start",
"=",
"0",
")",
":",
"uri",
"=",
"self",
".",
"_client",
".",
"build_uri",
"(",
"id_or_uri",
")",
"+",
"\"/allocated-fragments?start={0}&count={1}\"",
".",
"format",
"(",
"start",
",",
"count",
")",
"return",
"self",
".",
"_client",
".",
"get_collection",
"(",
"uri",
")"
] | Gets all fragments that have been allocated in range.
Args:
id_or_uri:
ID or URI of range.
count:
The number of resources to return. A count of -1 requests all items. The actual number of items in
the response may differ from the requested count if the sum of start and count exceed the total number
of items.
start:
The first item to return, using 0-based indexing. If not specified, the default is 0 - start with the
first available item.
Returns:
list: A list with the allocated fragments. | [
"Gets",
"all",
"fragments",
"that",
"have",
"been",
"allocated",
"in",
"range",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/servers/id_pools_ranges.py#L123-L142 | train | 251,397 |
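A pagination sketch for the allocated-fragments query above, reusing the same hypothetical range ID and client wiring as the earlier sketches.

from hpOneView.oneview_client import OneViewClient

client = OneViewClient({"ip": "oneview.example.com",
                        "credentials": {"userName": "administrator", "password": "secret"}})

vmac_range_id = "62cb4abc-2d56-4cbd-8398-2a6c62c7e76a"  # hypothetical range ID
# Fetch the first five allocated fragments of the range.
fragments = client.id_pools_vmac_ranges.get_allocated_fragments(vmac_range_id,
                                                                count=5, start=0)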
HewlettPackard/python-hpOneView | hpOneView/resources/networking/logical_interconnects.py | LogicalInterconnects.get_all | def get_all(self, start=0, count=-1, sort=''):
"""
Gets a list of logical interconnects based on optional sorting and filtering and is constrained by start
and count parameters.
Args:
start:
The first item to return, using 0-based indexing.
If not specified, the default is 0 - start with the first available item.
count:
The number of resources to return. A count of -1 requests all items.
The actual number of items in the response might differ from the requested
count if the sum of start and count exceeds the total number of items.
sort:
The sort order of the returned data set. By default, the sort order is based
on create time with the oldest entry first.
Returns:
list: A list of logical interconnects.
"""
return self._helper.get_all(start, count, sort=sort) | python | def get_all(self, start=0, count=-1, sort=''):
"""
Gets a list of logical interconnects based on optional sorting and filtering and is constrained by start
and count parameters.
Args:
start:
The first item to return, using 0-based indexing.
If not specified, the default is 0 - start with the first available item.
count:
The number of resources to return. A count of -1 requests all items.
The actual number of items in the response might differ from the requested
count if the sum of start and count exceeds the total number of items.
sort:
The sort order of the returned data set. By default, the sort order is based
on create time with the oldest entry first.
Returns:
list: A list of logical interconnects.
"""
return self._helper.get_all(start, count, sort=sort) | [
"def",
"get_all",
"(",
"self",
",",
"start",
"=",
"0",
",",
"count",
"=",
"-",
"1",
",",
"sort",
"=",
"''",
")",
":",
"return",
"self",
".",
"_helper",
".",
"get_all",
"(",
"start",
",",
"count",
",",
"sort",
"=",
"sort",
")"
] | Gets a list of logical interconnects based on optional sorting and filtering and is constrained by start
and count parameters.
Args:
start:
The first item to return, using 0-based indexing.
If not specified, the default is 0 - start with the first available item.
count:
The number of resources to return. A count of -1 requests all items.
The actual number of items in the response might differ from the requested
count if the sum of start and count exceeds the total number of items.
sort:
The sort order of the returned data set. By default, the sort order is based
on create time with the oldest entry first.
Returns:
list: A list of logical interconnects. | [
"Gets",
"a",
"list",
"of",
"logical",
"interconnects",
"based",
"on",
"optional",
"sorting",
"and",
"filtering",
"and",
"is",
"constrained",
"by",
"start",
"and",
"count",
"parameters",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/networking/logical_interconnects.py#L89-L109 | train | 251,398 |
HewlettPackard/python-hpOneView | hpOneView/resources/networking/logical_interconnects.py | LogicalInterconnects.update_compliance | def update_compliance(self, timeout=-1):
"""
Returns logical interconnects to a consistent state. The current logical interconnect state is
compared to the associated logical interconnect group.
Any differences identified are corrected, bringing the logical interconnect back to a consistent
state. Changes are asynchronously applied to all managed interconnects. Note that if the changes detected
involve differences in the interconnect map between the logical interconnect group and the logical interconnect,
the process of bringing the logical interconnect back to a consistent state might involve automatically removing
existing interconnects from management and/or adding new interconnects for management.
Args:
timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation
in OneView; it just stops waiting for its completion.
Returns:
dict: Logical Interconnect.
"""
uri = "{}/compliance".format(self.data["uri"])
return self._helper.update(None, uri, timeout=timeout) | python | def update_compliance(self, timeout=-1):
"""
Returns logical interconnects to a consistent state. The current logical interconnect state is
compared to the associated logical interconnect group.
Any differences identified are corrected, bringing the logical interconnect back to a consistent
state. Changes are asynchronously applied to all managed interconnects. Note that if the changes detected
involve differences in the interconnect map between the logical interconnect group and the logical interconnect,
the process of bringing the logical interconnect back to a consistent state might involve automatically removing
existing interconnects from management and/or adding new interconnects for management.
Args:
timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation
in OneView; it just stops waiting for its completion.
Returns:
dict: Logical Interconnect.
"""
uri = "{}/compliance".format(self.data["uri"])
return self._helper.update(None, uri, timeout=timeout) | [
"def",
"update_compliance",
"(",
"self",
",",
"timeout",
"=",
"-",
"1",
")",
":",
"uri",
"=",
"\"{}/compliance\"",
".",
"format",
"(",
"self",
".",
"data",
"[",
"\"uri\"",
"]",
")",
"return",
"self",
".",
"_helper",
".",
"update",
"(",
"None",
",",
"uri",
",",
"timeout",
"=",
"timeout",
")"
] | Returns logical interconnects to a consistent state. The current logical interconnect state is
compared to the associated logical interconnect group.
Any differences identified are corrected, bringing the logical interconnect back to a consistent
state. Changes are asynchronously applied to all managed interconnects. Note that if the changes detected
involve differences in the interconnect map between the logical interconnect group and the logical interconnect,
the process of bringing the logical interconnect back to a consistent state might involve automatically removing
existing interconnects from management and/or adding new interconnects for management.
Args:
timeout: Timeout in seconds. Wait for task completion by default. The timeout does not abort the operation
in OneView; it just stops waiting for its completion.
Returns:
dict: Logical Interconnect. | [
"Returns",
"logical",
"interconnects",
"to",
"a",
"consistent",
"state",
".",
"The",
"current",
"logical",
"interconnect",
"state",
"is",
"compared",
"to",
"the",
"associated",
"logical",
"interconnect",
"group",
"."
] | 3c6219723ef25e6e0c83d44a89007f89bc325b89 | https://github.com/HewlettPackard/python-hpOneView/blob/3c6219723ef25e6e0c83d44a89007f89bc325b89/hpOneView/resources/networking/logical_interconnects.py#L131-L150 | train | 251,399 |
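A sketch of bringing a logical interconnect back to compliance with the method above. Since update_compliance reads self.data["uri"], it is called on a fetched resource object; the get_by_name lookup and the interconnect name are assumptions.

from hpOneView.oneview_client import OneViewClient

client = OneViewClient({"ip": "oneview.example.com",
                        "credentials": {"userName": "administrator", "password": "secret"}})

# Assumed lookup returning a resource object that exposes update_compliance().
logical_interconnect = client.logical_interconnects.get_by_name("LE-Enclosure-1")
compliant = logical_interconnect.update_compliance(timeout=600)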