response
stringlengths
1
33.1k
instruction
stringlengths
22
582k
Returns the current color map.
def ansi_color_style(style="default"):
    """Returns the current color map.

    Unknown style names fall back to the "default" map, with a
    RuntimeWarning pointed at the caller.
    """
    if style not in ANSI_STYLES:
        warnings.warn(
            f"Could not find color style {style!r}, using default.",
            RuntimeWarning,
            stacklevel=2,
        )
        style = "default"
    return ANSI_STYLES[style]
Reverses an ANSI color style mapping so that escape codes map to colors. Style may either be string or mapping. May also return the style it looked up.
def ansi_reverse_style(style="default", return_style=False):
    """Reverses an ANSI color style mapping so that escape codes map to
    colors.  Style may either be string or mapping.  May also return the
    style it looked up.
    """
    if isinstance(style, str):
        style = ansi_style_by_name(style)
    reversed_style = {v: k for k, v in style.items()}
    # extra entries (modifier codes and zero-stripped aliases) that make the
    # reversed map more useful for lookups
    updates = {
        "1": "BOLD_",
        "2": "FAINT_",
        "3": "ITALIC_",
        "4": "UNDERLINE_",
        "5": "SLOWBLINK_",
        "6": "FASTBLINK_",
        "7": "INVERT_",
        "8": "CONCEAL_",
        "9": "STRIKETHROUGH_",
        "21": "BOLDOFF_",
        "22": "FAINTOFF_",
        "23": "ITALICOFF_",
        "24": "UNDERLINEOFF_",
        "25": "BLINKOFF_",
        "27": "INVERTOFF_",
        "28": "REVEALOFF_",
        "29": "STRIKETHROUGHOFF_",
        "38": "SET_FOREGROUND_",
        "48": "SET_BACKGROUND_",
        "38;2": "SET_FOREGROUND_FAINT_",
        "48;2": "SET_BACKGROUND_FAINT_",
        "38;5": "SET_FOREGROUND_SLOWBLINK_",
        "48;5": "SET_BACKGROUND_SLOWBLINK_",
    }
    # alias each code with its leading zeros stripped
    for ec, name in reversed_style.items():
        no_left_zero = ec.lstrip("0")
        if no_left_zero.startswith(";"):
            updates[no_left_zero[1:]] = name
        elif no_left_zero != ec:
            updates[no_left_zero] = name
    reversed_style.update(updates)
    if return_style:
        return style, reversed_style
    return reversed_style
Converts an ANSI color code escape sequence to a tuple of color names in the provided style ('default' should almost always be the style). For example, '0' becomes ('RESET',) and '32;41' becomes ('GREEN', 'BACKGROUND_RED'). The style keyword may either be a string, in which case the style is looked up, or an actual style dict. You can also provide a reversed style mapping, too, which is just the keys/values of the style dict swapped. If reversed style is not provided, it is computed.
def ansi_color_escape_code_to_name(escape_code, style, reversed_style=None):
    """Converts an ANSI color code escape sequence to a tuple of color names
    in the provided style ('default' should almost always be the style). For
    example, '0' becomes ('RESET',) and '32;41' becomes
    ('GREEN', 'BACKGROUND_RED').  The style keyword may either be a string,
    in which case the style is looked up, or an actual style dict.  You can
    also provide a reversed style mapping, too, which is just the keys/values
    of the style dict swapped. If reversed style is not provided, it is
    computed.
    """
    if reversed_style is None:
        style, reversed_style = ansi_reverse_style(style, return_style=True)
    # strip some actual escape codes, if needed.
    match = ANSI_ESCAPE_CODE_RE.match(escape_code)
    if not match:
        msg = f'Invalid ANSI color sequence "{escape_code}", using "RESET" instead.'
        warnings.warn(msg, RuntimeWarning, stacklevel=2)
        return ("RESET",)
    ec = match.group(2)
    # Pass 1: translate each ';'-separated code into a name fragment.
    # 38/48 introduce extended color specs, so the ints that follow them
    # (3 ints after ';2', 1 int after ';5') are kept verbatim.
    names = []
    n_ints = 0  # how many raw integer params still expected
    seen_set_foreback = False  # inside a 38/48 extended-color sequence
    for e in ec.split(";"):
        no_left_zero = e.lstrip("0") if len(e) > 1 else e
        if seen_set_foreback and n_ints > 0:
            # raw color component of an extended spec — keep as-is
            names.append(e)
            n_ints -= 1
            if n_ints == 0:
                seen_set_foreback = False
            continue
        else:
            names.append(reversed_style.get(no_left_zero, no_left_zero))
        # set the flags for next time
        if "38" == e or "48" == e:
            seen_set_foreback = True
        elif seen_set_foreback and "2" == e:
            n_ints = 3
        elif seen_set_foreback and "5" == e:
            n_ints = 1
    # Pass 2: normalize names — glue prefix fragments (those ending in "_")
    # onto the color they modify, and resolve extended specs into palette
    # color names.
    n = ""  # fragment accumulator
    norm_names = []
    prefixes = ""
    for name in names:
        if name in ("RESET", "NO_COLOR"):
            # skip most '0' entries
            continue
        elif "BACKGROUND_" in name and n:
            # a background color interrupts a pending prefix; stash it
            # for the final foreground color instead
            prefixes += n
            n = ""
        n = n + name if n else name
        if n.endswith("_"):
            # still a bare prefix — wait for the color it modifies
            continue
        elif ANSI_COLOR_NAME_SET_SHORT_RE.match(n) is not None:
            # 256-color form, e.g. SET_FOREGROUND_SLOWBLINK_<short>
            pre, fore_back, short = ANSI_COLOR_NAME_SET_SHORT_RE.match(n).groups()
            n = _color_name_from_ints(
                short_to_ints(short), background=(fore_back == "BACK"), prefix=pre
            )
        elif ANSI_COLOR_NAME_SET_3INTS_RE.match(n) is not None:
            # truecolor form with explicit r;g;b ints
            pre, fore_back, r, g, b = ANSI_COLOR_NAME_SET_3INTS_RE.match(n).groups()
            n = _color_name_from_ints(
                (int(r), int(g), int(b)), background=(fore_back == "BACK"), prefix=pre
            )
        elif "GROUND_FAINT_" in n:
            # have 1 or 2, but not 3 ints — keep accumulating
            n += "_"
            continue
        # error check
        if not iscolor(n):
            msg = (
                "Could not translate ANSI color code {escape_code!r} "
                "into a known color in the palette. Specifically, the {n!r} "
                "portion of {name!r} in {names!r} seems to missing."
            )
            raise ValueError(
                msg.format(escape_code=escape_code, names=names, name=name, n=n)
            )
        norm_names.append(n)
        n = ""
    # check if we have pre- & post-fixes to apply to the last, non-background element
    prefixes += n
    if prefixes.endswith("_"):
        for i in range(-1, -len(norm_names) - 1, -1):
            if "BACKGROUND_" not in norm_names[i]:
                norm_names[i] = prefixes + norm_names[i]
                break
        else:
            # only have background colors, so select WHITE as default color
            norm_names.append(prefixes + "WHITE")
    # return
    if len(norm_names) == 0:
        return ("RESET",)
    else:
        return tuple(norm_names)
Makes an ANSI color style from a color palette
def make_ansi_style(palette):
    """Makes an ANSI color style from a color palette"""
    style = {"RESET": "0"}
    for name, t in BASE_XONSH_COLORS.items():
        closest = find_closest_color(t, palette)
        if len(closest) == 3:
            # expand 3-digit hex shorthand (e.g. "f0c" -> "ff00cc")
            closest = "".join(c + c for c in closest)
        style[name] = "38;5;" + rgb2short(closest)[0]
    return style
Tries to convert the given pygments style to ANSI style. Parameters ---------- style : pygments style value Returns ------- ANSI style
def _pygments_to_ansi_style(style):
    """Tries to convert the given pygments style to ANSI style.

    Parameters
    ----------
    style : pygments style value

    Returns
    -------
    ANSI style
    """
    codes = []
    for part in style.split(" "):
        if part in _PART_STYLE_CODE_MAPPING:
            codes.append(_PART_STYLE_CODE_MAPPING[part])
        elif part.startswith("bg:"):
            # background color: 48;5;<short-code>
            codes.append("48;5;" + rgb2short(part[3:])[0])
        else:
            # foreground color: 38;5;<short-code>
            codes.append("38;5;" + rgb2short(part)[0])
    return ";".join(codes)
Converts pygments like style dict to ANSI rules
def _style_dict_to_ansi(styles):
    """Converts pygments like style dict to ANSI rules"""
    ansi_style = {}
    for token, style in styles.items():
        parts = str(token).split(".")  # convert pygments token to str
        # keep only top-level tokens and explicit Color.* entries
        if len(parts) == 1 or parts[-2] == "Color":
            ansi_style[parts[-1]] = _pygments_to_ansi_style(style)
    return ansi_style
Register custom ANSI style. Parameters ---------- name : str Style name. styles : dict Token (or str) -> style mapping. base : str, optional Base style to use as default.
def register_custom_ansi_style(name, styles, base="default"):
    """Register custom ANSI style.

    Parameters
    ----------
    name : str
        Style name.
    styles : dict
        Token (or str) -> style mapping.
    base : str, optional
        Base style to use as default.
    """
    new_style = dict(ANSI_STYLES[base])
    new_style.update(_style_dict_to_ansi(styles))
    ANSI_STYLES[name] = new_style
Gets or makes an ANSI color style by name. If the style does not exist, it will look for a style using the pygments name.
def ansi_style_by_name(name):
    """Gets or makes an ANSI color style by name. If the style does not
    exist, it will look for a style using the pygments name.
    """
    if name in ANSI_STYLES:
        return ANSI_STYLES[name]
    if not HAS_PYGMENTS:
        print(f"could not find style {name!r}, using 'default'")
        return ANSI_STYLES["default"]

    from pygments.util import ClassNotFound

    from xonsh.pygments_cache import get_style_by_name

    try:
        pstyle = get_style_by_name(name)
    except (ModuleNotFoundError, ClassNotFound):
        pstyle = get_style_by_name("default")
    # derive an ANSI style from the pygments palette and cache it
    astyle = make_ansi_style(make_palette(pstyle.styles.values()))
    ANSI_STYLES[name] = astyle
    return astyle
Attempts to find the first name in the tree.
def leftmostname(node):
    """Attempts to find the first name in the tree."""
    if isinstance(node, Name):
        return node.id
    if isinstance(node, (BinOp, Compare)):
        return leftmostname(node.left)
    if isinstance(node, (Attribute, Subscript, Starred, Expr)):
        return leftmostname(node.value)
    if isinstance(node, Call):
        return leftmostname(node.func)
    if isinstance(node, UnaryOp):
        return leftmostname(node.operand)
    if isinstance(node, BoolOp):
        return leftmostname(node.values[0])
    if isinstance(node, Assign):
        return leftmostname(node.targets[0])
    if isinstance(node, AnnAssign):
        return leftmostname(node.target)
    if isinstance(node, JoinedStr) or is_const_str(node) or is_const_bytes(node):
        # handles case of "./my executable"
        return leftmostname(node.s)
    if isinstance(node, Tuple) and len(node.elts) > 0:
        # handles case of echo ,1,2,3
        return leftmostname(node.elts[0])
    return None
Gets the lineno of a node or returns the default.
def get_lineno(node, default=0):
    """Return ``node.lineno`` when present, otherwise *default*."""
    return getattr(node, "lineno", default)
Computes the minimum lineno.
def min_line(node):
    """Computes the minimum lineno."""
    # nodes without a lineno fall back to the root's lineno
    node_line = get_lineno(node)
    return min(get_lineno(n, node_line) for n in walk(node))
Computes the maximum lineno.
def max_line(node):
    """Computes the maximum lineno."""
    return max(get_lineno(n) for n in walk(node))
Gets the col_offset of a node, or returns the default
def get_col(node, default=-1):
    """Return ``node.col_offset`` when present, otherwise *default*."""
    return getattr(node, "col_offset", default)
Computes the minimum col_offset.
def min_col(node):
    """Computes the minimum col_offset."""
    # nodes without a col_offset fall back to the root's offset
    return min(get_col(n, node.col_offset) for n in walk(node))
Returns the maximum col_offset of the node and all sub-nodes.
def max_col(node):
    """Returns the maximum col_offset of the node and all sub-nodes."""
    # honor an explicitly cached value when present
    col = getattr(node, "max_col", None)
    if col is None:
        widest = max(walk(node), key=get_col)
        col = widest.col_offset + node_len(widest)
    return col
The length of a node as a string
def _node_str_len(n):
    """String length contributed by a single AST node, 0 if unknown."""
    if isinstance(n, Name):
        return len(n.id)
    if isinstance(n, Attribute):
        # the "." plus the attribute name itself
        return 1 + (len(n.attr) if isinstance(n.attr, str) else 0)
    return 0


def node_len(node):
    """The length of a node as a string"""
    # this may need to be added to for more nodes as more cases are found
    return sum(_node_str_len(n) for n in walk(node))
Gets the id attribute of a node, or returns a default.
def get_id(node, default=None):
    """Return the ``id`` attribute of a node, or *default* when absent."""
    return getattr(node, "id", default)
Returns the set of all names present in the node's tree.
def gather_names(node):
    """Returns the set of all names present in the node's tree."""
    names = {get_id(n) for n in walk(node)}
    names.discard(None)  # non-Name nodes contribute None
    return names
Gets the id and attribute of a node, or returns a default.
def get_id_ctx(node):
    """Return an (id, ctx) pair for a node, or (None, None) when the
    node has no ``id`` attribute.
    """
    nid = getattr(node, "id", None)
    return (None, None) if nid is None else (nid, node.ctx)
Returns the names present in the node's tree in a set of load nodes and a set of store nodes.
def gather_load_store_names(node):
    """Returns the names present in the node's tree in a set of load nodes
    and a set of store nodes.
    """
    load = set()
    store = set()
    for n in walk(node):
        nid, ctx = get_id_ctx(n)
        if nid is None:
            continue
        # anything that is not an explicit Load counts as a store
        (load if isinstance(ctx, Load) else store).add(nid)
    return (load, store)
Tests if x is an AST node with elements.
def has_elts(x):
    """Tests if x is an AST node with elements."""
    if not isinstance(x, AST):
        return False
    return hasattr(x, "elts")
Creates an AST that loads variable name that may (or may not) have attribute chains. For example, "a.b.c"
def load_attribute_chain(name, lineno=None, col=None):
    """Creates an AST that loads variable name that may (or may not) have
    attribute chains. For example, "a.b.c"
    """
    base, *attrs = name.split(".")
    node = Name(id=base, ctx=Load(), lineno=lineno, col_offset=col)
    # wrap successive attribute accesses around the base name
    for attr in attrs:
        node = Attribute(
            value=node, attr=attr, ctx=Load(), lineno=lineno, col_offset=col
        )
    return node
Creates the AST node for calling a function of a given name. Functions names may contain attribute access, e.g. __xonsh__.env.
def xonsh_call(name, args, lineno=None, col=None):
    """Creates the AST node for calling a function of a given name.
    Function names may contain attribute access, e.g. __xonsh__.env.
    """
    func = load_attribute_chain(name, lineno=lineno, col=col)
    return Call(
        func=func,
        args=args,
        keywords=[],
        starargs=None,
        kwargs=None,
        lineno=lineno,
        col_offset=col,
    )
Determines whether or not a node is worth visiting. Currently only UnaryOp and BoolOp nodes are visited.
def isdescendable(node):
    """Determines whether or not a node is worth visiting. Currently only
    UnaryOp and BoolOp nodes are visited.
    """
    return isinstance(node, (UnaryOp, BoolOp))
Determines whether a node (or code string) is an expression, and does not contain any statements. The execution context (ctx) and other args and kwargs are passed down to the parser, as needed.
def isexpression(node, ctx=None, *args, **kwargs):
    """Determines whether a node (or code string) is an expression, and
    does not contain any statements. The execution context (ctx) and other
    args and kwargs are passed down to the parser, as needed.
    """
    # parse a string into an AST first
    if isinstance(node, str):
        src = node if node.endswith("\n") else node + "\n"
        if ctx is None:
            ctx = XSH.ctx
        node = XSH.execer.parse(src, ctx, *args, **kwargs)
    # an expression is either an Expr(ession) node directly, or a Module
    # whose single statement is one
    if isinstance(node, (Expr, Expression)):
        return True
    if isinstance(node, Module) and len(node.body) == 1:
        return isinstance(node.body[0], (Expr, Expression))
    return False
performs a pretty dump of an AST node.
def pdump(s, **kwargs):
    """Performs a pretty dump of an AST node.

    Accepts either an AST node (dumped via ``ast.dump`` first) or an
    already-dumped string, and recursively indents the contents of the
    outermost bracket pair.
    """
    if isinstance(s, AST):
        s = dump(s, **kwargs).replace(",", ",\n")
    openers = "([{"
    closers = ")]}"
    # lens is len(s)+1 so that a failed str.find() (-1) wraps to the
    # "past the end" sentinel under the modulo below
    lens = len(s) + 1
    if lens == 1:
        return s
    # index of the first opening bracket of any kind
    i = min(s.find(o) % lens for o in openers)
    if i == lens - 1:
        # no opener at all — nothing to indent
        return s
    closer = closers[openers.find(s[i])]
    j = s.rfind(closer)
    if j == -1 or j <= i:
        # unbalanced: indent everything after the opener
        return s[: i + 1] + "\n" + textwrap.indent(pdump(s[i + 1 :]), " ")
    pre = s[: i + 1] + "\n"
    mid = s[i + 1 : j]
    post = "\n" + s[j:]
    mid = textwrap.indent(pdump(mid), " ")
    # the tail may itself contain bracketed content worth recursing into
    if "(" in post or "[" in post or "{" in post:
        post = pdump(post)
    return pre + mid + post
Performs a pretty print of the AST nodes.
def pprint_ast(s, *, sep=None, end=None, file=None, flush=False, **kwargs):
    """Performs a pretty print of the AST nodes."""
    dumped = pdump(s, **kwargs)
    print(dumped, sep=sep, end=end, file=file, flush=flush)
calls getattr(name, '__xonsh_block__', False).
def _getblockattr(name, lineno, col):
    """Builds an AST equivalent to ``getattr(name, '__xonsh_block__', False)``."""
    obj = Name(id=name, ctx=Load(), lineno=lineno, col_offset=col)
    attr = const_str(s="__xonsh_block__", lineno=lineno, col_offset=col)
    default = const_name(value=False, lineno=lineno, col_offset=col)
    return xonsh_call("getattr", args=[obj, attr, default], lineno=lineno, col=col)
Sets a new signal handle that will automatically restore the old value once the new handle is finished.
def resetting_signal_handle(sig, f):
    """Sets a new signal handle that will automatically restore the old value
    once the new handle is finished.
    """
    # remember whatever handler is currently installed
    oldh = signal.getsignal(sig)

    def newh(s=None, frame=None):
        # one-shot: run the callback, restore the previous handler, then
        # terminate the process with the signal number as exit status.
        # NOTE(review): the sys.exit(sig) makes this handler fatal for any
        # nonzero signal — confirm that callers rely on that.
        f(s, frame)
        signal.signal(sig, oldh)
        if sig != 0:
            sys.exit(sig)

    signal.signal(sig, newh)
Prints help about, and then returns that variable.
def helper(x, name=""):
    """Prints help about, and then returns that variable."""
    oname = name or getattr(x, "__name__", "")
    INSPECTOR.pinfo(x, oname=oname, detail_level=0)
    return x
Prints help about, and then returns that variable.
def superhelper(x, name=""):
    """Prints detailed help about, and then returns that variable."""
    oname = name or getattr(x, "__name__", "")
    INSPECTOR.pinfo(x, oname=oname, detail_level=1)
    return x
Regular expression-based globbing.
def reglob(path, parts=None, i=None):
    """Regular expression-based globbing.

    Each path component of *path* is treated as a regular expression that
    must fully match a directory entry.  On the initial call only *path*
    is given; the function then splits it into components and recurses with
    *parts* (the component patterns) and *i* (the index of the component
    currently being matched).
    """
    if parts is None:
        # first call: normalize and split the pattern, then restart from
        # the drive/root (absolute) or the current directory (relative)
        path = os.path.normpath(path)
        drive, tail = os.path.splitdrive(path)
        parts = tail.split(os.sep)
        d = os.sep if os.path.isabs(path) else "."
        d = os.path.join(drive, d)
        return reglob(d, parts, i=0)
    base = subdir = path
    if i == 0:
        if not os.path.isabs(base):
            # relative results should not carry the "." prefix
            base = ""
        elif len(parts) > 1:
            # absolute path: parts[0] is the empty root component, skip it
            i += 1
    try:
        regex = re.compile(parts[i])
    except Exception as e:
        # a pattern like "*foo" is invalid regex; hint the likely fix
        if isinstance(e, re.error) and str(e) == "nothing to repeat at position 0":
            raise XonshError(
                "Consider adding a leading '.' to your glob regex pattern."
            ) from e
        else:
            raise e
    files = os.listdir(subdir)
    files.sort()
    paths = []
    i1 = i + 1
    if i1 == len(parts):
        # final component: collect every matching entry
        for f in files:
            p = os.path.join(base, f)
            if regex.fullmatch(f) is not None:
                paths.append(p)
    else:
        # intermediate component: recurse into matching directories only
        for f in files:
            p = os.path.join(base, f)
            if regex.fullmatch(f) is None or not os.path.isdir(p):
                continue
            paths += reglob(p, parts=parts, i=i1)
    return paths
Takes a string and returns a list of file paths that match (regex, glob, or arbitrary search function). If pathobj=True, the return is a list of pathlib.Path objects instead of strings.
def pathsearch(func, s, pymode=False, pathobj=False):
    """
    Takes a string and returns a list of file paths that match (regex, glob,
    or arbitrary search function). If pathobj=True, the return is a list of
    pathlib.Path objects instead of strings.
    """
    # a path search function must be a callable of exactly one argument
    if not callable(func) or len(inspect.signature(func).parameters) != 1:
        error = "%r is not a known path search function"
        raise XonshError(error % func)
    matches = func(s)
    if pathobj and pymode:
        matches = [pathlib.Path(m) for m in matches]
    if len(matches) != 0:
        return matches
    # no match: Python mode yields an empty list, subprocess mode echoes
    # the original string back
    return [] if pymode else [s]
Runs a subprocess, capturing the output. Returns the stdout that was produced as a str.
def subproc_captured_stdout(*cmds, envs=None):
    """Runs a subprocess, capturing the output. Returns the stdout
    that was produced as a str.
    """
    # deferred import to avoid a circular dependency at module load
    from xonsh.procs.specs import run_subproc

    return run_subproc(cmds, captured="stdout", envs=envs)
Runs a subprocess, capturing the output. Returns a list of whitespace-separated strings of the stdout that was produced. The string is split using xonsh's lexer, rather than Python's str.split() or shlex.split().
def subproc_captured_inject(*cmds, envs=None):
    """Runs a subprocess, capturing the output. Returns a list of
    whitespace-separated strings of the stdout that was produced.
    The string is split using xonsh's lexer, rather than Python's str.split()
    or shlex.split().
    """
    # deferred import to avoid a circular dependency at module load
    from xonsh.procs.specs import run_subproc

    pipeline = run_subproc(cmds, captured="object", envs=envs)
    pipeline.end()  # make sure the command has finished producing output
    split = XSH.execer.parser.lexer.split
    toks = []
    for line in pipeline:
        toks.extend(split(line.rstrip(os.linesep)))
    return toks
Runs a subprocess, capturing the output. Returns an instance of CommandPipeline representing the completed command.
def subproc_captured_object(*cmds, envs=None):
    """
    Runs a subprocess, capturing the output. Returns an instance of
    CommandPipeline representing the completed command.
    """
    # deferred import to avoid a circular dependency at module load
    from xonsh.procs.specs import run_subproc

    return run_subproc(cmds, captured="object", envs=envs)
Runs a subprocess, capturing the output. Returns an instance of HiddenCommandPipeline representing the completed command.
def subproc_captured_hiddenobject(*cmds, envs=None):
    """Runs a subprocess, capturing the output. Returns an instance of
    HiddenCommandPipeline representing the completed command.
    """
    # deferred import to avoid a circular dependency at module load
    from xonsh.procs.specs import run_subproc

    return run_subproc(cmds, captured="hiddenobject", envs=envs)
Runs a subprocess without capturing its output; the command writes directly to the terminal.
def subproc_uncaptured(*cmds, envs=None):
    """Runs a subprocess without capturing its output; the command writes
    directly to the terminal.
    """
    # deferred import to avoid a circular dependency at module load
    from xonsh.procs.specs import run_subproc

    return run_subproc(cmds, captured=False, envs=envs)
Ensures that x is a list of strings.
def ensure_list_of_strs(x):
    """Ensures that x is a list of strings."""
    if isinstance(x, str):
        return [x]
    if isinstance(x, cabc.Sequence):
        return [i if isinstance(i, str) else str(i) for i in x]
    return [str(x)]
Ensures that x is single string or function.
def ensure_str_or_callable(x):
    """Ensures that x is single string or function."""
    if callable(x) or isinstance(x, str):
        return x
    if isinstance(x, bytes):
        # ``os.fsdecode`` decodes using "surrogateescape" on linux and "strict" on windows.
        # This is used to decode bytes for interfacing with the os, notably for command line arguments.
        # See https://www.python.org/dev/peps/pep-0383/#specification
        return os.fsdecode(x)
    return str(x)
Ensures that x is a list of strings or functions. This is called when using the ``@()`` operator to expand its content.
def list_of_strs_or_callables(x):
    """
    Ensures that x is a list of strings or functions. This is called when
    using the ``@()`` operator to expand its content.
    """
    # strings, bytes, and callables are single items, never iterated
    if isinstance(x, (str, bytes)) or callable(x):
        return [ensure_str_or_callable(x)]
    if isinstance(x, cabc.Iterable):
        return [ensure_str_or_callable(i) for i in x]
    return [ensure_str_or_callable(x)]
Takes an outer product of a list of strings
def list_of_list_of_strs_outer_product(x):
    """Takes an outer product of a list of strings"""
    rtn = []
    for combo in itertools.product(*map(ensure_list_of_strs, x)):
        joined = "".join(combo)
        if "*" in joined:
            # glob patterns may expand to several paths
            rtn.extend(XSH.glob(joined))
        else:
            rtn.append(XSH.expand_path(joined))
    return rtn
Evaluates the argument in Xonsh context.
def eval_fstring_field(field):
    """Evaluates the argument in Xonsh context."""
    expr, filename = field[0].strip(), field[1]
    return XSH.execer.eval(expr, glbs=globals(), locs=XSH.ctx, filename=filename)
Puts a kind flag (string) into canonical form.
def _convert_kind_flag(x):
    """Puts a kind flag (string) into canonical form."""
    x = x.lower()
    kind = MACRO_FLAG_KINDS.get(x)
    if kind is None:
        raise TypeError(f"{x!r} not a recognized macro type.")
    return kind
Converts a string macro argument based on the requested kind. Parameters ---------- raw_arg : str The str representation of the macro argument. kind : object A flag or type representing how to convert the argument. glbs : Mapping The globals from the call site. locs : Mapping or None The locals from the call site. name : str, optional The macro argument name. macroname : str, optional The name of the macro itself. Returns ------- The converted argument.
def convert_macro_arg(raw_arg, kind, glbs, locs, *, name="<arg>", macroname="<macro>"): """Converts a string macro argument based on the requested kind. Parameters ---------- raw_arg : str The str representation of the macro argument. kind : object A flag or type representing how to convert the argument. glbs : Mapping The globals from the call site. locs : Mapping or None The locals from the call site. name : str, optional The macro argument name. macroname : str, optional The name of the macro itself. Returns ------- The converted argument. """ # munge kind and mode to start mode = None if isinstance(kind, cabc.Sequence) and not isinstance(kind, str): # have (kind, mode) tuple kind, mode = kind if isinstance(kind, str): kind = _convert_kind_flag(kind) if kind is str or kind is None: return raw_arg # short circuit since there is nothing else to do # select from kind and convert execer = XSH.execer filename = macroname + "(" + name + ")" if kind is AST: ctx = set(dir(builtins)) | set(glbs.keys()) if locs is not None: ctx |= set(locs.keys()) mode = mode or "eval" if mode != "eval" and not raw_arg.endswith("\n"): raw_arg += "\n" arg = execer.parse(raw_arg, ctx, mode=mode, filename=filename) elif kind is types.CodeType or kind is compile: # NOQA mode = mode or "eval" arg = execer.compile( raw_arg, mode=mode, glbs=glbs, locs=locs, filename=filename ) elif kind is eval: arg = execer.eval(raw_arg, glbs=glbs, locs=locs, filename=filename) elif kind is exec: mode = mode or "exec" if not raw_arg.endswith("\n"): raw_arg += "\n" arg = execer.exec(raw_arg, mode=mode, glbs=glbs, locs=locs, filename=filename) elif kind is type: arg = type(execer.eval(raw_arg, glbs=glbs, locs=locs, filename=filename)) else: msg = "kind={0!r} and mode={1!r} was not recognized for macro " "argument {2!r}" raise TypeError(msg.format(kind, mode, name)) return arg
Attaches macro globals and locals temporarily to function as a context manager. Parameters ---------- f : callable object The function that is called as ``f(*args)``. glbs : Mapping The globals from the call site. locs : Mapping or None The locals from the call site.
import contextlib


@contextlib.contextmanager
def in_macro_call(f, glbs, locs):
    """Attaches macro globals and locals temporarily to function as a
    context manager.

    Parameters
    ----------
    f : callable object
        The function that is called as ``f(*args)``.
    glbs : Mapping
        The globals from the call site.
    locs : Mapping or None
        The locals from the call site.
    """
    # NOTE: the @contextlib.contextmanager decorator is required here —
    # callers (e.g. call_macro) use this function in a ``with`` statement,
    # which a bare generator does not support.
    prev_glbs = getattr(f, "macro_globals", None)
    prev_locs = getattr(f, "macro_locals", None)
    f.macro_globals = glbs
    f.macro_locals = locs
    yield
    # restore (or remove) the previous macro attributes on normal exit
    if prev_glbs is None:
        del f.macro_globals
    else:
        f.macro_globals = prev_glbs
    if prev_locs is None:
        del f.macro_locals
    else:
        f.macro_locals = prev_locs
Calls a function as a macro, returning its result. Parameters ---------- f : callable object The function that is called as ``f(*args)``. raw_args : tuple of str The str representation of arguments of that were passed into the macro. These strings will be parsed, compiled, evaled, or left as a string depending on the annotations of f. glbs : Mapping The globals from the call site. locs : Mapping or None The locals from the call site.
def call_macro(f, raw_args, glbs, locs):
    """Calls a function as a macro, returning its result.

    Parameters
    ----------
    f : callable object
        The function that is called as ``f(*args)``.
    raw_args : tuple of str
        The str representation of arguments of that were passed into the
        macro. These strings will be parsed, compiled, evaled, or left as
        a string depending on the annotations of f.
    glbs : Mapping
        The globals from the call site.
    locs : Mapping or None
        The locals from the call site.
    """
    sig = inspect.signature(f)
    empty = inspect.Parameter.empty
    macroname = f.__name__
    i = 0  # number of raw args consumed as macro args
    args = []
    # convert each raw argument according to its parameter annotation;
    # a literal "*" marks the end of macro-style arguments
    for (key, param), raw_arg in zip(sig.parameters.items(), raw_args):
        i += 1
        if raw_arg == "*":
            break
        kind = param.annotation
        if kind is empty or kind is None:
            kind = str  # unannotated params receive the raw string
        arg = convert_macro_arg(
            raw_arg, kind, glbs, locs, name=key, macroname=macroname
        )
        args.append(arg)
    # everything after "*" is evaluated as regular (non-macro) args/kwargs
    reg_args, kwargs = _eval_regular_args(raw_args[i:], glbs, locs)
    args += reg_args
    with in_macro_call(f, glbs, locs):
        rtn = f(*args, **kwargs)
    return rtn
Tests if a string starts as a non-kwarg string would.
def _starts_as_arg(s):
    """Tests if a string starts as a non-kwarg string would."""
    match = KWARG_RE.match(s)
    return match is None
Prepares to enter a context manager macro by attaching the contents of the macro block, globals, and locals to the object. These modifications are made in-place and the original object is returned. Parameters ---------- obj : context manager The object that is about to be entered via a with-statement. raw_block : str The str of the block that is the context body. This string will be parsed, compiled, evaled, or left as a string depending on the return annotation of obj.__enter__. glbs : Mapping The globals from the context site. locs : Mapping or None The locals from the context site. Returns ------- obj : context manager The same context manager but with the new macro information applied.
def enter_macro(obj, raw_block, glbs, locs):
    """Prepares to enter a context manager macro by attaching the contents
    of the macro block, globals, and locals to the object. These modifications
    are made in-place and the original object is returned.

    Parameters
    ----------
    obj : context manager
        The object that is about to be entered via a with-statement.
    raw_block : str
        The str of the block that is the context body.
        This string will be parsed, compiled, evaled, or left as
        a string depending on the return annotation of obj.__enter__.
    glbs : Mapping
        The globals from the context site.
    locs : Mapping or None
        The locals from the context site.

    Returns
    -------
    obj : context manager
        The same context manager but with the new macro information applied.
    """
    # recurse down sequences
    if isinstance(obj, cabc.Sequence):
        for item in obj:
            enter_macro(item, raw_block, glbs, locs)
        return obj
    # convert block as needed
    kind = getattr(obj, "__xonsh_block__", str)
    macroname = getattr(obj, "__name__", "<context>")
    block = convert_macro_arg(
        raw_block, kind, glbs, locs, name="<with!>", macroname=macroname
    )
    # attach attrs
    obj.macro_block = block
    obj.macro_globals = glbs
    obj.macro_locals = locs
    return obj
A context manager for using the xonsh builtins only in a limited scope. Likely useful in testing.
def xonsh_builtins(execer=None):
    """A context manager for using the xonsh builtins only in a limited scope.
    Likely useful in testing.
    """
    # NOTE(review): this generator is described as a context manager but no
    # @contextlib.contextmanager decorator is visible in this chunk —
    # confirm the decorator exists at the definition site upstream.
    XSH.load(execer=execer)
    yield
    XSH.unload()
Using the function's annotation add arguments to the parser basically converts ``def fn(param : Arg(*args, **kw), ...): ...`` -> into equivalent ``parser.add_argument(*args, *kw)`` call.
def add_args(
    parser: ap.ArgumentParser,
    func: tp.Callable,
    allowed_params=None,
    doc=None,
) -> None:
    """Using the function's annotation add arguments to the parser
    basically converts ``def fn(param : Arg(*args, **kw), ...): ...``
    -> into equivalent ``parser.add_argument(*args, *kw)`` call.
    """
    # call this function when this sub-command is selected
    parser.set_defaults(**{_FUNC_NAME: func})
    doc = doc or NumpyDoc(func, parser.prefix_chars)
    sign = inspect.signature(func)
    for name, param in sign.parameters.items():
        # skip private params and (when restricted) non-allowed ones
        if name.startswith("_") or (
            allowed_params is not None and name not in allowed_params
        ):
            continue
        flags, kwargs = _get_args_kwargs(param.annotation)
        if (not flags) and (name in doc.flags):
            # load from docstring
            flags = doc.flags.get(name)
        if flags:
            # optional argument. eg. --option
            kwargs.setdefault("dest", name)
        else:
            # positional argument
            flags = [name]
            # checks for optional positional arg
            if (
                (inspect.Parameter.empty != param.default)
                and (param.default is None)
                and ("nargs" not in kwargs)
                and ("action" not in kwargs)
            ):
                kwargs.setdefault("nargs", "?")
        if inspect.Parameter.empty != param.default:
            kwargs.setdefault("default", param.default)
            # for booleans set action automatically
            if (
                flags
                and isinstance(param.default, bool)
                and ("action" not in kwargs)
                and ("type" not in kwargs)
            ):
                # opposite of default value
                act_name = "store_false" if param.default else "store_true"
                kwargs.setdefault("action", act_name)
        # help can be set by passing help argument otherwise inferred from docstring
        kwargs.setdefault("help", doc.params.get(name))
        completer = kwargs.pop("completer", None)
        action = parser.add_argument(*flags, **kwargs)
        if completer:
            action.completer = completer  # type: ignore
        action.help = action.help or ""
        # Don't show default when
        # 1. None : No value is given for the option
        # 2. bool : in case of flags the default is opposite of the flag's meaning
        if (
            action.default
            and (not isinstance(action.default, bool))
            and ("%(default)s" not in action.help)
        ):
            action.help += os.linesep + " (default: '%(default)s')"
        if action.type and "%(type)s" not in action.help:
            action.help += " (type: %(type)s)"
A bare-bones argparse builder from functions
def make_parser(
    func: tp.Union[tp.Callable, str],
    empty_help=False,
    **kwargs,
) -> "ArgParser":
    """A bare-bones argparse builder from functions"""
    doc = NumpyDoc(func)
    if "description" not in kwargs:
        kwargs["description"] = doc.description
    if "epilog" not in kwargs and doc.epilog:
        kwargs["epilog"] = doc.epilog
    parser = ArgParser(**kwargs)
    if empty_help:
        # show help when the command is invoked with no sub-command
        parser.set_defaults(**{_FUNC_NAME: empty_help_func})
    return parser
Final dispatch to the function based on signature.
def _dispatch_func(func: tp.Callable, ns: dict[str, tp.Any]): """Final dispatch to the function based on signature.""" sign = inspect.signature(func) kwargs = {} for name, param in sign.parameters.items(): default = None # sometimes the args are skipped in the parser. # like ones having _ prefix(private to the function), or some special cases like exclusive group. # it is better to fill the defaults from paramspec when available. if param.default != inspect.Parameter.empty: default = param.default kwargs[name] = ns.get(name, default) return func(**kwargs)
Call the underlying function with arguments parsed from sys.argv Parameters ---------- parser root parser args sys.argv as parsed by Alias lenient if True, then use parser_know_args and pass the extra arguments as `_unparsed` ns a dict that will be passed to underlying function
def dispatch(parser: ap.ArgumentParser, args=None, lenient=False, **ns): """Call the underlying function with arguments parsed from sys.argv Parameters ---------- parser root parser args sys.argv as parsed by Alias lenient if True, then use parser_know_args and pass the extra arguments as `_unparsed` ns a dict that will be passed to underlying function """ ns.setdefault("_parser", parser) ns.setdefault("_args", args) if lenient: parsed, unparsed = parser.parse_known_args(args) ns["_unparsed"] = unparsed else: parsed = parser.parse_args(args) ns["_parsed"] = parsed ns.update(vars(parsed)) func = ns[_FUNC_NAME] return _dispatch_func(func, ns)
Return ``True`` if caching has been enabled for this mode (through command line flags or environment variables)
def should_use_cache(execer, mode): """ Return ``True`` if caching has been enabled for this mode (through command line flags or environment variables) """ if mode == "exec": return (execer.scriptcache or execer.cacheall) and ( XSH.env["XONSH_CACHE_SCRIPTS"] or XSH.env["XONSH_CACHE_EVERYTHING"] ) else: return execer.cacheall or XSH.env["XONSH_CACHE_EVERYTHING"]
Helper to run code in a given mode and context. Returns a sys.exc_info() triplet in case the code raises an exception, or (None, None, None) otherwise.
def run_compiled_code(code, glb, loc, mode): """ Helper to run code in a given mode and context. Returns a sys.exc_info() triplet in case the code raises an exception, or (None, None, None) otherwise. """ if code is None: return if mode in {"exec", "single"}: func = exec else: func = eval try: func(code, glb, loc) return (None, None, None) except BaseException: type, value, traceback = sys.exc_info() # strip off the current frame as the traceback should only show user code traceback = traceback.tb_next return type, value, traceback
Return the filename of the cache for the given filename. Cache filenames are similar to those used by the Mercurial DVCS for its internal store. The ``code`` switch should be true if we should use the code store rather than the script store.
def get_cache_filename(fname, code=True): """ Return the filename of the cache for the given filename. Cache filenames are similar to those used by the Mercurial DVCS for its internal store. The ``code`` switch should be true if we should use the code store rather than the script store. """ datadir = XSH.env["XONSH_DATA_DIR"] cachedir = os.path.join( datadir, "xonsh_code_cache" if code else "xonsh_script_cache" ) cachefname = os.path.join(cachedir, *_cache_renamer(fname, code=code)) return cachefname
Update the cache at ``cache_file_name`` to contain the compiled code represented by ``ccode``.
def update_cache(ccode, cache_file_name): """ Update the cache at ``cache_file_name`` to contain the compiled code represented by ``ccode``. """ if cache_file_name is not None: os.makedirs(os.path.dirname(cache_file_name), exist_ok=True) with open(cache_file_name, "wb") as cfile: cfile.write(XONSH_VERSION.encode() + b"\n") cfile.write(bytes(PYTHON_VERSION_INFO_BYTES) + b"\n") marshal.dump(ccode, cfile)
Wrapper for ``execer.compile`` to compile the given code
def compile_code(filename, code, execer, glb, loc, mode): """ Wrapper for ``execer.compile`` to compile the given code """ if filename.endswith(".py") and mode == "exec": return compile(code, filename, mode) if not code.endswith("\n"): code += "\n" old_filename = execer.filename try: execer.filename = filename ccode = execer.compile(code, glbs=glb, locs=loc, mode=mode, filename=filename) except Exception: raise finally: execer.filename = old_filename return ccode
Check whether the script cache for a particular file is valid. Returns a tuple containing: a boolean representing whether the cached code should be used, and the cached code (or ``None`` if the cache should not be used).
def script_cache_check(filename, cachefname): """ Check whether the script cache for a particular file is valid. Returns a tuple containing: a boolean representing whether the cached code should be used, and the cached code (or ``None`` if the cache should not be used). """ ccode = None run_cached = False if os.path.isfile(cachefname): if os.stat(cachefname).st_mtime >= os.stat(filename).st_mtime: with open(cachefname, "rb") as cfile: if not _check_cache_versions(cfile): return False, None ccode = marshal.load(cfile) run_cached = True return run_cached, ccode
Run a script, using a cached version if it exists (and the source has not changed), and updating the cache as necessary. See run_compiled_code for the return value.
def run_script_with_cache(filename, execer, glb=None, loc=None, mode="exec"): """ Run a script, using a cached version if it exists (and the source has not changed), and updating the cache as necessary. See run_compiled_code for the return value. """ run_cached = False use_cache = should_use_cache(execer, mode) cachefname = get_cache_filename(filename, code=False) if use_cache: run_cached, ccode = script_cache_check(filename, cachefname) if not run_cached: with open(filename, encoding="utf-8") as f: code = f.read() ccode = compile_code(filename, code, execer, glb, loc, mode) update_cache(ccode, cachefname) return run_compiled_code(ccode, glb, loc, mode)
Return an appropriate spoofed filename for the given code.
def code_cache_name(code): """ Return an appropriate spoofed filename for the given code. """ if isinstance(code, str): code = code.encode() return hashlib.md5(code).hexdigest()
Check whether the code cache for a particular piece of code is valid. Returns a tuple containing: a boolean representing whether the cached code should be used, and the cached code (or ``None`` if the cache should not be used).
def code_cache_check(cachefname): """ Check whether the code cache for a particular piece of code is valid. Returns a tuple containing: a boolean representing whether the cached code should be used, and the cached code (or ``None`` if the cache should not be used). """ ccode = None run_cached = False if os.path.isfile(cachefname): with open(cachefname, "rb") as cfile: if not _check_cache_versions(cfile): return False, None ccode = marshal.load(cfile) run_cached = True return run_cached, ccode
Run a piece of code, using a cached version if it exists, and updating the cache as necessary. See run_compiled_code for the return value.
def run_code_with_cache( code, display_filename, execer, glb=None, loc=None, mode="exec" ): """ Run a piece of code, using a cached version if it exists, and updating the cache as necessary. See run_compiled_code for the return value. """ use_cache = should_use_cache(execer, mode) filename = code_cache_name(code) cachefname = get_cache_filename(filename, code=True) run_cached = False if use_cache: run_cached, ccode = code_cache_check(cachefname) if not run_cached: ccode = compile_code(display_filename, code, execer, glb, loc, mode) update_cache(ccode, cachefname) return run_compiled_code(ccode, glb, loc, mode)
These are the minimum number of colors that need to be implemented by any style.
def KNOWN_XONSH_COLORS(): """These are the minimum number of colors that need to be implemented by any style. """ return frozenset( [ "DEFAULT", "BLACK", "RED", "GREEN", "YELLOW", "BLUE", "PURPLE", "CYAN", "WHITE", "INTENSE_BLACK", "INTENSE_RED", "INTENSE_GREEN", "INTENSE_YELLOW", "INTENSE_BLUE", "INTENSE_PURPLE", "INTENSE_CYAN", "INTENSE_WHITE", ] )
Tests if a string is a valid color
def iscolor(s): """Tests if a string is a valid color""" return RE_XONSH_COLOR.match(s) is not None
color look-up table
def CLUT():
    """color look-up table

    Maps each xterm-256 color index (as a decimal string) to its RGB hex
    value.  Instead of hard-coding all 256 entries, the table is generated
    from the structure that defines it:

    * 0-15   : the classic system colors (not derivable from a formula)
    * 16-231 : a 6x6x6 color cube over the component levels
               00, 5f, 87, af, d7, ff (strictly ascending)
    * 232-255: a 24-step gray-scale ramp from 08 to ee (step 0x0a)

    Returns
    -------
    list of (str, str)
        ``(index, rgb_hex)`` pairs, in index order.
    """
    # Primary 3-bit (8 colors) plus their "bright" versions; these 16 have
    # no closed-form rule, so they stay hard-coded.
    table = [
        ("0", "000000"), ("1", "800000"), ("2", "008000"), ("3", "808000"),
        ("4", "000080"), ("5", "800080"), ("6", "008080"), ("7", "c0c0c0"),
        ("8", "808080"), ("9", "ff0000"), ("10", "00ff00"), ("11", "ffff00"),
        ("12", "0000ff"), ("13", "ff00ff"), ("14", "00ffff"), ("15", "ffffff"),
    ]
    # 6x6x6 color cube, indices 16-231.
    incs = (0x00, 0x5F, 0x87, 0xAF, 0xD7, 0xFF)
    for i in range(216):
        r, g, b = incs[i // 36], incs[(i // 6) % 6], incs[i % 6]
        table.append((str(16 + i), f"{r:02x}{g:02x}{b:02x}"))
    # Gray-scale range, indices 232-255.
    for i in range(24):
        gray = 8 + 10 * i
        table.append((str(232 + i), f"{gray:02x}" * 3))
    return table
Find the closest ANSI 256 approximation to the given RGB value. >>> rgb2short('123456') ('23', '005f5f') >>> rgb2short('ffffff') ('231', 'ffffff') >>> rgb2short('0DADD6') # vimeo logo ('38', '00afd7') Parameters ---------- rgb : Hex code representing an RGB value, eg, 'abcdef' Returns ------- Tuple of String between 0 and 255 (compatible with xterm) and hex code (length-6).
def rgb_to_256(rgb): """Find the closest ANSI 256 approximation to the given RGB value. >>> rgb2short('123456') ('23', '005f5f') >>> rgb2short('ffffff') ('231', 'ffffff') >>> rgb2short('0DADD6') # vimeo logo ('38', '00afd7') Parameters ---------- rgb : Hex code representing an RGB value, eg, 'abcdef' Returns ------- Tuple of String between 0 and 255 (compatible with xterm) and hex code (length-6). """ rgb = rgb.lstrip("#") if len(rgb) == 0: return "0", "000000" incs = (0x00, 0x5F, 0x87, 0xAF, 0xD7, 0xFF) # Break 6-char RGB code into 3 integer vals. parts = rgb_to_ints(rgb) res = [] for part in parts: i = 0 while i < len(incs) - 1: s, b = incs[i], incs[i + 1] # smaller, bigger if s <= part <= b: s1 = abs(s - part) b1 = abs(b - part) if s1 < b1: closest = s else: closest = b res.append(closest) break i += 1 res = "".join([f"{i:02x}" for i in res]) equiv = RGB_TO_SHORT[res] return equiv, res
Converts a short (256) color to a 3-tuple of ints.
def short_to_ints(short): """Coverts a short (256) color to a 3-tuple of ints.""" return rgb_to_ints(short2rgb(short))
Makes a color palette from a collection of strings.
def make_palette(strings): """Makes a color palette from a collection of strings.""" palette = {} for s in strings: while "#" in s: _, t = s.split("#", 1) t, _, s = t.partition(" ") palette[t] = rgb_to_ints(t) return palette
Show a warning once if NO_COLOR was used instead of RESET.
def warn_deprecated_no_color(): """Show a warning once if NO_COLOR was used instead of RESET.""" global _NO_COLOR_WARNING_SHOWN if not _NO_COLOR_WARNING_SHOWN: print_warning("NO_COLOR is deprecated and should be replaced with RESET.") _NO_COLOR_WARNING_SHOWN = True
Always say the process is threadable.
def predict_true(_, __): """Always say the process is threadable.""" return True
Never say the process is threadable.
def predict_false(_, __): """Never say the process is threadable.""" return False
Predict the backgroundability of the normal shell interface, which comes down to whether it is being run in subproc mode.
def predict_shell(args, _): """Predict the backgroundability of the normal shell interface, which comes down to whether it is being run in subproc mode. """ ns, _ = SHELL_PREDICTOR_PARSER.parse_known_args(args) if ns.c is None and ns.filename is None: pred = False else: pred = True return pred
Predict the backgroundability of commands that have help & version switches: -h, --help, -v, -V, --version. If either of these options is present, the command is assumed to print to stdout normally and is therefore threadable. Otherwise, the command is assumed to not be threadable. This is useful for commands, like top, that normally enter alternate mode but may not in certain circumstances.
def predict_help_ver(args, _): """Predict the backgroundability of commands that have help & version switches: -h, --help, -v, -V, --version. If either of these options is present, the command is assumed to print to stdout normally and is therefore threadable. Otherwise, the command is assumed to not be threadable. This is useful for commands, like top, that normally enter alternate mode but may not in certain circumstances. """ ns, _ = HELP_VER_PREDICTOR_PARSER.parse_known_args(args) pred = ns.help is not None or ns.version is not None return pred
Predict if mercurial is about to be run in interactive mode. If it is interactive, predict False. If it isn't, predict True. Also predict False for certain commands, such as split.
def predict_hg(args, _): """Predict if mercurial is about to be run in interactive mode. If it is interactive, predict False. If it isn't, predict True. Also predict False for certain commands, such as split. """ ns, _ = HG_PREDICTOR_PARSER.parse_known_args(args) if ns.command == "split": return False else: return not ns.interactive
Predict if env is launching a threadable command or not. The launched command is extracted from env args, and the predictor of the launched command is used.
def predict_env(args, cmd_cache: CommandsCache): """Predict if env is launching a threadable command or not. The launched command is extracted from env args, and the predictor of lauched command is used.""" for i in range(len(args)): if args[i] and args[i][0] != "-" and "=" not in args[i]: # args[i] is the command and the following is its arguments # so args[i:] is used to predict if the command is threadable return cmd_cache.predict_threadable(args[i:]) return True
Generates a new defaultdict for known threadable predictors. The default is to predict true.
def default_threadable_predictors():
    """Generates a new defaultdict for known threadable predictors.
    The default is to predict true.

    NOTE(review): the body returns a plain ``dict``; presumably the caller
    wraps it into a defaultdict — confirm against the call site.
    """
    # alphabetical, for what it is worth.
    predictors = {
        "asciinema": predict_help_ver, "aurman": predict_false,
        "awk": predict_true, "bash": predict_shell,
        "cat": predict_false, "clear": predict_false,
        "cls": predict_false, "cmd": predict_shell,
        "cryptop": predict_false, "cryptsetup": predict_true,
        "csh": predict_shell, "curl": predict_true,
        "elvish": predict_shell, "emacsclient": predict_false,
        "env": predict_env, "ex": predict_false,
        "fish": predict_shell, "gawk": predict_true,
        "ghci": predict_help_ver, "git": predict_true,
        "gvim": predict_help_ver, "hg": predict_hg,
        "htop": predict_help_ver, "ipython": predict_shell,
        "julia": predict_shell, "ksh": predict_shell,
        "less": predict_help_ver, "ls": predict_true,
        "man": predict_help_ver, "mc": predict_false,
        "more": predict_help_ver, "mutt": predict_help_ver,
        "mvim": predict_help_ver, "nano": predict_help_ver,
        "nmcli": predict_true, "nvim": predict_false,
        "percol": predict_false, "ponysay": predict_help_ver,
        "psql": predict_false, "push": predict_shell,
        "pv": predict_false, "python": predict_shell,
        "python2": predict_shell, "python3": predict_shell,
        "ranger": predict_help_ver, "repo": predict_help_ver,
        "rview": predict_false, "rvim": predict_false,
        "rwt": predict_shell, "scp": predict_false,
        "sh": predict_shell, "ssh": predict_false,
        "startx": predict_false, "sudo": predict_help_ver,
        "sudoedit": predict_help_ver, "systemctl": predict_true,
        "tcsh": predict_shell, "telnet": predict_false,
        "top": predict_help_ver, "tput": predict_false,
        "udisksctl": predict_true, "unzip": predict_true,
        "vi": predict_false, "view": predict_false,
        "vim": predict_false, "vimpager": predict_help_ver,
        "weechat": predict_help_ver, "wget": predict_true,
        "xclip": predict_help_ver, "xdg-open": predict_false,
        "xo": predict_help_ver, "xon.sh": predict_shell,
        "xonsh": predict_shell, "yes": predict_false,
        "zip": predict_true, "zipinfo": predict_true,
        "zsh": predict_shell,
    }
    return predictors
Returns a highlighted string, with bold characters where different.
def highlighted_ndiff(a, b): """Returns a highlighted string, with bold characters where different.""" s = "" sm = difflib.SequenceMatcher() sm.set_seqs(a, b) linesm = difflib.SequenceMatcher() for tag, i1, i2, j1, j2 in sm.get_opcodes(): if tag == REPLACE_S: for aline, bline in itertools.zip_longest(a[i1:i2], b[j1:j2]): if bline is None: s += redline(aline) elif aline is None: s += greenline(bline) else: s += bold_str_diff(aline, bline, sm=linesm) elif tag == DELETE_S: for aline in a[i1:i2]: s += redline(aline) elif tag == INSERT_S: for bline in b[j1:j2]: s += greenline(bline) elif tag == EQUAL_S: for aline in a[i1:i2]: s += " " + aline + "\n" else: raise RuntimeError("tag not understood") return s
Check whether CMD.EXE is enforcing no-UNC-as-working-directory check. Check can be disabled by setting {HKCU, HKLM}/SOFTWARE\Microsoft\Command Processor\DisableUNCCheck:REG_DWORD=1 Returns: True if `CMD.EXE` is enforcing the check (default Windows situation) False if check is explicitly disabled.
def _unc_check_enabled() -> bool: r"""Check whether CMD.EXE is enforcing no-UNC-as-working-directory check. Check can be disabled by setting {HKCU, HKLM}/SOFTWARE\Microsoft\Command Processor\DisableUNCCheck:REG_DWORD=1 Returns: True if `CMD.EXE` is enforcing the check (default Windows situation) False if check is explicitly disabled. """ if not ON_WINDOWS: return False import winreg wval = _query_win_reg_key( winreg.HKEY_CURRENT_USER, r"software\microsoft\command processor", "DisableUNCCheck", ) if wval is None: wval = _query_win_reg_key( winreg.HKEY_LOCAL_MACHINE, r"software\microsoft\command processor", "DisableUNCCheck", ) return False if wval else True
True if path starts with 2 backward (or forward, due to python path hacking) slashes.
def _is_unc_path(some_path) -> bool: """True if path starts with 2 backward (or forward, due to python path hacking) slashes.""" return ( len(some_path) > 1 and some_path[0] == some_path[1] and some_path[0] in (os.sep, os.altsep) )
Map a new temporary drive letter for each distinct share, unless `CMD.EXE` is not insisting on non-UNC working directory. Emulating behavior of `CMD.EXE` `pushd`, create a new mapped drive (starting from Z: towards A:, skipping existing drive letters) for each new UNC path user selects. Args: unc_path: the path specified by user. Assumed to be a UNC path of form \\<server>\share... Returns: a replacement for `unc_path` to be used as the actual new working directory. Note that the drive letter may be a the same as one already mapped if the server and share portion of `unc_path` is the same as one still active on the stack.
def _unc_map_temp_drive(unc_path) -> str: r"""Map a new temporary drive letter for each distinct share, unless `CMD.EXE` is not insisting on non-UNC working directory. Emulating behavior of `CMD.EXE` `pushd`, create a new mapped drive (starting from Z: towards A:, skipping existing drive letters) for each new UNC path user selects. Args: unc_path: the path specified by user. Assumed to be a UNC path of form \\<server>\share... Returns: a replacement for `unc_path` to be used as the actual new working directory. Note that the drive letter may be a the same as one already mapped if the server and share portion of `unc_path` is the same as one still active on the stack. """ global _unc_tempDrives assert unc_path[1] in (os.sep, os.altsep), "unc_path is UNC form of path" if not _unc_check_enabled(): return unc_path unc_share, rem_path = os.path.splitdrive(unc_path) unc_share = unc_share.casefold() for d in _unc_tempDrives: if _unc_tempDrives[d] == unc_share: return os.path.join(d, rem_path) for dord in range(ord("z"), ord("a"), -1): d = chr(dord) + ":" if not os.path.isdir(d): # find unused drive letter starting from z: subprocess.check_output(["NET", "USE", d, unc_share], text=True) _unc_tempDrives[d] = unc_share return os.path.join(d, rem_path) raise RuntimeError(f"Failed to find a drive for UNC Path({unc_path})")
Unmap a temporary drive letter if it is no longer needed. Called after popping `DIRSTACK` and changing to new working directory, so we need stack *and* new current working directory to be sure drive letter no longer needed. Args: left_drive: driveletter (and colon) of working directory we just left cwd: full path of new current working directory
def _unc_unmap_temp_drive(left_drive, cwd): """Unmap a temporary drive letter if it is no longer needed. Called after popping `DIRSTACK` and changing to new working directory, so we need stack *and* new current working directory to be sure drive letter no longer needed. Args: left_drive: driveletter (and colon) of working directory we just left cwd: full path of new current working directory """ global _unc_tempDrives if left_drive not in _unc_tempDrives: # if not one we've mapped, don't unmap it return for p in DIRSTACK + [cwd]: # if still in use , don't unmap it. if p.casefold().startswith(left_drive): return _unc_tempDrives.pop(left_drive) subprocess.check_output(["NET", "USE", left_drive, "/delete"], text=True)
Changes the directory. If no directory is specified (i.e. if `args` is None) then this changes to the current user's home directory.
def cd(args, stdin=None): """Changes the directory. If no directory is specified (i.e. if `args` is None) then this changes to the current user's home directory. """ env = XSH.env oldpwd = env.get("OLDPWD", None) cwd = env["PWD"] follow_symlinks = False if len(args) > 0 and args[0] == "-P": follow_symlinks = True del args[0] if len(args) == 0: d = env.get("HOME", os.path.expanduser("~")) elif len(args) == 1: d = os.path.expanduser(args[0]) if not os.path.isdir(d): if d == "-": if oldpwd is not None: d = oldpwd else: return "", "cd: no previous directory stored\n", 1 elif d.startswith("-"): try: num = int(d[1:]) except ValueError: return "", f"cd: Invalid destination: {d}\n", 1 if num == 0: return None, None, 0 elif num < 0: return "", f"cd: Invalid destination: {d}\n", 1 elif num > len(DIRSTACK): e = "cd: Too few elements in dirstack ({0} elements)\n" return "", e.format(len(DIRSTACK)), 1 else: d = DIRSTACK[num - 1] else: d = _try_cdpath(d) else: return ( "", ( f"cd takes 0 or 1 arguments, not {len(args)}. An additional `-P` " "flag can be passed in first position to follow symlinks." "\n" ), 1, ) if not os.path.exists(d): return "", f"cd: no such file or directory: {d}\n", 1 if not os.path.isdir(d): return "", f"cd: {d} is not a directory\n", 1 if not os.access(d, os.X_OK): return "", f"cd: permission denied: {d}\n", 1 if ( ON_WINDOWS and _is_unc_path(d) and _unc_check_enabled() and (not env.get("AUTO_PUSHD")) ): return ( "", "cd: can't cd to UNC path on Windows, unless $AUTO_PUSHD set or reg entry " + r"HKCU\SOFTWARE\MICROSOFT\Command Processor\DisableUNCCheck:DWORD = 1" + "\n", 1, ) # now, push the directory onto the dirstack if AUTO_PUSHD is set if cwd is not None and env.get("AUTO_PUSHD"): pushd(["-n", "-q", cwd]) if ON_WINDOWS and _is_unc_path(d): d = _unc_map_temp_drive(d) _change_working_directory(d, follow_symlinks) return None, None, 0
Adds a directory to the top of the directory stack, or rotates the stack, making the new top of the stack the current working directory. On Windows, if the path is a UNC path (begins with `\\<server>\<share>`) and if the `DisableUNCCheck` registry value is not enabled, creates a temporary mapped drive letter and sets the working directory there, emulating behavior of `PUSHD` in `CMD.EXE` Parameters ---------- dir_or_n * dir : Makes dir be the top of the stack, making it the new current directory as if it had been supplied as an argument to the cd builtin. * +N : Brings the Nth directory (counting from the left of the list printed by dirs, starting with zero) to the top of the list by rotating the stack. * -N : Brings the Nth directory (counting from the right of the list printed by dirs, starting with zero) to the top of the list by rotating the stack. cd : -n, --cd Suppresses the normal change of directory when adding directories to the stack, so that only the stack is manipulated. quiet : -q, --quiet Do not call dirs, regardless of $PUSHD_SILENT
def pushd_fn(
    dir_or_n: Annotated[tp.Optional[str], Arg(metavar="+N|-N|dir", nargs="?")] = None,
    cd=True,
    quiet=False,
):
    # NOTE: this docstring is parsed at runtime (NumpyDoc) to build the CLI
    # help and option flags — keep its wording/structure intact.
    r"""Adds a directory to the top of the directory stack, or rotates the stack,
    making the new top of the stack the current working directory.

    On Windows, if the path is a UNC path (begins with `\\<server>\<share>`) and if the `DisableUNCCheck` registry
    value is not enabled, creates a temporary mapped drive letter and sets the working directory there, emulating
    behavior of `PUSHD` in `CMD.EXE`

    Parameters
    ----------
    dir_or_n
        * dir :
            Makes dir be the top of the stack,
            making it the new current directory as if it had been supplied as an argument to the cd builtin.
        * +N :
            Brings the Nth directory (counting from the left of the list printed by dirs, starting with zero)
            to the top of the list by rotating the stack.
        * -N :
            Brings the Nth directory (counting from the right of the list printed by dirs,
            starting with zero) to the top of the list by rotating the stack.
    cd : -n, --cd
        Suppresses the normal change of directory when adding directories to the stack,
        so that only the stack is manipulated.
    quiet : -q, --quiet
        Do not call dirs, regardless of $PUSHD_SILENT
    """
    global DIRSTACK

    env = XSH.env

    pwd = env["PWD"]

    # $PUSHD_MINUS swaps the meaning of the +N / -N prefixes
    if env.get("PUSHD_MINUS", False):
        BACKWARD = "-"
        FORWARD = "+"
    else:
        BACKWARD = "+"
        FORWARD = "-"

    if dir_or_n is None:
        # no argument: swap pwd with the top of the stack
        try:
            new_pwd: tp.Optional[str] = DIRSTACK.pop(0)
        except IndexError:
            e = "pushd: Directory stack is empty\n"
            return None, e, 1
    elif os.path.isdir(dir_or_n):
        new_pwd = dir_or_n
    else:
        # not a directory: interpret as a +N / -N stack rotation
        try:
            num = int(dir_or_n[1:])
        except ValueError:
            e = "Invalid argument to pushd: {0}\n"
            return None, e.format(dir_or_n), 1

        if num < 0:
            e = "Invalid argument to pushd: {0}\n"
            return None, e.format(dir_or_n), 1

        if num > len(DIRSTACK):
            e = "Too few elements in dirstack ({0} elements)\n"
            return None, e.format(len(DIRSTACK)), 1
        elif dir_or_n.startswith(FORWARD):
            if num == len(DIRSTACK):
                # rotating to the current directory itself: nothing to move
                new_pwd = None
            else:
                new_pwd = DIRSTACK.pop(len(DIRSTACK) - 1 - num)
        elif dir_or_n.startswith(BACKWARD):
            if num == 0:
                new_pwd = None
            else:
                new_pwd = DIRSTACK.pop(num - 1)
        else:
            e = "Invalid argument to pushd: {0}\n"
            return None, e.format(dir_or_n), 1
    if new_pwd is not None:
        if ON_WINDOWS and _is_unc_path(new_pwd):
            new_pwd = _unc_map_temp_drive(new_pwd)
        if cd:
            # push the old pwd, then actually change directory
            DIRSTACK.insert(0, os.path.expanduser(pwd))
            _change_working_directory(new_pwd)
        else:
            # -n: only manipulate the stack
            DIRSTACK.insert(0, os.path.expanduser(new_pwd))

    # trim the stack to $DIRSTACK_SIZE
    maxsize = env.get("DIRSTACK_SIZE")
    if len(DIRSTACK) > maxsize:
        DIRSTACK = DIRSTACK[:maxsize]

    if not quiet and not env.get("PUSHD_SILENT"):
        return dirs([], None)

    return None, None, 0
When no arguments are given, popd removes the top directory from the stack and performs a cd to the new top directory. The elements are numbered from 0 starting at the first directory listed with ``dirs``; that is, popd is equivalent to popd +0. Parameters ---------- cd : -n, --cd Suppresses the normal change of directory when removing directories from the stack, so that only the stack is manipulated. nth Removes the Nth directory (counting from the left/right of the list printed by dirs w.r.t. -/+ prefix), starting with zero. quiet : -q, --quiet Do not call dirs, regardless of $PUSHD_SILENT
def popd_fn(
    nth: Annotated[tp.Optional[str], Arg(metavar="+N|-N", nargs="?")] = None,
    cd=True,
    quiet=False,
):
    """When no arguments are given, popd removes the top directory from the
    stack and performs a cd to the new top directory. The elements are
    numbered from 0 starting at the first directory listed with ``dirs``;
    that is, popd is equivalent to popd +0.

    Parameters
    ----------
    cd : -n, --cd
        Suppresses the normal change of directory when removing directories
        from the stack, so that only the stack is manipulated.
    nth
        Removes the Nth directory (counting from the left/right of the list
        printed by dirs w.r.t. -/+ prefix), starting with zero.
    quiet : -q, --quiet
        Do not call dirs, regardless of $PUSHD_SILENT

    Returns
    -------
    tuple
        ``(stdout, stderr, returncode)`` in the xonsh builtin-command
        convention; stdout comes from ``dirs`` unless silenced.
    """
    global DIRSTACK

    env = XSH.env

    # NOTE(review): both branches assign identical values, so $PUSHD_MINUS has
    # no effect here -- unlike pushd, where it swaps the +/- prefixes.  Kept
    # byte-identical to preserve current behavior; confirm intended semantics
    # before "fixing" this, since flipping it would change the default meaning
    # of +N/-N for existing users.
    if env.get("PUSHD_MINUS"):
        BACKWARD = "-"
        FORWARD = "+"
    else:
        BACKWARD = "-"
        FORWARD = "+"

    new_pwd: tp.Optional[str] = None
    if nth is None:
        # Bare popd: remove the stack top; we cd into it further below.
        try:
            new_pwd = DIRSTACK.pop(0)
        except IndexError:
            e = "popd: Directory stack is empty\n"
            return None, e, 1
    else:
        try:
            # Strip the +/- prefix; a bare number (empty nth[1:]) fails here.
            num = int(nth[1:])
        except ValueError:
            e = "Invalid argument to popd: {0}\n"
            return None, e.format(nth), 1
        if num < 0:
            e = "Invalid argument to popd: {0}\n"
            return None, e.format(nth), 1
        if num > len(DIRSTACK):
            e = "Too few elements in dirstack ({0} elements)\n"
            return None, e.format(len(DIRSTACK)), 1
        elif nth.startswith(FORWARD):
            if num == len(DIRSTACK):
                # The index refers to the current top of the stack -> cd.
                new_pwd = DIRSTACK.pop(0)
            else:
                # Remove an inner entry only; no directory change occurs.
                DIRSTACK.pop(len(DIRSTACK) - 1 - num)
        elif nth.startswith(BACKWARD):
            if num == 0:
                new_pwd = DIRSTACK.pop(0)
            else:
                DIRSTACK.pop(num - 1)
        else:
            e = "Invalid argument to popd: {0}\n"
            return None, e.format(nth), 1
    if new_pwd is not None:
        if cd:
            env = XSH.env
            pwd = env["PWD"]

            _change_working_directory(new_pwd)

            if ON_WINDOWS:
                # Release any temp drive mapped for a UNC path we just left.
                drive, rem_path = os.path.splitdrive(pwd)
                _unc_unmap_temp_drive(drive.casefold(), new_pwd)

    if not quiet and not env.get("PUSHD_SILENT"):
        return dirs([], None)

    return None, None, 0
Manage the list of currently remembered directories.

Parameters
----------
nth
    Displays the Nth directory (counting from the left/right according to +/- prefix respectively), starting with zero
clear : -c
    Clears the directory stack by deleting all of the entries.
print_long : -p
    Print the directory stack with one entry per line.
verbose : -v
    Print the directory stack with one entry per line,
    prefixing each entry with its index in the stack.
long : -l
    Produces a longer listing; the default listing format
    uses a tilde to denote the home directory.
def dirs_fn(
    nth: Annotated[tp.Optional[str], Arg(metavar="N", nargs="?")] = None,
    clear=False,
    print_long=False,
    verbose=False,
    long=False,
):
    """Manage the list of currently remembered directories.

    Parameters
    ----------
    nth
        Displays the Nth directory (counting from the left/right according
        to +/- prefix respectively), starting with zero
    clear : -c
        Clears the directory stack by deleting all of the entries.
    print_long : -p
        Print the directory stack with one entry per line.
    verbose : -v
        Print the directory stack with one entry per line,
        prefixing each entry with its index in the stack.
    long : -l
        Produces a longer listing; the default listing format
        uses a tilde to denote the home directory.

    Returns
    -------
    tuple
        ``(stdout, stderr, returncode)`` in the xonsh builtin-command
        convention.
    """
    global DIRSTACK
    env = XSH.env
    # The displayed stack includes the current directory at index 0.
    dirstack = [os.path.expanduser(env["PWD"])] + DIRSTACK

    # NOTE(review): both branches assign identical values, so $PUSHD_MINUS
    # currently has no effect here (pushd *does* swap the prefixes).  Kept
    # as-is to preserve behavior; confirm intended semantics before changing.
    if env.get("PUSHD_MINUS"):
        BACKWARD = "-"
        FORWARD = "+"
    else:
        BACKWARD = "-"
        FORWARD = "+"

    if clear:
        DIRSTACK = []
        return None, None, 0

    if long:
        o = dirstack
    else:
        # Default listing abbreviates the home directory with a tilde.
        d = os.path.expanduser("~")
        o = [i.replace(d, "~") for i in dirstack]

    if verbose:
        out = ""
        # Right-align indices to the width of the largest one.
        pad = len(str(len(o) - 1))
        for ix, e in enumerate(o):
            blanks = " " * (pad - len(str(ix)))
            out += f"\n{blanks}{ix} {e}"
        out = out[1:]
    elif print_long:
        out = "\n".join(o)
    else:
        out = " ".join(o)

    if nth is not None:
        try:
            # Strip the +/- prefix; a bare number (empty nth[1:]) fails here.
            num = int(nth[1:])
        except ValueError:
            e = "Invalid argument to dirs: {0}\n"
            return None, e.format(nth), 1
        if num < 0:
            e = "Invalid argument to dirs: {0}\n"
            # BUGFIX: report the offending argument (previously this
            # formatted len(o) -- the stack size -- into the message).
            return None, e.format(nth), 1
        if num >= len(o):
            e = "Too few elements in dirstack ({0} elements)\n"
            return None, e.format(len(o)), 1
        if nth.startswith(BACKWARD):
            idx = num
        elif nth.startswith(FORWARD):
            idx = len(o) - 1 - num
        else:
            e = "Invalid argument to dirs: {0}\n"
            return None, e.format(nth), 1
        out = o[idx]

    return out + "\n", None, 0
Use pushd as a context manager
def with_pushd(d):
    """Use pushd as a context manager.

    Pushes *d* on entry and pops it (restoring the previous directory) on
    exit, even if the body raises.

    NOTE(review): this is a generator function; it only works as a context
    manager when decorated with ``@contextlib.contextmanager`` (the decorator
    is not visible in this chunk) -- confirm it is present above this def.
    """
    pushd_fn(d)
    try:
        yield
    finally:
        # Always undo the pushd, even on exceptions in the with-body.
        popd_fn()
Creates a converter for a locale key.
def locale_convert(key):
    """Build a converter function that applies a value to the locale
    category named by *key* and returns the locale actually in effect.
    """

    def lc_converter(val):
        try:
            # KeyError (unknown category) is handled the same as a bad value,
            # so the lookup stays inside the try block.
            category = LOCALE_CATS[key]
            locale.setlocale(category, val)
            val = locale.setlocale(category)
        except (locale.Error, KeyError):
            warnings.warn(
                f"Failed to set locale {key!r} to {val!r}",
                RuntimeWarning,
                stacklevel=2,
            )
        return val

    return lc_converter
Converts value using to_bool_or_int() and sets this value as the execer's debug level.
def to_debug(x):
    """Convert *x* with to_bool_or_int() and mirror the result onto the
    execer's debug level (when an execer exists).
    """
    level = to_bool_or_int(x)
    execer = XSH.execer
    if execer is not None:
        execer.debug_level = level
    return level
Checks if an object is an instance of LsColors
def is_lscolors(x):
    """Return True when *x* is an ``LsColors`` instance, else False."""
    return isinstance(x, LsColors)
This ensures that the $LS_COLORS environment variable is in the environment. This fires exactly once upon the first time the ls command is called.
def ensure_ls_colors_in_env(spec=None, **kwargs):
    """Guarantee that $LS_COLORS exists in the environment.

    Fires exactly once, on the first run of the ls command, then removes
    itself from the event.
    """
    env = XSH.env
    if "LS_COLORS" not in env._d:
        # default_lscolors() inserts LS_COLORS into the env as a side effect
        default_lscolors(env)
    # one-shot handler: unregister after the first invocation
    events.on_pre_spec_run_ls.discard(ensure_ls_colors_in_env)
Decorator for making callable default values.
def default_value(f):
    """Decorator that marks *f* as a callable default value."""
    setattr(f, "_xonsh_callable_default", True)
    return f
Checks if a value is a callable default.
def is_callable_default(x):
    """Return whether *x* is a callable marked as a callable default."""
    if not callable(x):
        return False
    return getattr(x, "_xonsh_callable_default", False)
Ensures and returns the $XONSH_DATA_DIR
def xonsh_data_dir(env):
    """Return $XONSH_DATA_DIR (``$XDG_DATA_HOME/xonsh``), creating it if needed."""
    base = env.get("XDG_DATA_HOME")
    path = os.path.expanduser(os.path.join(base, "xonsh"))
    # create on first access so callers can rely on the directory existing
    os.makedirs(path, exist_ok=True)
    return path
Ensures and returns the $XONSH_CACHE_DIR
def xonsh_cache_dir(env):
    """Return $XONSH_CACHE_DIR (``$XDG_CACHE_HOME/xonsh``), creating it if needed."""
    base = env.get("XDG_CACHE_HOME")
    path = os.path.expanduser(os.path.join(base, "xonsh"))
    # create on first access so callers can rely on the directory existing
    os.makedirs(path, exist_ok=True)
    return path
``$XDG_CONFIG_HOME/xonsh``
def xonsh_config_dir(env):
    """``$XDG_CONFIG_HOME/xonsh`` -- created on first access."""
    base = env.get("XDG_CONFIG_HOME")
    path = os.path.expanduser(os.path.join(base, "xonsh"))
    os.makedirs(path, exist_ok=True)
    return path
On Windows: ``[%ProgramData%]`` (normally C:\ProgramData) - More Info: https://docs.microsoft.com/en-us/windows-hardware/customize/desktop/unattend/microsoft-windows-shell-setup-folderlocations-programdata On Linux and Unix based systemd it is the same as in open-desktop standard: ``['/usr/share', '/usr/local/share']``
def xdg_data_dirs(env):
    r"""
    On Windows: ``[%ProgramData%]`` (normally C:\ProgramData)
    - More Info:
    https://docs.microsoft.com/en-us/windows-hardware/customize/desktop/unattend/microsoft-windows-shell-setup-folderlocations-programdata

    On Linux and Unix based systems it is the same as in the
    freedesktop (XDG) standard:
    ``['/usr/share', '/usr/local/share']``
    """
    if ON_WINDOWS:
        return [os_environ["ProgramData"]]
    share = os.path.join("/usr", "share")
    local_share = os.path.join("/usr", "local", "share")
    return [share, local_share]
On Linux & Mac OSX: ``'/etc/xonsh'`` On Windows: ``'%ALLUSERSPROFILE%\\xonsh'``
def xonsh_sys_config_dir(env):
    """
    On Linux & Mac OSX: ``'/etc/xonsh'``
    On Windows: ``'%ALLUSERSPROFILE%\\\\xonsh'``
    """
    etc_path = os_environ["ALLUSERSPROFILE"] if ON_WINDOWS else "/etc"
    return os.path.join(etc_path, "xonsh")
Ensures and returns the $XONSHCONFIG
def xonshconfig(env):
    """Ensures and returns the $XONSHCONFIG (``$XONSH_CONFIG_DIR/config.json``)."""
    config_dir = env.get("XONSH_CONFIG_DIR")
    return os.path.join(config_dir, "config.json")