text (stringlengths 4-288k) | id (stringlengths 17-110) | metadata (dict) | __index_level_0__ (int64 0-47) |
---|---|---|---|
<svg width="15" height="60" viewBox="0 0 1792 7168" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<defs>
<g id="previous">
<path d="M1037 1395l102-102q19-19 19-45t-19-45l-307-307 307-307q19-19 19-45t-19-45l-102-102q-19-19-45-19t-45 19l-454 454q-19 19-19 45t19 45l454 454q19 19 45 19t45-19zm627-499q0 209-103 385.5t-279.5 279.5-385.5 103-385.5-103-279.5-279.5-103-385.5 103-385.5 279.5-279.5 385.5-103 385.5 103 279.5 279.5 103 385.5z"/>
</g>
<g id="next">
<path d="M845 1395l454-454q19-19 19-45t-19-45l-454-454q-19-19-45-19t-45 19l-102 102q-19 19-19 45t19 45l307 307-307 307q-19 19-19 45t19 45l102 102q19 19 45 19t45-19zm819-499q0 209-103 385.5t-279.5 279.5-385.5 103-385.5-103-279.5-279.5-103-385.5 103-385.5 279.5-279.5 385.5-103 385.5 103 279.5 279.5 103 385.5z"/>
</g>
</defs>
<use xlink:href="#previous" x="0" y="0" fill="#333333" />
<use xlink:href="#previous" x="0" y="1792" fill="#000000" />
<use xlink:href="#next" x="0" y="3584" fill="#333333" />
<use xlink:href="#next" x="0" y="5376" fill="#000000" />
</svg>
| Django-locallibrary/LocalLibrary/staticfiles/admin/img/calendar-icons.svg/0 | {
"file_path": "Django-locallibrary/LocalLibrary/staticfiles/admin/img/calendar-icons.svg",
"repo_id": "Django-locallibrary",
"token_count": 561
} | 5 |
<svg width="14" height="14" viewBox="0 0 1792 1792" xmlns="http://www.w3.org/2000/svg">
<path fill="#dd4646" d="M1490 1322q0 40-28 68l-136 136q-28 28-68 28t-68-28l-294-294-294 294q-28 28-68 28t-68-28l-136-136q-28-28-28-68t28-68l294-294-294-294q-28-28-28-68t28-68l136-136q28-28 68-28t68 28l294 294 294-294q28-28 68-28t68 28l136 136q28 28 28 68t-28 68l-294 294 294 294q28 28 28 68z"/>
</svg>
| Django-locallibrary/LocalLibrary/staticfiles/admin/img/icon-deletelink.svg/0 | {
"file_path": "Django-locallibrary/LocalLibrary/staticfiles/admin/img/icon-deletelink.svg",
"repo_id": "Django-locallibrary",
"token_count": 197
} | 6 |
<svg width="16" height="192" viewBox="0 0 1792 21504" xmlns="http://www.w3.org/2000/svg" xmlns:xlink="http://www.w3.org/1999/xlink">
<defs>
<g id="up">
<path d="M1412 895q0-27-18-45l-362-362-91-91q-18-18-45-18t-45 18l-91 91-362 362q-18 18-18 45t18 45l91 91q18 18 45 18t45-18l189-189v502q0 26 19 45t45 19h128q26 0 45-19t19-45v-502l189 189q19 19 45 19t45-19l91-91q18-18 18-45zm252 1q0 209-103 385.5t-279.5 279.5-385.5 103-385.5-103-279.5-279.5-103-385.5 103-385.5 279.5-279.5 385.5-103 385.5 103 279.5 279.5 103 385.5z"/>
</g>
<g id="down">
<path d="M1412 897q0-27-18-45l-91-91q-18-18-45-18t-45 18l-189 189v-502q0-26-19-45t-45-19h-128q-26 0-45 19t-19 45v502l-189-189q-19-19-45-19t-45 19l-91 91q-18 18-18 45t18 45l362 362 91 91q18 18 45 18t45-18l91-91 362-362q18-18 18-45zm252-1q0 209-103 385.5t-279.5 279.5-385.5 103-385.5-103-279.5-279.5-103-385.5 103-385.5 279.5-279.5 385.5-103 385.5 103 279.5 279.5 103 385.5z"/>
</g>
<g id="left">
<path d="M1408 960v-128q0-26-19-45t-45-19h-502l189-189q19-19 19-45t-19-45l-91-91q-18-18-45-18t-45 18l-362 362-91 91q-18 18-18 45t18 45l91 91 362 362q18 18 45 18t45-18l91-91q18-18 18-45t-18-45l-189-189h502q26 0 45-19t19-45zm256-64q0 209-103 385.5t-279.5 279.5-385.5 103-385.5-103-279.5-279.5-103-385.5 103-385.5 279.5-279.5 385.5-103 385.5 103 279.5 279.5 103 385.5z"/>
</g>
<g id="right">
<path d="M1413 896q0-27-18-45l-91-91-362-362q-18-18-45-18t-45 18l-91 91q-18 18-18 45t18 45l189 189h-502q-26 0-45 19t-19 45v128q0 26 19 45t45 19h502l-189 189q-19 19-19 45t19 45l91 91q18 18 45 18t45-18l362-362 91-91q18-18 18-45zm251 0q0 209-103 385.5t-279.5 279.5-385.5 103-385.5-103-279.5-279.5-103-385.5 103-385.5 279.5-279.5 385.5-103 385.5 103 279.5 279.5 103 385.5z"/>
</g>
<g id="clearall">
<path transform="translate(336, 336) scale(0.75)" d="M1037 1395l102-102q19-19 19-45t-19-45l-307-307 307-307q19-19 19-45t-19-45l-102-102q-19-19-45-19t-45 19l-454 454q-19 19-19 45t19 45l454 454q19 19 45 19t45-19zm627-499q0 209-103 385.5t-279.5 279.5-385.5 103-385.5-103-279.5-279.5-103-385.5 103-385.5 279.5-279.5 385.5-103 385.5 103 279.5 279.5 103 385.5z"/>
</g>
<g id="chooseall">
<path transform="translate(336, 336) scale(0.75)" d="M845 1395l454-454q19-19 19-45t-19-45l-454-454q-19-19-45-19t-45 19l-102 102q-19 19-19 45t19 45l307 307-307 307q-19 19-19 45t19 45l102 102q19 19 45 19t45-19zm819-499q0 209-103 385.5t-279.5 279.5-385.5 103-385.5-103-279.5-279.5-103-385.5 103-385.5 279.5-279.5 385.5-103 385.5 103 279.5 279.5 103 385.5z"/>
</g>
</defs>
<use xlink:href="#up" x="0" y="0" fill="#666666" />
<use xlink:href="#up" x="0" y="1792" fill="#447e9b" />
<use xlink:href="#down" x="0" y="3584" fill="#666666" />
<use xlink:href="#down" x="0" y="5376" fill="#447e9b" />
<use xlink:href="#left" x="0" y="7168" fill="#666666" />
<use xlink:href="#left" x="0" y="8960" fill="#447e9b" />
<use xlink:href="#right" x="0" y="10752" fill="#666666" />
<use xlink:href="#right" x="0" y="12544" fill="#447e9b" />
<use xlink:href="#clearall" x="0" y="14336" fill="#666666" />
<use xlink:href="#clearall" x="0" y="16128" fill="#447e9b" />
<use xlink:href="#chooseall" x="0" y="17920" fill="#666666" />
<use xlink:href="#chooseall" x="0" y="19712" fill="#447e9b" />
</svg>
| Django-locallibrary/LocalLibrary/staticfiles/admin/img/selector-icons.svg/0 | {
"file_path": "Django-locallibrary/LocalLibrary/staticfiles/admin/img/selector-icons.svg",
"repo_id": "Django-locallibrary",
"token_count": 1729
} | 7 |
'use strict';
{
const SelectBox = {
cache: {},
init: function(id) {
const box = document.getElementById(id);
SelectBox.cache[id] = [];
const cache = SelectBox.cache[id];
for (const node of box.options) {
cache.push({value: node.value, text: node.text, displayed: 1});
}
},
redisplay: function(id) {
// Repopulate HTML select box from cache
const box = document.getElementById(id);
const scroll_value_from_top = box.scrollTop;
box.innerHTML = '';
for (const node of SelectBox.cache[id]) {
if (node.displayed) {
const new_option = new Option(node.text, node.value, false, false);
// Shows a tooltip when hovering over the option
new_option.title = node.text;
box.appendChild(new_option);
}
}
box.scrollTop = scroll_value_from_top;
},
filter: function(id, text) {
// Redisplay the HTML select box, displaying only the choices containing ALL
// the words in text. (It's an AND search.)
const tokens = text.toLowerCase().split(/\s+/);
for (const node of SelectBox.cache[id]) {
node.displayed = 1;
const node_text = node.text.toLowerCase();
for (const token of tokens) {
if (!node_text.includes(token)) {
node.displayed = 0;
                        break; // Stop as soon as one token isn't found
}
}
}
SelectBox.redisplay(id);
},
delete_from_cache: function(id, value) {
let delete_index = null;
const cache = SelectBox.cache[id];
for (const [i, node] of cache.entries()) {
if (node.value === value) {
delete_index = i;
break;
}
}
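            // delete_index can remain null only if `value` is absent;
            // callers are expected to check cache_contains() first.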
cache.splice(delete_index, 1);
},
add_to_cache: function(id, option) {
SelectBox.cache[id].push({value: option.value, text: option.text, displayed: 1});
},
cache_contains: function(id, value) {
// Check if an item is contained in the cache
for (const node of SelectBox.cache[id]) {
if (node.value === value) {
return true;
}
}
return false;
},
move: function(from, to) {
const from_box = document.getElementById(from);
for (const option of from_box.options) {
const option_value = option.value;
if (option.selected && SelectBox.cache_contains(from, option_value)) {
SelectBox.add_to_cache(to, {value: option_value, text: option.text, displayed: 1});
SelectBox.delete_from_cache(from, option_value);
}
}
SelectBox.redisplay(from);
SelectBox.redisplay(to);
},
move_all: function(from, to) {
const from_box = document.getElementById(from);
for (const option of from_box.options) {
const option_value = option.value;
if (SelectBox.cache_contains(from, option_value)) {
SelectBox.add_to_cache(to, {value: option_value, text: option.text, displayed: 1});
SelectBox.delete_from_cache(from, option_value);
}
}
SelectBox.redisplay(from);
SelectBox.redisplay(to);
},
sort: function(id) {
SelectBox.cache[id].sort(function(a, b) {
a = a.text.toLowerCase();
b = b.text.toLowerCase();
if (a > b) {
return 1;
}
if (a < b) {
return -1;
}
return 0;
} );
},
select_all: function(id) {
const box = document.getElementById(id);
for (const option of box.options) {
option.selected = true;
}
}
};
window.SelectBox = SelectBox;
}
| Django-locallibrary/LocalLibrary/staticfiles/admin/js/SelectBox.js/0 | {
"file_path": "Django-locallibrary/LocalLibrary/staticfiles/admin/js/SelectBox.js",
"repo_id": "Django-locallibrary",
"token_count": 2346
} | 8 |
/*global SelectBox, interpolate*/
// Handles related-objects functionality: lookup link for raw_id_fields
// and Add Another links.
'use strict';
{
const $ = django.jQuery;
function showAdminPopup(triggeringLink, name_regexp, add_popup) {
const name = triggeringLink.id.replace(name_regexp, '');
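        // The stripped id doubles as the popup's window name, so dismiss
        // handlers can later locate the originating form element by id.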
const href = new URL(triggeringLink.href);
if (add_popup) {
href.searchParams.set('_popup', 1);
}
const win = window.open(href, name, 'height=500,width=800,resizable=yes,scrollbars=yes');
win.focus();
return false;
}
function showRelatedObjectLookupPopup(triggeringLink) {
return showAdminPopup(triggeringLink, /^lookup_/, true);
}
function dismissRelatedLookupPopup(win, chosenId) {
const name = win.name;
const elem = document.getElementById(name);
if (elem.classList.contains('vManyToManyRawIdAdminField') && elem.value) {
elem.value += ',' + chosenId;
} else {
document.getElementById(name).value = chosenId;
}
win.close();
}
function showRelatedObjectPopup(triggeringLink) {
return showAdminPopup(triggeringLink, /^(change|add|delete)_/, false);
}
function updateRelatedObjectLinks(triggeringLink) {
const $this = $(triggeringLink);
const siblings = $this.nextAll('.view-related, .change-related, .delete-related');
if (!siblings.length) {
return;
}
const value = $this.val();
if (value) {
siblings.each(function() {
const elm = $(this);
elm.attr('href', elm.attr('data-href-template').replace('__fk__', value));
});
} else {
siblings.removeAttr('href');
}
}
function dismissAddRelatedObjectPopup(win, newId, newRepr) {
const name = win.name;
const elem = document.getElementById(name);
if (elem) {
const elemName = elem.nodeName.toUpperCase();
if (elemName === 'SELECT') {
elem.options[elem.options.length] = new Option(newRepr, newId, true, true);
} else if (elemName === 'INPUT') {
if (elem.classList.contains('vManyToManyRawIdAdminField') && elem.value) {
elem.value += ',' + newId;
} else {
elem.value = newId;
}
}
// Trigger a change event to update related links if required.
$(elem).trigger('change');
} else {
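            // No element with that id: assume a SelectFilter widget and
            // register the new option in the "to" box's SelectBox cache.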
const toId = name + "_to";
const o = new Option(newRepr, newId);
SelectBox.add_to_cache(toId, o);
SelectBox.redisplay(toId);
}
win.close();
}
function dismissChangeRelatedObjectPopup(win, objId, newRepr, newId) {
const id = win.name.replace(/^edit_/, '');
const selectsSelector = interpolate('#%s, #%s_from, #%s_to', [id, id, id]);
const selects = $(selectsSelector);
selects.find('option').each(function() {
if (this.value === objId) {
this.textContent = newRepr;
this.value = newId;
}
});
selects.next().find('.select2-selection__rendered').each(function() {
// The element can have a clear button as a child.
// Use the lastChild to modify only the displayed value.
this.lastChild.textContent = newRepr;
this.title = newRepr;
});
win.close();
}
function dismissDeleteRelatedObjectPopup(win, objId) {
const id = win.name.replace(/^delete_/, '');
const selectsSelector = interpolate('#%s, #%s_from, #%s_to', [id, id, id]);
const selects = $(selectsSelector);
selects.find('option').each(function() {
if (this.value === objId) {
$(this).remove();
}
}).trigger('change');
win.close();
}
window.showRelatedObjectLookupPopup = showRelatedObjectLookupPopup;
window.dismissRelatedLookupPopup = dismissRelatedLookupPopup;
window.showRelatedObjectPopup = showRelatedObjectPopup;
window.updateRelatedObjectLinks = updateRelatedObjectLinks;
window.dismissAddRelatedObjectPopup = dismissAddRelatedObjectPopup;
window.dismissChangeRelatedObjectPopup = dismissChangeRelatedObjectPopup;
window.dismissDeleteRelatedObjectPopup = dismissDeleteRelatedObjectPopup;
// Kept for backward compatibility
window.showAddAnotherPopup = showRelatedObjectPopup;
window.dismissAddAnotherPopup = dismissAddRelatedObjectPopup;
$(document).ready(function() {
$("a[data-popup-opener]").on('click', function(event) {
event.preventDefault();
opener.dismissRelatedLookupPopup(window, $(this).data("popup-opener"));
});
$('body').on('click', '.related-widget-wrapper-link', function(e) {
e.preventDefault();
if (this.href) {
const event = $.Event('django:show-related', {href: this.href});
$(this).trigger(event);
if (!event.isDefaultPrevented()) {
showRelatedObjectPopup(this);
}
}
});
$('body').on('change', '.related-widget-wrapper select', function(e) {
const event = $.Event('django:update-related');
$(this).trigger(event);
if (!event.isDefaultPrevented()) {
updateRelatedObjectLinks(this);
}
});
$('.related-widget-wrapper select').trigger('change');
$('body').on('click', '.related-lookup', function(e) {
e.preventDefault();
const event = $.Event('django:lookup-related');
$(this).trigger(event);
if (!event.isDefaultPrevented()) {
showRelatedObjectLookupPopup(this);
}
});
});
}
| Django-locallibrary/LocalLibrary/staticfiles/admin/js/admin/RelatedObjectLookups.js/0 | {
"file_path": "Django-locallibrary/LocalLibrary/staticfiles/admin/js/admin/RelatedObjectLookups.js",
"repo_id": "Django-locallibrary",
"token_count": 2723
} | 9 |
'use strict';
{
const inputTags = ['BUTTON', 'INPUT', 'SELECT', 'TEXTAREA'];
const modelName = document.getElementById('django-admin-form-add-constants').dataset.modelName;
if (modelName) {
const form = document.getElementById(modelName + '_form');
for (const element of form.elements) {
// HTMLElement.offsetParent returns null when the element is not
// rendered.
if (inputTags.includes(element.tagName) && !element.disabled && element.offsetParent) {
element.focus();
break;
}
}
}
}
| Django-locallibrary/LocalLibrary/staticfiles/admin/js/change_form.js/0 | {
"file_path": "Django-locallibrary/LocalLibrary/staticfiles/admin/js/change_form.js",
"repo_id": "Django-locallibrary",
"token_count": 263
} | 10 |
/*global jQuery:false*/
'use strict';
/* Puts the included jQuery into our own namespace using noConflict and passing
* it 'true'. This ensures that the included jQuery doesn't pollute the global
* namespace (i.e. this preserves pre-existing values for both window.$ and
* window.jQuery).
*/
window.django = {jQuery: jQuery.noConflict(true)};
| Django-locallibrary/LocalLibrary/staticfiles/admin/js/jquery.init.js/0 | {
"file_path": "Django-locallibrary/LocalLibrary/staticfiles/admin/js/jquery.init.js",
"repo_id": "Django-locallibrary",
"token_count": 94
} | 11 |
'use strict';
{
const $ = django.jQuery;
const fields = $('#django-admin-prepopulated-fields-constants').data('prepopulatedFields');
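    // Each entry supplies the slug field's id, its source fields
    // (dependency_ids), and slugify options (maxLength, allowUnicode).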
$.each(fields, function(index, field) {
$('.empty-form .form-row .field-' + field.name + ', .empty-form.form-row .field-' + field.name).addClass('prepopulated_field');
$(field.id).data('dependency_list', field.dependency_list).prepopulate(
field.dependency_ids, field.maxLength, field.allowUnicode
);
});
}
| Django-locallibrary/LocalLibrary/staticfiles/admin/js/prepopulate_init.js/0 | {
"file_path": "Django-locallibrary/LocalLibrary/staticfiles/admin/js/prepopulate_init.js",
"repo_id": "Django-locallibrary",
"token_count": 197
} | 12 |
@import url("https://fonts.googleapis.com/css?family=Ubuntu");
* {
list-style: none;
text-decoration: none;
font-family: Ubuntu;
}
a {
text-decoration: none;
color: rgb(78, 169, 221);
}
.sidebar {
width: 225px;
height: 100%;
position: fixed;
left: 0;
top: 0;
padding: 20px 0;
transition: all 0.5s ease;
background-color: dodgerblue;
a {
color: white;
display: table-row;
padding: 10px 0;
font-weight: bold;
}
}
.center-div {
position: absolute;
margin-left: 230px;
}
.pagination {
margin-left: 300px;
position: absolute;
margin-top: 170px;
}
.author_dates, .book_details {
margin-left: 30px;
}
.text-success {
color: dodgerblue;
}
.text-danger {
color: rgb(255, 192, 44);
}
.text-warning {
color: rgb(211, 46, 46);
}
.text-muted {
color: white;
}
.user_status {
position: absolute;
padding: 20px 0;
left: 40px;
}
.hr {
position: absolute;
width: 70%;
margin: 0 15%;
background-color: white;
height: 1px;
}
| Django-locallibrary/LocalLibrary/staticfiles/css/css.scss/0 | {
"file_path": "Django-locallibrary/LocalLibrary/staticfiles/css/css.scss",
"repo_id": "Django-locallibrary",
"token_count": 494
} | 13 |
from __future__ import absolute_import
import sys
import textwrap
from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import SUCCESS
from pip._internal.utils.misc import get_prog
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import List
from optparse import Values
BASE_COMPLETION = """
# pip {shell} completion start{script}# pip {shell} completion end
"""
COMPLETION_SCRIPTS = {
'bash': """
_pip_completion()
{{
COMPREPLY=( $( COMP_WORDS="${{COMP_WORDS[*]}}" \\
COMP_CWORD=$COMP_CWORD \\
PIP_AUTO_COMPLETE=1 $1 2>/dev/null ) )
}}
complete -o default -F _pip_completion {prog}
""",
'zsh': """
function _pip_completion {{
local words cword
read -Ac words
read -cn cword
reply=( $( COMP_WORDS="$words[*]" \\
COMP_CWORD=$(( cword-1 )) \\
PIP_AUTO_COMPLETE=1 $words[1] 2>/dev/null ))
}}
compctl -K _pip_completion {prog}
""",
'fish': """
function __fish_complete_pip
set -lx COMP_WORDS (commandline -o) ""
set -lx COMP_CWORD ( \\
math (contains -i -- (commandline -t) $COMP_WORDS)-1 \\
)
set -lx PIP_AUTO_COMPLETE 1
string split \\ -- (eval $COMP_WORDS[1])
end
complete -fa "(__fish_complete_pip)" -c {prog}
""",
}
class CompletionCommand(Command):
"""A helper command to be used for command completion."""
ignore_require_venv = True
def add_options(self):
# type: () -> None
self.cmd_opts.add_option(
'--bash', '-b',
action='store_const',
const='bash',
dest='shell',
help='Emit completion code for bash')
self.cmd_opts.add_option(
'--zsh', '-z',
action='store_const',
const='zsh',
dest='shell',
help='Emit completion code for zsh')
self.cmd_opts.add_option(
'--fish', '-f',
action='store_const',
const='fish',
dest='shell',
help='Emit completion code for fish')
self.parser.insert_option_group(0, self.cmd_opts)
def run(self, options, args):
# type: (Values, List[str]) -> int
"""Prints the completion code of the given shell"""
shells = COMPLETION_SCRIPTS.keys()
shell_options = ['--' + shell for shell in sorted(shells)]
if options.shell in shells:
script = textwrap.dedent(
COMPLETION_SCRIPTS.get(options.shell, '').format(
prog=get_prog())
)
print(BASE_COMPLETION.format(script=script, shell=options.shell))
return SUCCESS
else:
sys.stderr.write(
                'ERROR: You must pass {}\n'.format(' or '.join(shell_options))
)
return SUCCESS
| Django-locallibrary/env/Lib/site-packages/pip/_internal/commands/completion.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_internal/commands/completion.py",
"repo_id": "Django-locallibrary",
"token_count": 1537
} | 14 |
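The completion command above only formats one of the COMPLETION_SCRIPTS templates with the program name and splices the result between the BASE_COMPLETION markers. A condensed sketch of that assembly — the BASE, SCRIPTS, and render names below are illustrative stand-ins, not pip APIs:

```python
# Minimal sketch of how `pip completion --bash` builds its output,
# using simplified stand-ins for BASE_COMPLETION/COMPLETION_SCRIPTS.
import textwrap

BASE = """
# pip {shell} completion start{script}# pip {shell} completion end
"""
SCRIPTS = {
    'bash': """
        _pip_completion()
        {{
            COMPREPLY=( $( COMP_WORDS="${{COMP_WORDS[*]}}" \\
                           COMP_CWORD=$COMP_CWORD \\
                           PIP_AUTO_COMPLETE=1 $1 2>/dev/null ) )
        }}
        complete -o default -F _pip_completion {prog}
    """,
}

def render(shell, prog='pip'):
    # Substitute the program name first, then strip the indentation the
    # template carries from the source file.
    script = textwrap.dedent(SCRIPTS.get(shell, '').format(prog=prog))
    return BASE.format(script=script, shell=shell)

print(render('bash'))
```

In practice the command's output is typically appended to a shell startup file, along the lines of `python -m pip completion --bash >> ~/.bashrc`.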
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import Optional, List
def main(args=None):
# type: (Optional[List[str]]) -> int
"""This is preserved for old console scripts that may still be referencing
it.
For additional details, see https://github.com/pypa/pip/issues/7498.
"""
from pip._internal.utils.entrypoints import _wrapper
return _wrapper(args)
| Django-locallibrary/env/Lib/site-packages/pip/_internal/main.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_internal/main.py",
"repo_id": "Django-locallibrary",
"token_count": 150
} | 15 |
from pip._vendor.six.moves.urllib import parse as urllib_parse
class PackageIndex(object):
"""Represents a Package Index and provides easier access to endpoints
"""
__slots__ = ['url', 'netloc', 'simple_url', 'pypi_url',
'file_storage_domain']
def __init__(self, url, file_storage_domain):
# type: (str, str) -> None
super(PackageIndex, self).__init__()
self.url = url
self.netloc = urllib_parse.urlsplit(url).netloc
self.simple_url = self._url_for_path('simple')
self.pypi_url = self._url_for_path('pypi')
# This is part of a temporary hack used to block installs of PyPI
        # packages which depend on external urls; this is only necessary
        # until PyPI can block such packages itself.
self.file_storage_domain = file_storage_domain
def _url_for_path(self, path):
# type: (str) -> str
return urllib_parse.urljoin(self.url, path)
PyPI = PackageIndex(
'https://pypi.org/', file_storage_domain='files.pythonhosted.org'
)
TestPyPI = PackageIndex(
'https://test.pypi.org/', file_storage_domain='test-files.pythonhosted.org'
)
| Django-locallibrary/env/Lib/site-packages/pip/_internal/models/index.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_internal/models/index.py",
"repo_id": "Django-locallibrary",
"token_count": 467
} | 16 |
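Since _url_for_path is a plain urljoin against the base URL, the derived endpoints can be checked directly; a quick verification using the standard-library equivalent of the vendored six wrapper:

```python
# The simple/pypi endpoints are plain urljoin results off the base URL.
from urllib.parse import urljoin, urlsplit

base = 'https://pypi.org/'
assert urljoin(base, 'simple') == 'https://pypi.org/simple'
assert urljoin(base, 'pypi') == 'https://pypi.org/pypi'
assert urlsplit(base).netloc == 'pypi.org'
```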
"""Network Authentication Helpers
Contains interface (MultiDomainBasicAuth) and associated glue code for
providing credentials in the context of network requests.
"""
import logging
from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
from pip._vendor.requests.utils import get_netrc_auth
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._internal.utils.misc import (
ask,
ask_input,
ask_password,
remove_auth_from_url,
split_auth_netloc_from_url,
)
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import Dict, Optional, Tuple, List, Any
from pip._internal.vcs.versioncontrol import AuthInfo
from pip._vendor.requests.models import Response, Request
Credentials = Tuple[str, str, str]
logger = logging.getLogger(__name__)
try:
import keyring # noqa
except ImportError:
keyring = None
except Exception as exc:
logger.warning(
"Keyring is skipped due to an exception: %s", str(exc),
)
keyring = None
def get_keyring_auth(url, username):
# type: (str, str) -> Optional[AuthInfo]
"""Return the tuple auth for a given url from keyring."""
global keyring
if not url or not keyring:
return None
try:
try:
get_credential = keyring.get_credential
except AttributeError:
pass
else:
logger.debug("Getting credentials from keyring for %s", url)
cred = get_credential(url, username)
if cred is not None:
return cred.username, cred.password
return None
if username:
logger.debug("Getting password from keyring for %s", url)
password = keyring.get_password(url, username)
if password:
return username, password
except Exception as exc:
logger.warning(
"Keyring is skipped due to an exception: %s", str(exc),
)
keyring = None
return None
class MultiDomainBasicAuth(AuthBase):
def __init__(self, prompting=True, index_urls=None):
# type: (bool, Optional[List[str]]) -> None
self.prompting = prompting
self.index_urls = index_urls
self.passwords = {} # type: Dict[str, AuthInfo]
# When the user is prompted to enter credentials and keyring is
# available, we will offer to save them. If the user accepts,
# this value is set to the credentials they entered. After the
# request authenticates, the caller should call
# ``save_credentials`` to save these.
self._credentials_to_save = None # type: Optional[Credentials]
def _get_index_url(self, url):
# type: (str) -> Optional[str]
"""Return the original index URL matching the requested URL.
Cached or dynamically generated credentials may work against
the original index URL rather than just the netloc.
The provided url should have had its username and password
removed already. If the original index url had credentials then
they will be included in the return value.
Returns None if no matching index was found, or if --no-index
was specified by the user.
"""
if not url or not self.index_urls:
return None
for u in self.index_urls:
prefix = remove_auth_from_url(u).rstrip("/") + "/"
if url.startswith(prefix):
return u
return None
def _get_new_credentials(self, original_url, allow_netrc=True,
allow_keyring=True):
# type: (str, bool, bool) -> AuthInfo
"""Find and return credentials for the specified URL."""
# Split the credentials and netloc from the url.
url, netloc, url_user_password = split_auth_netloc_from_url(
original_url,
)
# Start with the credentials embedded in the url
username, password = url_user_password
if username is not None and password is not None:
logger.debug("Found credentials in url for %s", netloc)
return url_user_password
# Find a matching index url for this request
index_url = self._get_index_url(url)
if index_url:
# Split the credentials from the url.
index_info = split_auth_netloc_from_url(index_url)
if index_info:
index_url, _, index_url_user_password = index_info
logger.debug("Found index url %s", index_url)
# If an index URL was found, try its embedded credentials
if index_url and index_url_user_password[0] is not None:
username, password = index_url_user_password
if username is not None and password is not None:
logger.debug("Found credentials in index url for %s", netloc)
return index_url_user_password
# Get creds from netrc if we still don't have them
if allow_netrc:
netrc_auth = get_netrc_auth(original_url)
if netrc_auth:
logger.debug("Found credentials in netrc for %s", netloc)
return netrc_auth
# If we don't have a password and keyring is available, use it.
if allow_keyring:
# The index url is more specific than the netloc, so try it first
kr_auth = (
get_keyring_auth(index_url, username) or
get_keyring_auth(netloc, username)
)
if kr_auth:
logger.debug("Found credentials in keyring for %s", netloc)
return kr_auth
return username, password
def _get_url_and_credentials(self, original_url):
# type: (str) -> Tuple[str, Optional[str], Optional[str]]
"""Return the credentials to use for the provided URL.
If allowed, netrc and keyring may be used to obtain the
correct credentials.
Returns (url_without_credentials, username, password). Note
that even if the original URL contains credentials, this
function may return a different username and password.
"""
url, netloc, _ = split_auth_netloc_from_url(original_url)
# Use any stored credentials that we have for this netloc
username, password = self.passwords.get(netloc, (None, None))
if username is None and password is None:
# No stored credentials. Acquire new credentials without prompting
# the user. (e.g. from netrc, keyring, or the URL itself)
username, password = self._get_new_credentials(original_url)
if username is not None or password is not None:
# Convert the username and password if they're None, so that
# this netloc will show up as "cached" in the conditional above.
# Further, HTTPBasicAuth doesn't accept None, so it makes sense to
# cache the value that is going to be used.
username = username or ""
password = password or ""
# Store any acquired credentials.
self.passwords[netloc] = (username, password)
assert (
# Credentials were found
(username is not None and password is not None) or
# Credentials were not found
(username is None and password is None)
), "Could not load credentials from url: {}".format(original_url)
return url, username, password
def __call__(self, req):
# type: (Request) -> Request
# Get credentials for this request
url, username, password = self._get_url_and_credentials(req.url)
# Set the url of the request to the url without any credentials
req.url = url
if username is not None and password is not None:
# Send the basic auth with this request
req = HTTPBasicAuth(username, password)(req)
# Attach a hook to handle 401 responses
req.register_hook("response", self.handle_401)
return req
# Factored out to allow for easy patching in tests
def _prompt_for_password(self, netloc):
# type: (str) -> Tuple[Optional[str], Optional[str], bool]
username = ask_input("User for {}: ".format(netloc))
if not username:
return None, None, False
auth = get_keyring_auth(netloc, username)
if auth and auth[0] is not None and auth[1] is not None:
return auth[0], auth[1], False
password = ask_password("Password: ")
return username, password, True
# Factored out to allow for easy patching in tests
def _should_save_password_to_keyring(self):
# type: () -> bool
if not keyring:
return False
return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y"
def handle_401(self, resp, **kwargs):
# type: (Response, **Any) -> Response
# We only care about 401 responses, anything else we want to just
# pass through the actual response
if resp.status_code != 401:
return resp
# We are not able to prompt the user so simply return the response
if not self.prompting:
return resp
parsed = urllib_parse.urlparse(resp.url)
# Prompt the user for a new username and password
username, password, save = self._prompt_for_password(parsed.netloc)
# Store the new username and password to use for future requests
self._credentials_to_save = None
if username is not None and password is not None:
self.passwords[parsed.netloc] = (username, password)
# Prompt to save the password to keyring
if save and self._should_save_password_to_keyring():
self._credentials_to_save = (parsed.netloc, username, password)
# Consume content and release the original connection to allow our new
# request to reuse the same one.
resp.content
resp.raw.release_conn()
# Add our new username and password to the request
req = HTTPBasicAuth(username or "", password or "")(resp.request)
req.register_hook("response", self.warn_on_401)
# On successful request, save the credentials that were used to
# keyring. (Note that if the user responded "no" above, this member
# is not set and nothing will be saved.)
if self._credentials_to_save:
req.register_hook("response", self.save_credentials)
# Send our new request
new_resp = resp.connection.send(req, **kwargs)
new_resp.history.append(resp)
return new_resp
def warn_on_401(self, resp, **kwargs):
# type: (Response, **Any) -> None
"""Response callback to warn about incorrect credentials."""
if resp.status_code == 401:
logger.warning(
'401 Error, Credentials not correct for %s', resp.request.url,
)
def save_credentials(self, resp, **kwargs):
# type: (Response, **Any) -> None
"""Response callback to save credentials on success."""
assert keyring is not None, "should never reach here without keyring"
if not keyring:
return
creds = self._credentials_to_save
self._credentials_to_save = None
if creds and resp.status_code < 400:
try:
logger.info('Saving credentials to keyring')
keyring.set_password(*creds)
except Exception:
logger.exception('Failed to save credentials')
| Django-locallibrary/env/Lib/site-packages/pip/_internal/network/auth.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_internal/network/auth.py",
"repo_id": "Django-locallibrary",
"token_count": 4707
} | 17 |
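The heart of MultiDomainBasicAuth is a per-netloc credential cache sitting in front of the URL → index URL → netrc → keyring cascade. A stripped-down sketch of that caching shape — the names here (passwords, credentials_for, lookup) are illustrative, not pip APIs:

```python
from urllib.parse import urlsplit

passwords = {}  # netloc -> (username, password), mirroring self.passwords

def credentials_for(url, lookup):
    """Cache credentials per netloc, populating on first use.

    `lookup` stands in for the netrc/keyring cascade above; None results
    are stored as empty strings so the netloc still counts as cached.
    """
    netloc = urlsplit(url).netloc
    if netloc not in passwords:
        username, password = lookup(url)
        passwords[netloc] = (username or "", password or "")
    return passwords[netloc]

# The second call for the same host never re-invokes the lookup.
creds = credentials_for('https://pypi.example.com/simple',
                        lambda u: ('bob', 's3cret'))
assert credentials_for('https://pypi.example.com/other',
                       lambda u: 1 / 0) == creds
```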
"""Prepares a distribution for installation
"""
# The following comment should be removed at some point in the future.
# mypy: strict-optional=False
import logging
import mimetypes
import os
import shutil
from pip._vendor.six import PY2
from pip._internal.distributions import (
make_distribution_for_install_requirement,
)
from pip._internal.distributions.installed import InstalledDistribution
from pip._internal.exceptions import (
DirectoryUrlHashUnsupported,
HashMismatch,
HashUnpinned,
InstallationError,
NetworkConnectionError,
PreviousBuildDirError,
VcsHashUnsupported,
)
from pip._internal.utils.filesystem import copy2_fixed
from pip._internal.utils.hashes import MissingHashes
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
display_path,
hide_url,
path_to_display,
rmtree,
)
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.unpacking import unpack_file
from pip._internal.vcs import vcs
if MYPY_CHECK_RUNNING:
from typing import (
Callable, List, Optional, Tuple,
)
from mypy_extensions import TypedDict
from pip._internal.distributions import AbstractDistribution
from pip._internal.index.package_finder import PackageFinder
from pip._internal.models.link import Link
from pip._internal.network.download import Downloader
from pip._internal.req.req_install import InstallRequirement
from pip._internal.req.req_tracker import RequirementTracker
from pip._internal.utils.hashes import Hashes
if PY2:
CopytreeKwargs = TypedDict(
'CopytreeKwargs',
{
'ignore': Callable[[str, List[str]], List[str]],
'symlinks': bool,
},
total=False,
)
else:
CopytreeKwargs = TypedDict(
'CopytreeKwargs',
{
'copy_function': Callable[[str, str], None],
'ignore': Callable[[str, List[str]], List[str]],
'ignore_dangling_symlinks': bool,
'symlinks': bool,
},
total=False,
)
logger = logging.getLogger(__name__)
def _get_prepared_distribution(
req, # type: InstallRequirement
req_tracker, # type: RequirementTracker
finder, # type: PackageFinder
build_isolation # type: bool
):
# type: (...) -> AbstractDistribution
"""Prepare a distribution for installation.
"""
abstract_dist = make_distribution_for_install_requirement(req)
with req_tracker.track(req):
abstract_dist.prepare_distribution_metadata(finder, build_isolation)
return abstract_dist
def unpack_vcs_link(link, location):
# type: (Link, str) -> None
vcs_backend = vcs.get_backend_for_scheme(link.scheme)
assert vcs_backend is not None
vcs_backend.unpack(location, url=hide_url(link.url))
class File(object):
def __init__(self, path, content_type):
# type: (str, str) -> None
self.path = path
self.content_type = content_type
def get_http_url(
link, # type: Link
downloader, # type: Downloader
download_dir=None, # type: Optional[str]
hashes=None, # type: Optional[Hashes]
):
# type: (...) -> File
temp_dir = TempDirectory(kind="unpack", globally_managed=True)
# If a download dir is specified, is the file already downloaded there?
already_downloaded_path = None
if download_dir:
already_downloaded_path = _check_download_dir(
link, download_dir, hashes
)
if already_downloaded_path:
from_path = already_downloaded_path
content_type = mimetypes.guess_type(from_path)[0]
else:
# let's download to a tmp dir
from_path, content_type = _download_http_url(
link, downloader, temp_dir.path, hashes
)
return File(from_path, content_type)
def _copy2_ignoring_special_files(src, dest):
# type: (str, str) -> None
"""Copying special files is not supported, but as a convenience to users
we skip errors copying them. This supports tools that may create e.g.
socket files in the project source directory.
"""
try:
copy2_fixed(src, dest)
except shutil.SpecialFileError as e:
# SpecialFileError may be raised due to either the source or
# destination. If the destination was the cause then we would actually
# care, but since the destination directory is deleted prior to
# copy we ignore all of them assuming it is caused by the source.
logger.warning(
"Ignoring special file error '%s' encountered copying %s to %s.",
str(e),
path_to_display(src),
path_to_display(dest),
)
def _copy_source_tree(source, target):
# type: (str, str) -> None
target_abspath = os.path.abspath(target)
target_basename = os.path.basename(target_abspath)
target_dirname = os.path.dirname(target_abspath)
def ignore(d, names):
# type: (str, List[str]) -> List[str]
skipped = [] # type: List[str]
if d == source:
# Pulling in those directories can potentially be very slow,
# exclude the following directories if they appear in the top
# level dir (and only it).
# See discussion at https://github.com/pypa/pip/pull/6770
skipped += ['.tox', '.nox']
if os.path.abspath(d) == target_dirname:
# Prevent an infinite recursion if the target is in source.
# This can happen when TMPDIR is set to ${PWD}/...
# and we copy PWD to TMPDIR.
skipped += [target_basename]
return skipped
kwargs = dict(ignore=ignore, symlinks=True) # type: CopytreeKwargs
if not PY2:
# Python 2 does not support copy_function, so we only ignore
# errors on special file copy in Python 3.
kwargs['copy_function'] = _copy2_ignoring_special_files
shutil.copytree(source, target, **kwargs)
def get_file_url(
link, # type: Link
download_dir=None, # type: Optional[str]
hashes=None # type: Optional[Hashes]
):
# type: (...) -> File
"""Get file and optionally check its hash.
"""
# If a download dir is specified, is the file already there and valid?
already_downloaded_path = None
if download_dir:
already_downloaded_path = _check_download_dir(
link, download_dir, hashes
)
if already_downloaded_path:
from_path = already_downloaded_path
else:
from_path = link.file_path
# If --require-hashes is off, `hashes` is either empty, the
# link's embedded hash, or MissingHashes; it is required to
# match. If --require-hashes is on, we are satisfied by any
# hash in `hashes` matching: a URL-based or an option-based
# one; no internet-sourced hash will be in `hashes`.
if hashes:
hashes.check_against_path(from_path)
content_type = mimetypes.guess_type(from_path)[0]
return File(from_path, content_type)
def unpack_url(
link, # type: Link
location, # type: str
downloader, # type: Downloader
download_dir=None, # type: Optional[str]
hashes=None, # type: Optional[Hashes]
):
# type: (...) -> Optional[File]
"""Unpack link into location, downloading if required.
:param hashes: A Hashes object, one of whose embedded hashes must match,
or HashMismatch will be raised. If the Hashes is empty, no matches are
required, and unhashable types of requirements (like VCS ones, which
would ordinarily raise HashUnsupported) are allowed.
"""
# non-editable vcs urls
if link.is_vcs:
unpack_vcs_link(link, location)
return None
# If it's a url to a local directory
if link.is_existing_dir():
if os.path.isdir(location):
rmtree(location)
_copy_source_tree(link.file_path, location)
return None
# file urls
if link.is_file:
file = get_file_url(link, download_dir, hashes=hashes)
# http urls
else:
file = get_http_url(
link,
downloader,
download_dir,
hashes=hashes,
)
# unpack the archive to the build dir location. even when only downloading
# archives, they have to be unpacked to parse dependencies, except wheels
if not link.is_wheel:
unpack_file(file.path, location, file.content_type)
return file
def _download_http_url(
link, # type: Link
downloader, # type: Downloader
temp_dir, # type: str
hashes, # type: Optional[Hashes]
):
# type: (...) -> Tuple[str, str]
"""Download link url into temp_dir using provided session"""
download = downloader(link)
file_path = os.path.join(temp_dir, download.filename)
with open(file_path, 'wb') as content_file:
for chunk in download.chunks:
content_file.write(chunk)
if hashes:
hashes.check_against_path(file_path)
return file_path, download.response.headers.get('content-type', '')
def _check_download_dir(link, download_dir, hashes):
# type: (Link, str, Optional[Hashes]) -> Optional[str]
""" Check download_dir for previously downloaded file with correct hash
If a correct file is found return its path else None
"""
download_path = os.path.join(download_dir, link.filename)
if not os.path.exists(download_path):
return None
# If already downloaded, does its hash match?
logger.info('File was already downloaded %s', download_path)
if hashes:
try:
hashes.check_against_path(download_path)
except HashMismatch:
logger.warning(
'Previously-downloaded file %s has bad hash. '
'Re-downloading.',
download_path
)
os.unlink(download_path)
return None
return download_path
class RequirementPreparer(object):
"""Prepares a Requirement
"""
def __init__(
self,
build_dir, # type: str
download_dir, # type: Optional[str]
src_dir, # type: str
wheel_download_dir, # type: Optional[str]
build_isolation, # type: bool
req_tracker, # type: RequirementTracker
downloader, # type: Downloader
finder, # type: PackageFinder
require_hashes, # type: bool
use_user_site, # type: bool
):
# type: (...) -> None
super(RequirementPreparer, self).__init__()
self.src_dir = src_dir
self.build_dir = build_dir
self.req_tracker = req_tracker
self.downloader = downloader
self.finder = finder
# Where still-packed archives should be written to. If None, they are
# not saved, and are deleted immediately after unpacking.
self.download_dir = download_dir
# Where still-packed .whl files should be written to. If None, they are
# written to the download_dir parameter. Separate to download_dir to
# permit only keeping wheel archives for pip wheel.
self.wheel_download_dir = wheel_download_dir
# NOTE
# download_dir and wheel_download_dir overlap semantically and may
# be combined if we're willing to have non-wheel archives present in
# the wheelhouse output by 'pip wheel'.
# Is build isolation allowed?
self.build_isolation = build_isolation
# Should hash-checking be required?
self.require_hashes = require_hashes
# Should install in user site-packages?
self.use_user_site = use_user_site
@property
def _download_should_save(self):
# type: () -> bool
if not self.download_dir:
return False
if os.path.exists(self.download_dir):
return True
logger.critical('Could not find download directory')
raise InstallationError(
"Could not find or access download directory '{}'"
.format(self.download_dir))
def _log_preparing_link(self, req):
# type: (InstallRequirement) -> None
"""Log the way the link prepared."""
if req.link.is_file:
path = req.link.file_path
logger.info('Processing %s', display_path(path))
else:
logger.info('Collecting %s', req.req or req)
def _ensure_link_req_src_dir(self, req, download_dir, parallel_builds):
# type: (InstallRequirement, Optional[str], bool) -> None
"""Ensure source_dir of a linked InstallRequirement."""
# Since source_dir is only set for editable requirements.
if req.link.is_wheel:
# We don't need to unpack wheels, so no need for a source
# directory.
return
assert req.source_dir is None
# We always delete unpacked sdists after pip runs.
req.ensure_has_source_dir(
self.build_dir,
autodelete=True,
parallel_builds=parallel_builds,
)
        # If a checkout exists, it's unwise to keep going. Version
# inconsistencies are logged later, but do not fail the
# installation.
# FIXME: this won't upgrade when there's an existing
# package unpacked in `req.source_dir`
if os.path.exists(os.path.join(req.source_dir, 'setup.py')):
raise PreviousBuildDirError(
"pip can't proceed with requirements '{}' due to a"
"pre-existing build directory ({}). This is likely "
"due to a previous installation that failed . pip is "
"being responsible and not assuming it can delete this. "
"Please delete it and try again.".format(req, req.source_dir)
)
def _get_linked_req_hashes(self, req):
# type: (InstallRequirement) -> Hashes
# By the time this is called, the requirement's link should have
# been checked so we can tell what kind of requirements req is
# and raise some more informative errors than otherwise.
# (For example, we can raise VcsHashUnsupported for a VCS URL
# rather than HashMissing.)
if not self.require_hashes:
return req.hashes(trust_internet=True)
# We could check these first 2 conditions inside unpack_url
# and save repetition of conditions, but then we would
# report less-useful error messages for unhashable
# requirements, complaining that there's no hash provided.
if req.link.is_vcs:
raise VcsHashUnsupported()
if req.link.is_existing_dir():
raise DirectoryUrlHashUnsupported()
# Unpinned packages are asking for trouble when a new version
# is uploaded. This isn't a security check, but it saves users
# a surprising hash mismatch in the future.
# file:/// URLs aren't pinnable, so don't complain about them
# not being pinned.
if req.original_link is None and not req.is_pinned:
raise HashUnpinned()
# If known-good hashes are missing for this requirement,
# shim it with a facade object that will provoke hash
# computation and then raise a HashMissing exception
# showing the user what the hash should be.
return req.hashes(trust_internet=False) or MissingHashes()
def prepare_linked_requirement(self, req, parallel_builds=False):
# type: (InstallRequirement, bool) -> AbstractDistribution
"""Prepare a requirement to be obtained from req.link."""
assert req.link
link = req.link
self._log_preparing_link(req)
if link.is_wheel and self.wheel_download_dir:
# Download wheels to a dedicated dir when doing `pip wheel`.
download_dir = self.wheel_download_dir
else:
download_dir = self.download_dir
with indent_log():
self._ensure_link_req_src_dir(req, download_dir, parallel_builds)
try:
local_file = unpack_url(
link, req.source_dir, self.downloader, download_dir,
hashes=self._get_linked_req_hashes(req)
)
except NetworkConnectionError as exc:
raise InstallationError(
'Could not install requirement {} because of HTTP '
'error {} for URL {}'.format(req, exc, link)
)
# For use in later processing, preserve the file path on the
# requirement.
if local_file:
req.local_file_path = local_file.path
abstract_dist = _get_prepared_distribution(
req, self.req_tracker, self.finder, self.build_isolation,
)
if download_dir:
if link.is_existing_dir():
logger.info('Link is a directory, ignoring download_dir')
elif local_file:
download_location = os.path.join(
download_dir, link.filename
)
if not os.path.exists(download_location):
shutil.copy(local_file.path, download_location)
download_path = display_path(download_location)
logger.info('Saved %s', download_path)
if self._download_should_save:
# Make a .zip of the source_dir we already created.
if link.is_vcs:
req.archive(self.download_dir)
return abstract_dist
def prepare_editable_requirement(
self,
req, # type: InstallRequirement
):
# type: (...) -> AbstractDistribution
"""Prepare an editable requirement
"""
assert req.editable, "cannot prepare a non-editable req as editable"
logger.info('Obtaining %s', req)
with indent_log():
if self.require_hashes:
raise InstallationError(
'The editable requirement {} cannot be installed when '
'requiring hashes, because there is no single file to '
'hash.'.format(req)
)
req.ensure_has_source_dir(self.src_dir)
req.update_editable(not self._download_should_save)
abstract_dist = _get_prepared_distribution(
req, self.req_tracker, self.finder, self.build_isolation,
)
if self._download_should_save:
req.archive(self.download_dir)
req.check_if_exists(self.use_user_site)
return abstract_dist
def prepare_installed_requirement(
self,
req, # type: InstallRequirement
skip_reason # type: str
):
# type: (...) -> AbstractDistribution
"""Prepare an already-installed requirement
"""
assert req.satisfied_by, "req should have been satisfied but isn't"
assert skip_reason is not None, (
"did not get skip reason skipped but req.satisfied_by "
"is set to {}".format(req.satisfied_by)
)
logger.info(
'Requirement %s: %s (%s)',
skip_reason, req, req.satisfied_by.version
)
with indent_log():
if self.require_hashes:
logger.debug(
'Since it is already installed, we are trusting this '
'package without checking its hash. To ensure a '
'completely repeatable environment, install into an '
'empty virtualenv.'
)
abstract_dist = InstalledDistribution(req)
return abstract_dist
| Django-locallibrary/env/Lib/site-packages/pip/_internal/operations/prepare.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_internal/operations/prepare.py",
"repo_id": "Django-locallibrary",
"token_count": 8344
} | 18 |
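_check_download_dir above is the cache-validation step: reuse a previously downloaded archive only when it exists and any required hash still matches, deleting it otherwise. A self-contained sketch of that rule (cached_download is an illustrative name, and the single-hash interface simplifies pip's Hashes object):

```python
import hashlib
import os

def cached_download(path, expected_sha256=None):
    """Return `path` if it can be reused, else None (after removing a
    file whose hash no longer matches, forcing a re-download)."""
    if not os.path.exists(path):
        return None
    if expected_sha256 is not None:
        with open(path, 'rb') as f:
            digest = hashlib.sha256(f.read()).hexdigest()
        if digest != expected_sha256:
            os.unlink(path)  # stale or corrupt: drop it, as pip does
            return None
    return path
```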
from pip._vendor.packaging.utils import canonicalize_name
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import FrozenSet, Iterable, Optional, Tuple
from pip._vendor.packaging.version import _BaseVersion
from pip._internal.models.link import Link
from pip._internal.req.req_install import InstallRequirement
CandidateLookup = Tuple[
Optional["Candidate"],
Optional[InstallRequirement],
]
def format_name(project, extras):
# type: (str, FrozenSet[str]) -> str
if not extras:
return project
canonical_extras = sorted(canonicalize_name(e) for e in extras)
return "{}[{}]".format(project, ",".join(canonical_extras))
class Requirement(object):
@property
def name(self):
# type: () -> str
raise NotImplementedError("Subclass should override")
def is_satisfied_by(self, candidate):
# type: (Candidate) -> bool
return False
def get_candidate_lookup(self):
# type: () -> CandidateLookup
raise NotImplementedError("Subclass should override")
def format_for_error(self):
# type: () -> str
raise NotImplementedError("Subclass should override")
class Candidate(object):
@property
def name(self):
# type: () -> str
raise NotImplementedError("Override in subclass")
@property
def version(self):
# type: () -> _BaseVersion
raise NotImplementedError("Override in subclass")
@property
def is_installed(self):
# type: () -> bool
raise NotImplementedError("Override in subclass")
@property
def is_editable(self):
# type: () -> bool
raise NotImplementedError("Override in subclass")
@property
def source_link(self):
# type: () -> Optional[Link]
raise NotImplementedError("Override in subclass")
def iter_dependencies(self, with_requires):
# type: (bool) -> Iterable[Optional[Requirement]]
raise NotImplementedError("Override in subclass")
def get_install_requirement(self):
# type: () -> Optional[InstallRequirement]
raise NotImplementedError("Override in subclass")
def format_for_error(self):
# type: () -> str
raise NotImplementedError("Subclass should override")
| Django-locallibrary/env/Lib/site-packages/pip/_internal/resolution/resolvelib/base.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_internal/resolution/resolvelib/base.py",
"repo_id": "Django-locallibrary",
"token_count": 872
} | 19 |
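format_name above encodes the usual extras syntax: canonicalize each extra, sort, and join inside brackets. A standalone check of the same convention, using the public packaging library in place of pip's vendored copy:

```python
from packaging.utils import canonicalize_name

def format_name(project, extras):
    # Same convention as the pip helper above: name[extra1,extra2]
    if not extras:
        return project
    canonical = sorted(canonicalize_name(e) for e in extras)
    return "{}[{}]".format(project, ",".join(canonical))

assert format_name("requests", frozenset()) == "requests"
assert format_name("requests", frozenset({"SOCKS", "security"})) \
    == "requests[security,socks]"
```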
# The following comment should be removed at some point in the future.
# mypy: strict-optional=False
from __future__ import absolute_import
import os
import sys
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import Optional, Tuple
def glibc_version_string():
# type: () -> Optional[str]
"Returns glibc version string, or None if not using glibc."
return glibc_version_string_confstr() or glibc_version_string_ctypes()
def glibc_version_string_confstr():
# type: () -> Optional[str]
"Primary implementation of glibc_version_string using os.confstr."
# os.confstr is quite a bit faster than ctypes.DLL. It's also less likely
# to be broken or missing. This strategy is used in the standard library
# platform module:
# https://github.com/python/cpython/blob/fcf1d003bf4f0100c9d0921ff3d70e1127ca1b71/Lib/platform.py#L175-L183
if sys.platform == "win32":
return None
try:
# os.confstr("CS_GNU_LIBC_VERSION") returns a string like "glibc 2.17":
_, version = os.confstr("CS_GNU_LIBC_VERSION").split()
except (AttributeError, OSError, ValueError):
# os.confstr() or CS_GNU_LIBC_VERSION not available (or a bad value)...
return None
return version
def glibc_version_string_ctypes():
# type: () -> Optional[str]
"Fallback implementation of glibc_version_string using ctypes."
try:
import ctypes
except ImportError:
return None
# ctypes.CDLL(None) internally calls dlopen(NULL), and as the dlopen
# manpage says, "If filename is NULL, then the returned handle is for the
# main program". This way we can let the linker do the work to figure out
# which libc our process is actually using.
process_namespace = ctypes.CDLL(None)
try:
gnu_get_libc_version = process_namespace.gnu_get_libc_version
except AttributeError:
# Symbol doesn't exist -> therefore, we are not linked to
# glibc.
return None
# Call gnu_get_libc_version, which returns a string like "2.5"
gnu_get_libc_version.restype = ctypes.c_char_p
version_str = gnu_get_libc_version()
# py2 / py3 compatibility:
if not isinstance(version_str, str):
version_str = version_str.decode("ascii")
return version_str
# platform.libc_ver regularly returns completely nonsensical glibc
# versions. E.g. on my computer, platform says:
#
# ~$ python2.7 -c 'import platform; print(platform.libc_ver())'
# ('glibc', '2.7')
# ~$ python3.5 -c 'import platform; print(platform.libc_ver())'
# ('glibc', '2.9')
#
# But the truth is:
#
# ~$ ldd --version
# ldd (Debian GLIBC 2.22-11) 2.22
#
# This is unfortunate, because it means that the linehaul data on libc
# versions that was generated by pip 8.1.2 and earlier is useless and
# misleading. Solution: instead of using platform, use our code that actually
# works.
def libc_ver():
# type: () -> Tuple[str, str]
"""Try to determine the glibc version
Returns a tuple of strings (lib, version) which default to empty strings
in case the lookup fails.
"""
glibc_version = glibc_version_string()
if glibc_version is None:
return ("", "")
else:
return ("glibc", glibc_version)
| Django-locallibrary/env/Lib/site-packages/pip/_internal/utils/glibc.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_internal/utils/glibc.py",
"repo_id": "Django-locallibrary",
"token_count": 1229
} | 20 |
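The confstr strategy is easy to try in isolation; this restates glibc_version_string_confstr without the typing scaffolding:

```python
import os
import sys

def glibc_version():
    """Same logic as glibc_version_string_confstr above."""
    if sys.platform == "win32":
        return None
    try:
        # e.g. "glibc 2.31" on a glibc-based Linux
        _, version = os.confstr("CS_GNU_LIBC_VERSION").split()
    except (AttributeError, OSError, ValueError):
        return None
    return version

print(glibc_version())  # a '2.31'-style string, or None off glibc
```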
"""Support functions for working with wheel files.
"""
from __future__ import absolute_import
import logging
from email.parser import Parser
from zipfile import ZipFile
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.pkg_resources import DistInfoDistribution
from pip._vendor.six import PY2, ensure_str
from pip._internal.exceptions import UnsupportedWheel
from pip._internal.utils.pkg_resources import DictMetadata
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from email.message import Message
from typing import Dict, Tuple
from pip._vendor.pkg_resources import Distribution
if PY2:
from zipfile import BadZipfile as BadZipFile
else:
from zipfile import BadZipFile
VERSION_COMPATIBLE = (1, 0)
logger = logging.getLogger(__name__)
class WheelMetadata(DictMetadata):
"""Metadata provider that maps metadata decoding exceptions to our
internal exception type.
"""
def __init__(self, metadata, wheel_name):
# type: (Dict[str, bytes], str) -> None
super(WheelMetadata, self).__init__(metadata)
self._wheel_name = wheel_name
def get_metadata(self, name):
# type: (str) -> str
try:
return super(WheelMetadata, self).get_metadata(name)
except UnicodeDecodeError as e:
# Augment the default error with the origin of the file.
raise UnsupportedWheel(
"Error decoding metadata for {}: {}".format(
self._wheel_name, e
)
)
def pkg_resources_distribution_for_wheel(wheel_zip, name, location):
# type: (ZipFile, str, str) -> Distribution
"""Get a pkg_resources distribution given a wheel.
:raises UnsupportedWheel: on any errors
"""
info_dir, _ = parse_wheel(wheel_zip, name)
metadata_files = [
p for p in wheel_zip.namelist() if p.startswith("{}/".format(info_dir))
]
metadata_text = {} # type: Dict[str, bytes]
for path in metadata_files:
# If a flag is set, namelist entries may be unicode in Python 2.
# We coerce them to native str type to match the types used in the rest
# of the code. This cannot fail because unicode can always be encoded
# with UTF-8.
full_path = ensure_str(path)
_, metadata_name = full_path.split("/", 1)
try:
metadata_text[metadata_name] = read_wheel_metadata_file(
wheel_zip, full_path
)
except UnsupportedWheel as e:
raise UnsupportedWheel(
"{} has an invalid wheel, {}".format(name, str(e))
)
metadata = WheelMetadata(metadata_text, location)
return DistInfoDistribution(
location=location, metadata=metadata, project_name=name
)
def parse_wheel(wheel_zip, name):
# type: (ZipFile, str) -> Tuple[str, Message]
"""Extract information from the provided wheel, ensuring it meets basic
standards.
Returns the name of the .dist-info directory and the parsed WHEEL metadata.
"""
try:
info_dir = wheel_dist_info_dir(wheel_zip, name)
metadata = wheel_metadata(wheel_zip, info_dir)
version = wheel_version(metadata)
except UnsupportedWheel as e:
raise UnsupportedWheel(
"{} has an invalid wheel, {}".format(name, str(e))
)
check_compatibility(version, name)
return info_dir, metadata
def wheel_dist_info_dir(source, name):
# type: (ZipFile, str) -> str
"""Returns the name of the contained .dist-info directory.
Raises AssertionError or UnsupportedWheel if not found, >1 found, or
it doesn't match the provided name.
"""
# Zip file path separators must be /
subdirs = set(p.split("/", 1)[0] for p in source.namelist())
info_dirs = [s for s in subdirs if s.endswith('.dist-info')]
if not info_dirs:
raise UnsupportedWheel(".dist-info directory not found")
if len(info_dirs) > 1:
raise UnsupportedWheel(
"multiple .dist-info directories found: {}".format(
", ".join(info_dirs)
)
)
info_dir = info_dirs[0]
info_dir_name = canonicalize_name(info_dir)
canonical_name = canonicalize_name(name)
if not info_dir_name.startswith(canonical_name):
raise UnsupportedWheel(
".dist-info directory {!r} does not start with {!r}".format(
info_dir, canonical_name
)
)
# Zip file paths can be unicode or str depending on the zip entry flags,
# so normalize it.
return ensure_str(info_dir)
def read_wheel_metadata_file(source, path):
# type: (ZipFile, str) -> bytes
try:
return source.read(path)
# BadZipFile for general corruption, KeyError for missing entry,
# and RuntimeError for password-protected files
except (BadZipFile, KeyError, RuntimeError) as e:
raise UnsupportedWheel(
"could not read {!r} file: {!r}".format(path, e)
)
def wheel_metadata(source, dist_info_dir):
# type: (ZipFile, str) -> Message
"""Return the WHEEL metadata of an extracted wheel, if possible.
Otherwise, raise UnsupportedWheel.
"""
path = "{}/WHEEL".format(dist_info_dir)
# Zip file path separators must be /
wheel_contents = read_wheel_metadata_file(source, path)
try:
wheel_text = ensure_str(wheel_contents)
except UnicodeDecodeError as e:
raise UnsupportedWheel("error decoding {!r}: {!r}".format(path, e))
# FeedParser (used by Parser) does not raise any exceptions. The returned
# message may have .defects populated, but for backwards-compatibility we
# currently ignore them.
return Parser().parsestr(wheel_text)
def wheel_version(wheel_data):
# type: (Message) -> Tuple[int, ...]
"""Given WHEEL metadata, return the parsed Wheel-Version.
Otherwise, raise UnsupportedWheel.
"""
version_text = wheel_data["Wheel-Version"]
if version_text is None:
raise UnsupportedWheel("WHEEL is missing Wheel-Version")
version = version_text.strip()
try:
return tuple(map(int, version.split('.')))
except ValueError:
raise UnsupportedWheel("invalid Wheel-Version: {!r}".format(version))
def check_compatibility(version, name):
# type: (Tuple[int, ...], str) -> None
"""Raises errors or warns if called with an incompatible Wheel-Version.
    pip should refuse to install a Wheel-Version that's a major series
    ahead of what it's compatible with (e.g. 2.0 > 1.1); and warn when
    installing a version that is only a minor version ahead (e.g. 1.2 > 1.1).
version: a 2-tuple representing a Wheel-Version (Major, Minor)
name: name of wheel or package to raise exception about
:raises UnsupportedWheel: when an incompatible Wheel-Version is given
"""
if version[0] > VERSION_COMPATIBLE[0]:
raise UnsupportedWheel(
"{}'s Wheel-Version ({}) is not compatible with this version "
"of pip".format(name, '.'.join(map(str, version)))
)
elif version > VERSION_COMPATIBLE:
logger.warning(
'Installing from a newer Wheel-Version (%s)',
'.'.join(map(str, version)),
)
| Django-locallibrary/env/Lib/site-packages/pip/_internal/utils/wheel.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_internal/utils/wheel.py",
"repo_id": "Django-locallibrary",
"token_count": 2796
} | 21 |
import base64
import io
import json
import zlib
from pip._vendor import msgpack
from pip._vendor.requests.structures import CaseInsensitiveDict
from .compat import HTTPResponse, pickle, text_type
def _b64_decode_bytes(b):
return base64.b64decode(b.encode("ascii"))
def _b64_decode_str(s):
return _b64_decode_bytes(s).decode("utf8")
class Serializer(object):
def dumps(self, request, response, body=None):
response_headers = CaseInsensitiveDict(response.headers)
if body is None:
body = response.read(decode_content=False)
            # NOTE: 99% sure this is dead code. I'm only leaving it
            # here b/c I don't have a test yet to prove
            # it. Basically, before using
            # `cachecontrol.filewrapper.CallbackFileWrapper`,
            # this made an effort to reset the file handle. The
            # `CallbackFileWrapper` short circuits this code by
            # setting the body as the content is consumed; the
            # result is that a `body` argument is *always* passed
            # into cache_response and, in turn, into
            # `Serializer.dumps`.
response._fp = io.BytesIO(body)
# NOTE: This is all a bit weird, but it's really important that on
# Python 2.x these objects are unicode and not str, even when
# they contain only ascii. The problem here is that msgpack
# understands the difference between unicode and bytes and we
# have it set to differentiate between them, however Python 2
# doesn't know the difference. Forcing these to unicode will be
# enough to have msgpack know the difference.
data = {
u"response": {
u"body": body,
u"headers": dict(
(text_type(k), text_type(v)) for k, v in response.headers.items()
),
u"status": response.status,
u"version": response.version,
u"reason": text_type(response.reason),
u"strict": response.strict,
u"decode_content": response.decode_content,
}
}
# Construct our vary headers
data[u"vary"] = {}
if u"vary" in response_headers:
varied_headers = response_headers[u"vary"].split(",")
for header in varied_headers:
header = text_type(header).strip()
header_value = request.headers.get(header, None)
if header_value is not None:
header_value = text_type(header_value)
data[u"vary"][header] = header_value
return b",".join([b"cc=4", msgpack.dumps(data, use_bin_type=True)])
def loads(self, request, data):
# Short circuit if we've been given an empty set of data
if not data:
return
# Determine what version of the serializer the data was serialized
# with
try:
ver, data = data.split(b",", 1)
except ValueError:
ver = b"cc=0"
        # Make sure that our "ver" is actually a version and isn't a false
        # positive from a "," being in the data stream.
if ver[:3] != b"cc=":
data = ver + data
ver = b"cc=0"
# Get the version number out of the cc=N
ver = ver.split(b"=", 1)[-1].decode("ascii")
# Dispatch to the actual load method for the given version
try:
return getattr(self, "_loads_v{}".format(ver))(request, data)
except AttributeError:
# This is a version we don't have a loads function for, so we'll
# just treat it as a miss and return None
return
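    # For illustration (added): a value produced by dumps() looks like
    # b"cc=4," followed by the msgpack payload; loads() splits on the first
    # comma, extracts the version number and dispatches to _loads_v4().
    # Data without a b"cc=" prefix is treated as legacy v0, i.e. a miss.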
def prepare_response(self, request, cached):
"""Verify our vary headers match and construct a real urllib3
HTTPResponse object.
"""
# Special case the '*' Vary value as it means we cannot actually
# determine if the cached response is suitable for this request.
# This case is also handled in the controller code when creating
# a cache entry, but is left here for backwards compatibility.
if "*" in cached.get("vary", {}):
return
# Ensure that the Vary headers for the cached response match our
# request
for header, value in cached.get("vary", {}).items():
if request.headers.get(header, None) != value:
return
body_raw = cached["response"].pop("body")
headers = CaseInsensitiveDict(data=cached["response"]["headers"])
if headers.get("transfer-encoding", "") == "chunked":
headers.pop("transfer-encoding")
cached["response"]["headers"] = headers
try:
body = io.BytesIO(body_raw)
except TypeError:
# This can happen if cachecontrol serialized to v1 format (pickle)
# using Python 2. A Python 2 str(byte string) will be unpickled as
# a Python 3 str (unicode string), which will cause the above to
# fail with:
#
# TypeError: 'str' does not support the buffer interface
body = io.BytesIO(body_raw.encode("utf8"))
return HTTPResponse(body=body, preload_content=False, **cached["response"])
def _loads_v0(self, request, data):
# The original legacy cache data. This doesn't contain enough
# information to construct everything we need, so we'll treat this as
# a miss.
return
def _loads_v1(self, request, data):
try:
cached = pickle.loads(data)
except ValueError:
return
return self.prepare_response(request, cached)
def _loads_v2(self, request, data):
try:
cached = json.loads(zlib.decompress(data).decode("utf8"))
except (ValueError, zlib.error):
return
# We need to decode the items that we've base64 encoded
cached["response"]["body"] = _b64_decode_bytes(cached["response"]["body"])
cached["response"]["headers"] = dict(
(_b64_decode_str(k), _b64_decode_str(v))
for k, v in cached["response"]["headers"].items()
)
cached["response"]["reason"] = _b64_decode_str(cached["response"]["reason"])
cached["vary"] = dict(
(_b64_decode_str(k), _b64_decode_str(v) if v is not None else v)
for k, v in cached["vary"].items()
)
return self.prepare_response(request, cached)
def _loads_v3(self, request, data):
# Due to Python 2 encoding issues, it's impossible to know for sure
# exactly how to load v3 entries, thus we'll treat these as a miss so
# that they get rewritten out as v4 entries.
return
def _loads_v4(self, request, data):
try:
cached = msgpack.loads(data, raw=False)
except ValueError:
return
return self.prepare_response(request, cached)
| Django-locallibrary/env/Lib/site-packages/pip/_vendor/cachecontrol/serialize.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/cachecontrol/serialize.py",
"repo_id": "Django-locallibrary",
"token_count": 3127
} | 22 |
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .enums import ProbingState, MachineState
from .mbcharsetprober import MultiByteCharSetProber
from .codingstatemachine import CodingStateMachine
from .chardistribution import EUCJPDistributionAnalysis
from .jpcntx import EUCJPContextAnalysis
from .mbcssm import EUCJP_SM_MODEL
class EUCJPProber(MultiByteCharSetProber):
def __init__(self):
super(EUCJPProber, self).__init__()
self.coding_sm = CodingStateMachine(EUCJP_SM_MODEL)
self.distribution_analyzer = EUCJPDistributionAnalysis()
self.context_analyzer = EUCJPContextAnalysis()
self.reset()
def reset(self):
super(EUCJPProber, self).reset()
self.context_analyzer.reset()
@property
def charset_name(self):
return "EUC-JP"
@property
def language(self):
return "Japanese"
def feed(self, byte_str):
for i in range(len(byte_str)):
# PY3K: byte_str is a byte array, so byte_str[i] is an int, not a byte
coding_state = self.coding_sm.next_state(byte_str[i])
if coding_state == MachineState.ERROR:
self.logger.debug('%s %s prober hit error at byte %s',
self.charset_name, self.language, i)
self._state = ProbingState.NOT_ME
break
elif coding_state == MachineState.ITS_ME:
self._state = ProbingState.FOUND_IT
break
elif coding_state == MachineState.START:
char_len = self.coding_sm.get_current_charlen()
if i == 0:
self._last_char[1] = byte_str[0]
self.context_analyzer.feed(self._last_char, char_len)
self.distribution_analyzer.feed(self._last_char, char_len)
else:
self.context_analyzer.feed(byte_str[i - 1:i + 1],
char_len)
self.distribution_analyzer.feed(byte_str[i - 1:i + 1],
char_len)
self._last_char[0] = byte_str[-1]
if self.state == ProbingState.DETECTING:
if (self.context_analyzer.got_enough_data() and
(self.get_confidence() > self.SHORTCUT_THRESHOLD)):
self._state = ProbingState.FOUND_IT
return self.state
def get_confidence(self):
context_conf = self.context_analyzer.get_confidence()
distrib_conf = self.distribution_analyzer.get_confidence()
return max(context_conf, distrib_conf)
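# Minimal usage sketch (added; the sample bytes are EUC-JP for "konnichiwa"):
#
#     prober = EUCJPProber()
#     state = prober.feed(b"\xa4\xb3\xa4\xf3\xa4\xcb\xa4\xc1\xa4\xcf")
#     confidence = prober.get_confidence()
#
# feed() returns a ProbingState; get_confidence() combines the context and
# distribution analyses via the max() above.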
| Django-locallibrary/env/Lib/site-packages/pip/_vendor/chardet/eucjpprober.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/chardet/eucjpprober.py",
"repo_id": "Django-locallibrary",
"token_count": 1571
} | 23 |
#-------------------------------------------------------------------
# tarfile.py
#-------------------------------------------------------------------
# Copyright (C) 2002 Lars Gustaebel <[email protected]>
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation
# files (the "Software"), to deal in the Software without
# restriction, including without limitation the rights to use,
# copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following
# conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
# OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
# HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
# OTHER DEALINGS IN THE SOFTWARE.
#
from __future__ import print_function
"""Read from and write to tar format archives.
"""
__version__ = "$Revision$"
version = "0.9.0"
__author__ = "Lars Gust\u00e4bel ([email protected])"
__date__ = "$Date: 2011-02-25 17:42:01 +0200 (Fri, 25 Feb 2011) $"
__cvsid__ = "$Id: tarfile.py 88586 2011-02-25 15:42:01Z marc-andre.lemburg $"
__credits__ = "Gustavo Niemeyer, Niels Gust\u00e4bel, Richard Townsend."
#---------
# Imports
#---------
import sys
import os
import stat
import errno
import time
import struct
import copy
import re
try:
import grp, pwd
except ImportError:
grp = pwd = None
# os.symlink on Windows prior to 6.0 raises NotImplementedError
symlink_exception = (AttributeError, NotImplementedError)
try:
# WindowsError (1314) will be raised if the caller does not hold the
# SeCreateSymbolicLinkPrivilege privilege
symlink_exception += (WindowsError,)
except NameError:
pass
# from tarfile import *
__all__ = ["TarFile", "TarInfo", "is_tarfile", "TarError"]
if sys.version_info[0] < 3:
import __builtin__ as builtins
else:
import builtins
_open = builtins.open # Since 'open' is TarFile.open
#---------------------------------------------------------
# tar constants
#---------------------------------------------------------
NUL = b"\0" # the null character
BLOCKSIZE = 512 # length of processing blocks
RECORDSIZE = BLOCKSIZE * 20 # length of records
GNU_MAGIC = b"ustar \0" # magic gnu tar string
POSIX_MAGIC = b"ustar\x0000" # magic posix tar string
LENGTH_NAME = 100 # maximum length of a filename
LENGTH_LINK = 100 # maximum length of a linkname
LENGTH_PREFIX = 155 # maximum length of the prefix field
REGTYPE = b"0" # regular file
AREGTYPE = b"\0" # regular file
LNKTYPE = b"1" # link (inside tarfile)
SYMTYPE = b"2" # symbolic link
CHRTYPE = b"3" # character special device
BLKTYPE = b"4" # block special device
DIRTYPE = b"5" # directory
FIFOTYPE = b"6" # fifo special device
CONTTYPE = b"7" # contiguous file
GNUTYPE_LONGNAME = b"L" # GNU tar longname
GNUTYPE_LONGLINK = b"K" # GNU tar longlink
GNUTYPE_SPARSE = b"S" # GNU tar sparse file
XHDTYPE = b"x" # POSIX.1-2001 extended header
XGLTYPE = b"g" # POSIX.1-2001 global header
SOLARIS_XHDTYPE = b"X" # Solaris extended header
USTAR_FORMAT = 0 # POSIX.1-1988 (ustar) format
GNU_FORMAT = 1 # GNU tar format
PAX_FORMAT = 2 # POSIX.1-2001 (pax) format
DEFAULT_FORMAT = GNU_FORMAT
#---------------------------------------------------------
# tarfile constants
#---------------------------------------------------------
# File types that tarfile supports:
SUPPORTED_TYPES = (REGTYPE, AREGTYPE, LNKTYPE,
SYMTYPE, DIRTYPE, FIFOTYPE,
CONTTYPE, CHRTYPE, BLKTYPE,
GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
GNUTYPE_SPARSE)
# File types that will be treated as a regular file.
REGULAR_TYPES = (REGTYPE, AREGTYPE,
CONTTYPE, GNUTYPE_SPARSE)
# File types that are part of the GNU tar format.
GNU_TYPES = (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK,
GNUTYPE_SPARSE)
# Fields from a pax header that override a TarInfo attribute.
PAX_FIELDS = ("path", "linkpath", "size", "mtime",
"uid", "gid", "uname", "gname")
# Fields from a pax header that are affected by hdrcharset.
PAX_NAME_FIELDS = set(("path", "linkpath", "uname", "gname"))
# Fields in a pax header that are numbers, all other fields
# are treated as strings.
PAX_NUMBER_FIELDS = {
"atime": float,
"ctime": float,
"mtime": float,
"uid": int,
"gid": int,
"size": int
}
#---------------------------------------------------------
# Bits used in the mode field, values in octal.
#---------------------------------------------------------
S_IFLNK = 0o120000 # symbolic link
S_IFREG = 0o100000 # regular file
S_IFBLK = 0o060000 # block device
S_IFDIR = 0o040000 # directory
S_IFCHR = 0o020000 # character device
S_IFIFO = 0o010000 # fifo
TSUID = 0o4000 # set UID on execution
TSGID = 0o2000 # set GID on execution
TSVTX = 0o1000 # reserved
TUREAD = 0o400 # read by owner
TUWRITE = 0o200 # write by owner
TUEXEC = 0o100 # execute/search by owner
TGREAD = 0o040 # read by group
TGWRITE = 0o020 # write by group
TGEXEC = 0o010 # execute/search by group
TOREAD = 0o004 # read by other
TOWRITE = 0o002 # write by other
TOEXEC = 0o001 # execute/search by other
#---------------------------------------------------------
# initialization
#---------------------------------------------------------
if os.name in ("nt", "ce"):
ENCODING = "utf-8"
else:
ENCODING = sys.getfilesystemencoding()
#---------------------------------------------------------
# Some useful functions
#---------------------------------------------------------
def stn(s, length, encoding, errors):
"""Convert a string to a null-terminated bytes object.
"""
s = s.encode(encoding, errors)
return s[:length] + (length - len(s)) * NUL
def nts(s, encoding, errors):
"""Convert a null-terminated bytes object to a string.
"""
p = s.find(b"\0")
if p != -1:
s = s[:p]
return s.decode(encoding, errors)
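# Examples (added): stn("foo", 8, "utf-8", "strict") == b"foo\x00\x00\x00\x00\x00",
# and nts() reverses it by truncating at the first NUL:
# nts(b"foo\x00\x00\x00\x00\x00", "utf-8", "strict") == "foo".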
def nti(s):
    """Convert a number field to a python number.
    """
    # There are two possible encodings for a number field, see
    # itn() below. Compare the raw byte value so that the base-256
    # branch works with both Python 2 (str indexing) and Python 3
    # (int indexing).
    first = s[0] if isinstance(s[0], int) else ord(s[0])
    if first != 0o200:
        try:
            n = int(nts(s, "ascii", "strict") or "0", 8)
        except ValueError:
            raise InvalidHeaderError("invalid header")
    else:
        n = 0
        for i in range(len(s) - 1):
            byte = s[i + 1]
            n <<= 8
            n += byte if isinstance(byte, int) else ord(byte)
    return n
def itn(n, digits=8, format=DEFAULT_FORMAT):
"""Convert a python number to a number field.
"""
# POSIX 1003.1-1988 requires numbers to be encoded as a string of
# octal digits followed by a null-byte, this allows values up to
# (8**(digits-1))-1. GNU tar allows storing numbers greater than
# that if necessary. A leading 0o200 byte indicates this particular
# encoding, the following digits-1 bytes are a big-endian
# representation. This allows values up to (256**(digits-1))-1.
if 0 <= n < 8 ** (digits - 1):
s = ("%0*o" % (digits - 1, n)).encode("ascii") + NUL
else:
if format != GNU_FORMAT or n >= 256 ** (digits - 1):
raise ValueError("overflow in number field")
if n < 0:
# XXX We mimic GNU tar's behaviour with negative numbers,
# this could raise OverflowError.
n = struct.unpack("L", struct.pack("l", n))[0]
s = bytearray()
for i in range(digits - 1):
s.insert(0, n & 0o377)
n >>= 8
s.insert(0, 0o200)
return s
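# Examples (added): itn(0o644) == b"0000644\x00" (seven octal digits plus
# NUL for the default digits=8), while itn(8**11, 12, GNU_FORMAT) falls back
# to the base-256 form: a 0o200 byte followed by 11 big-endian bytes.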
def calc_chksums(buf):
"""Calculate the checksum for a member's header by summing up all
       characters except for the chksum field, which is treated as if
       it were filled with spaces. According to the GNU tar sources,
some tars (Sun and NeXT) calculate chksum with signed char,
which will be different if there are chars in the buffer with
the high bit set. So we calculate two checksums, unsigned and
signed.
"""
unsigned_chksum = 256 + sum(struct.unpack("148B", buf[:148]) + struct.unpack("356B", buf[156:512]))
signed_chksum = 256 + sum(struct.unpack("148b", buf[:148]) + struct.unpack("356b", buf[156:512]))
return unsigned_chksum, signed_chksum
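# Note (added): both sums are kept because a stored checksum may follow
# either convention; TarInfo.frombuf() accepts a header when
# nti(buf[148:156]) matches either value.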
def copyfileobj(src, dst, length=None):
"""Copy length bytes from fileobj src to fileobj dst.
If length is None, copy the entire content.
"""
if length == 0:
return
if length is None:
while True:
buf = src.read(16*1024)
if not buf:
break
dst.write(buf)
return
BUFSIZE = 16 * 1024
blocks, remainder = divmod(length, BUFSIZE)
for b in range(blocks):
buf = src.read(BUFSIZE)
if len(buf) < BUFSIZE:
raise IOError("end of file reached")
dst.write(buf)
if remainder != 0:
buf = src.read(remainder)
if len(buf) < remainder:
raise IOError("end of file reached")
dst.write(buf)
return
filemode_table = (
((S_IFLNK, "l"),
(S_IFREG, "-"),
(S_IFBLK, "b"),
(S_IFDIR, "d"),
(S_IFCHR, "c"),
(S_IFIFO, "p")),
((TUREAD, "r"),),
((TUWRITE, "w"),),
((TUEXEC|TSUID, "s"),
(TSUID, "S"),
(TUEXEC, "x")),
((TGREAD, "r"),),
((TGWRITE, "w"),),
((TGEXEC|TSGID, "s"),
(TSGID, "S"),
(TGEXEC, "x")),
((TOREAD, "r"),),
((TOWRITE, "w"),),
((TOEXEC|TSVTX, "t"),
(TSVTX, "T"),
(TOEXEC, "x"))
)
def filemode(mode):
"""Convert a file's mode to a string of the form
-rwxrwxrwx.
Used by TarFile.list()
"""
perm = []
for table in filemode_table:
for bit, char in table:
if mode & bit == bit:
perm.append(char)
break
else:
perm.append("-")
return "".join(perm)
class TarError(Exception):
"""Base exception."""
pass
class ExtractError(TarError):
"""General exception for extract errors."""
pass
class ReadError(TarError):
"""Exception for unreadable tar archives."""
pass
class CompressionError(TarError):
"""Exception for unavailable compression methods."""
pass
class StreamError(TarError):
"""Exception for unsupported operations on stream-like TarFiles."""
pass
class HeaderError(TarError):
"""Base exception for header errors."""
pass
class EmptyHeaderError(HeaderError):
"""Exception for empty headers."""
pass
class TruncatedHeaderError(HeaderError):
"""Exception for truncated headers."""
pass
class EOFHeaderError(HeaderError):
"""Exception for end of file headers."""
pass
class InvalidHeaderError(HeaderError):
"""Exception for invalid headers."""
pass
class SubsequentHeaderError(HeaderError):
"""Exception for missing and invalid extended headers."""
pass
#---------------------------
# internal stream interface
#---------------------------
class _LowLevelFile(object):
"""Low-level file object. Supports reading and writing.
It is used instead of a regular file object for streaming
access.
"""
def __init__(self, name, mode):
mode = {
"r": os.O_RDONLY,
"w": os.O_WRONLY | os.O_CREAT | os.O_TRUNC,
}[mode]
if hasattr(os, "O_BINARY"):
mode |= os.O_BINARY
self.fd = os.open(name, mode, 0o666)
def close(self):
os.close(self.fd)
def read(self, size):
return os.read(self.fd, size)
def write(self, s):
os.write(self.fd, s)
class _Stream(object):
"""Class that serves as an adapter between TarFile and
a stream-like object. The stream-like object only
needs to have a read() or write() method and is accessed
blockwise. Use of gzip or bzip2 compression is possible.
       A stream-like object could be, for example: sys.stdin,
sys.stdout, a socket, a tape device etc.
_Stream is intended to be used only internally.
"""
def __init__(self, name, mode, comptype, fileobj, bufsize):
"""Construct a _Stream object.
"""
self._extfileobj = True
if fileobj is None:
fileobj = _LowLevelFile(name, mode)
self._extfileobj = False
if comptype == '*':
# Enable transparent compression detection for the
# stream interface
fileobj = _StreamProxy(fileobj)
comptype = fileobj.getcomptype()
self.name = name or ""
self.mode = mode
self.comptype = comptype
self.fileobj = fileobj
self.bufsize = bufsize
self.buf = b""
self.pos = 0
self.closed = False
try:
if comptype == "gz":
try:
import zlib
except ImportError:
raise CompressionError("zlib module is not available")
self.zlib = zlib
self.crc = zlib.crc32(b"")
if mode == "r":
self._init_read_gz()
else:
self._init_write_gz()
if comptype == "bz2":
try:
import bz2
except ImportError:
raise CompressionError("bz2 module is not available")
if mode == "r":
self.dbuf = b""
self.cmp = bz2.BZ2Decompressor()
else:
self.cmp = bz2.BZ2Compressor()
except:
if not self._extfileobj:
self.fileobj.close()
self.closed = True
raise
def __del__(self):
if hasattr(self, "closed") and not self.closed:
self.close()
def _init_write_gz(self):
"""Initialize for writing with gzip compression.
"""
self.cmp = self.zlib.compressobj(9, self.zlib.DEFLATED,
-self.zlib.MAX_WBITS,
self.zlib.DEF_MEM_LEVEL,
0)
timestamp = struct.pack("<L", int(time.time()))
self.__write(b"\037\213\010\010" + timestamp + b"\002\377")
if self.name.endswith(".gz"):
self.name = self.name[:-3]
# RFC1952 says we must use ISO-8859-1 for the FNAME field.
self.__write(self.name.encode("iso-8859-1", "replace") + NUL)
def write(self, s):
"""Write string s to the stream.
"""
if self.comptype == "gz":
self.crc = self.zlib.crc32(s, self.crc)
self.pos += len(s)
if self.comptype != "tar":
s = self.cmp.compress(s)
self.__write(s)
def __write(self, s):
"""Write string s to the stream if a whole new block
is ready to be written.
"""
self.buf += s
while len(self.buf) > self.bufsize:
self.fileobj.write(self.buf[:self.bufsize])
self.buf = self.buf[self.bufsize:]
def close(self):
"""Close the _Stream object. No operation should be
done on it afterwards.
"""
if self.closed:
return
if self.mode == "w" and self.comptype != "tar":
self.buf += self.cmp.flush()
if self.mode == "w" and self.buf:
self.fileobj.write(self.buf)
self.buf = b""
if self.comptype == "gz":
# The native zlib crc is an unsigned 32-bit integer, but
# the Python wrapper implicitly casts that to a signed C
# long. So, on a 32-bit box self.crc may "look negative",
# while the same crc on a 64-bit box may "look positive".
# To avoid irksome warnings from the `struct` module, force
# it to look positive on all boxes.
self.fileobj.write(struct.pack("<L", self.crc & 0xffffffff))
self.fileobj.write(struct.pack("<L", self.pos & 0xffffFFFF))
if not self._extfileobj:
self.fileobj.close()
self.closed = True
def _init_read_gz(self):
"""Initialize for reading a gzip compressed fileobj.
"""
self.cmp = self.zlib.decompressobj(-self.zlib.MAX_WBITS)
self.dbuf = b""
# taken from gzip.GzipFile with some alterations
if self.__read(2) != b"\037\213":
raise ReadError("not a gzip file")
if self.__read(1) != b"\010":
raise CompressionError("unsupported compression method")
flag = ord(self.__read(1))
self.__read(6)
if flag & 4:
xlen = ord(self.__read(1)) + 256 * ord(self.__read(1))
self.read(xlen)
if flag & 8:
while True:
s = self.__read(1)
if not s or s == NUL:
break
if flag & 16:
while True:
s = self.__read(1)
if not s or s == NUL:
break
if flag & 2:
self.__read(2)
def tell(self):
"""Return the stream's file pointer position.
"""
return self.pos
def seek(self, pos=0):
"""Set the stream's file pointer to pos. Negative seeking
is forbidden.
"""
if pos - self.pos >= 0:
blocks, remainder = divmod(pos - self.pos, self.bufsize)
for i in range(blocks):
self.read(self.bufsize)
self.read(remainder)
else:
raise StreamError("seeking backwards is not allowed")
return self.pos
def read(self, size=None):
"""Return the next size number of bytes from the stream.
If size is not defined, return all bytes of the stream
up to EOF.
"""
if size is None:
t = []
while True:
buf = self._read(self.bufsize)
if not buf:
break
t.append(buf)
buf = "".join(t)
else:
buf = self._read(size)
self.pos += len(buf)
return buf
def _read(self, size):
"""Return size bytes from the stream.
"""
if self.comptype == "tar":
return self.__read(size)
c = len(self.dbuf)
while c < size:
buf = self.__read(self.bufsize)
if not buf:
break
try:
buf = self.cmp.decompress(buf)
except IOError:
raise ReadError("invalid compressed data")
self.dbuf += buf
c += len(buf)
buf = self.dbuf[:size]
self.dbuf = self.dbuf[size:]
return buf
def __read(self, size):
"""Return size bytes from stream. If internal buffer is empty,
read another block from the stream.
"""
c = len(self.buf)
while c < size:
buf = self.fileobj.read(self.bufsize)
if not buf:
break
self.buf += buf
c += len(buf)
buf = self.buf[:size]
self.buf = self.buf[size:]
return buf
# class _Stream
class _StreamProxy(object):
"""Small proxy class that enables transparent compression
detection for the Stream interface (mode 'r|*').
"""
def __init__(self, fileobj):
self.fileobj = fileobj
self.buf = self.fileobj.read(BLOCKSIZE)
def read(self, size):
self.read = self.fileobj.read
return self.buf
def getcomptype(self):
if self.buf.startswith(b"\037\213\010"):
return "gz"
if self.buf.startswith(b"BZh91"):
return "bz2"
return "tar"
def close(self):
self.fileobj.close()
# class StreamProxy
class _BZ2Proxy(object):
"""Small proxy class that enables external file object
support for "r:bz2" and "w:bz2" modes. This is actually
a workaround for a limitation in bz2 module's BZ2File
class which (unlike gzip.GzipFile) has no support for
a file object argument.
"""
blocksize = 16 * 1024
def __init__(self, fileobj, mode):
self.fileobj = fileobj
self.mode = mode
self.name = getattr(self.fileobj, "name", None)
self.init()
def init(self):
import bz2
self.pos = 0
if self.mode == "r":
self.bz2obj = bz2.BZ2Decompressor()
self.fileobj.seek(0)
self.buf = b""
else:
self.bz2obj = bz2.BZ2Compressor()
def read(self, size):
x = len(self.buf)
while x < size:
raw = self.fileobj.read(self.blocksize)
if not raw:
break
data = self.bz2obj.decompress(raw)
self.buf += data
x += len(data)
buf = self.buf[:size]
self.buf = self.buf[size:]
self.pos += len(buf)
return buf
def seek(self, pos):
if pos < self.pos:
self.init()
self.read(pos - self.pos)
def tell(self):
return self.pos
def write(self, data):
self.pos += len(data)
raw = self.bz2obj.compress(data)
self.fileobj.write(raw)
def close(self):
if self.mode == "w":
raw = self.bz2obj.flush()
self.fileobj.write(raw)
# class _BZ2Proxy
#------------------------
# Extraction file object
#------------------------
class _FileInFile(object):
"""A thin wrapper around an existing file object that
provides a part of its data as an individual file
object.
"""
def __init__(self, fileobj, offset, size, blockinfo=None):
self.fileobj = fileobj
self.offset = offset
self.size = size
self.position = 0
if blockinfo is None:
blockinfo = [(0, size)]
# Construct a map with data and zero blocks.
self.map_index = 0
self.map = []
lastpos = 0
realpos = self.offset
for offset, size in blockinfo:
if offset > lastpos:
self.map.append((False, lastpos, offset, None))
self.map.append((True, offset, offset + size, realpos))
realpos += size
lastpos = offset + size
if lastpos < self.size:
self.map.append((False, lastpos, self.size, None))
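        # Worked example (added): with offset=o, size=30 and
        # blockinfo=[(0, 10), (20, 5)], the map becomes
        # [(True, 0, 10, o), (False, 10, 20, None),
        #  (True, 20, 25, o + 10), (False, 25, 30, None)],
        # i.e. data runs interleaved with zero-filled holes.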
def seekable(self):
if not hasattr(self.fileobj, "seekable"):
# XXX gzip.GzipFile and bz2.BZ2File
return True
return self.fileobj.seekable()
def tell(self):
"""Return the current file position.
"""
return self.position
def seek(self, position):
"""Seek to a position in the file.
"""
self.position = position
def read(self, size=None):
"""Read data from the file.
"""
if size is None:
size = self.size - self.position
else:
size = min(size, self.size - self.position)
buf = b""
while size > 0:
while True:
data, start, stop, offset = self.map[self.map_index]
if start <= self.position < stop:
break
else:
self.map_index += 1
if self.map_index == len(self.map):
self.map_index = 0
length = min(size, stop - self.position)
if data:
self.fileobj.seek(offset + (self.position - start))
buf += self.fileobj.read(length)
else:
buf += NUL * length
size -= length
self.position += length
return buf
#class _FileInFile
class ExFileObject(object):
"""File-like object for reading an archive member.
Is returned by TarFile.extractfile().
"""
blocksize = 1024
def __init__(self, tarfile, tarinfo):
self.fileobj = _FileInFile(tarfile.fileobj,
tarinfo.offset_data,
tarinfo.size,
tarinfo.sparse)
self.name = tarinfo.name
self.mode = "r"
self.closed = False
self.size = tarinfo.size
self.position = 0
self.buffer = b""
def readable(self):
return True
def writable(self):
return False
def seekable(self):
return self.fileobj.seekable()
def read(self, size=None):
"""Read at most size bytes from the file. If size is not
present or None, read all data until EOF is reached.
"""
if self.closed:
raise ValueError("I/O operation on closed file")
buf = b""
if self.buffer:
if size is None:
buf = self.buffer
self.buffer = b""
else:
buf = self.buffer[:size]
self.buffer = self.buffer[size:]
if size is None:
buf += self.fileobj.read()
else:
buf += self.fileobj.read(size - len(buf))
self.position += len(buf)
return buf
# XXX TextIOWrapper uses the read1() method.
read1 = read
def readline(self, size=-1):
"""Read one entire line from the file. If size is present
and non-negative, return a string with at most that
size, which may be an incomplete line.
"""
if self.closed:
raise ValueError("I/O operation on closed file")
pos = self.buffer.find(b"\n") + 1
if pos == 0:
# no newline found.
while True:
buf = self.fileobj.read(self.blocksize)
self.buffer += buf
if not buf or b"\n" in buf:
pos = self.buffer.find(b"\n") + 1
if pos == 0:
# no newline found.
pos = len(self.buffer)
break
if size != -1:
pos = min(size, pos)
buf = self.buffer[:pos]
self.buffer = self.buffer[pos:]
self.position += len(buf)
return buf
def readlines(self):
"""Return a list with all remaining lines.
"""
result = []
while True:
line = self.readline()
if not line: break
result.append(line)
return result
def tell(self):
"""Return the current file position.
"""
if self.closed:
raise ValueError("I/O operation on closed file")
return self.position
def seek(self, pos, whence=os.SEEK_SET):
"""Seek to a position in the file.
"""
if self.closed:
raise ValueError("I/O operation on closed file")
if whence == os.SEEK_SET:
self.position = min(max(pos, 0), self.size)
elif whence == os.SEEK_CUR:
if pos < 0:
self.position = max(self.position + pos, 0)
else:
self.position = min(self.position + pos, self.size)
elif whence == os.SEEK_END:
self.position = max(min(self.size + pos, self.size), 0)
else:
raise ValueError("Invalid argument")
self.buffer = b""
self.fileobj.seek(self.position)
def close(self):
"""Close the file object.
"""
self.closed = True
def __iter__(self):
"""Get an iterator over the file's lines.
"""
while True:
line = self.readline()
if not line:
break
yield line
#class ExFileObject
#------------------
# Exported Classes
#------------------
class TarInfo(object):
"""Informational class which holds the details about an
archive member given by a tar header block.
TarInfo objects are returned by TarFile.getmember(),
TarFile.getmembers() and TarFile.gettarinfo() and are
usually created internally.
"""
__slots__ = ("name", "mode", "uid", "gid", "size", "mtime",
"chksum", "type", "linkname", "uname", "gname",
"devmajor", "devminor",
"offset", "offset_data", "pax_headers", "sparse",
"tarfile", "_sparse_structs", "_link_target")
def __init__(self, name=""):
"""Construct a TarInfo object. name is the optional name
of the member.
"""
self.name = name # member name
self.mode = 0o644 # file permissions
self.uid = 0 # user id
self.gid = 0 # group id
self.size = 0 # file size
self.mtime = 0 # modification time
self.chksum = 0 # header checksum
self.type = REGTYPE # member type
self.linkname = "" # link name
self.uname = "" # user name
self.gname = "" # group name
self.devmajor = 0 # device major number
self.devminor = 0 # device minor number
self.offset = 0 # the tar header starts here
self.offset_data = 0 # the file's data starts here
self.sparse = None # sparse member information
self.pax_headers = {} # pax header information
# In pax headers the "name" and "linkname" field are called
# "path" and "linkpath".
def _getpath(self):
return self.name
def _setpath(self, name):
self.name = name
path = property(_getpath, _setpath)
def _getlinkpath(self):
return self.linkname
def _setlinkpath(self, linkname):
self.linkname = linkname
linkpath = property(_getlinkpath, _setlinkpath)
def __repr__(self):
return "<%s %r at %#x>" % (self.__class__.__name__,self.name,id(self))
def get_info(self):
"""Return the TarInfo's attributes as a dictionary.
"""
info = {
"name": self.name,
"mode": self.mode & 0o7777,
"uid": self.uid,
"gid": self.gid,
"size": self.size,
"mtime": self.mtime,
"chksum": self.chksum,
"type": self.type,
"linkname": self.linkname,
"uname": self.uname,
"gname": self.gname,
"devmajor": self.devmajor,
"devminor": self.devminor
}
if info["type"] == DIRTYPE and not info["name"].endswith("/"):
info["name"] += "/"
return info
def tobuf(self, format=DEFAULT_FORMAT, encoding=ENCODING, errors="surrogateescape"):
"""Return a tar header as a string of 512 byte blocks.
"""
info = self.get_info()
if format == USTAR_FORMAT:
return self.create_ustar_header(info, encoding, errors)
elif format == GNU_FORMAT:
return self.create_gnu_header(info, encoding, errors)
elif format == PAX_FORMAT:
return self.create_pax_header(info, encoding)
else:
raise ValueError("invalid format")
def create_ustar_header(self, info, encoding, errors):
"""Return the object as a ustar header block.
"""
info["magic"] = POSIX_MAGIC
if len(info["linkname"]) > LENGTH_LINK:
raise ValueError("linkname is too long")
if len(info["name"]) > LENGTH_NAME:
info["prefix"], info["name"] = self._posix_split_name(info["name"])
return self._create_header(info, USTAR_FORMAT, encoding, errors)
def create_gnu_header(self, info, encoding, errors):
"""Return the object as a GNU header block sequence.
"""
info["magic"] = GNU_MAGIC
buf = b""
if len(info["linkname"]) > LENGTH_LINK:
buf += self._create_gnu_long_header(info["linkname"], GNUTYPE_LONGLINK, encoding, errors)
if len(info["name"]) > LENGTH_NAME:
buf += self._create_gnu_long_header(info["name"], GNUTYPE_LONGNAME, encoding, errors)
return buf + self._create_header(info, GNU_FORMAT, encoding, errors)
def create_pax_header(self, info, encoding):
"""Return the object as a ustar header block. If it cannot be
represented this way, prepend a pax extended header sequence
           with supplemental information.
"""
info["magic"] = POSIX_MAGIC
pax_headers = self.pax_headers.copy()
# Test string fields for values that exceed the field length or cannot
# be represented in ASCII encoding.
for name, hname, length in (
("name", "path", LENGTH_NAME), ("linkname", "linkpath", LENGTH_LINK),
("uname", "uname", 32), ("gname", "gname", 32)):
if hname in pax_headers:
# The pax header has priority.
continue
# Try to encode the string as ASCII.
try:
info[name].encode("ascii", "strict")
except UnicodeEncodeError:
pax_headers[hname] = info[name]
continue
if len(info[name]) > length:
pax_headers[hname] = info[name]
        # Test number fields for values that exceed the field limit or
        # values that need to be stored as float.
for name, digits in (("uid", 8), ("gid", 8), ("size", 12), ("mtime", 12)):
if name in pax_headers:
# The pax header has priority. Avoid overflow.
info[name] = 0
continue
val = info[name]
if not 0 <= val < 8 ** (digits - 1) or isinstance(val, float):
pax_headers[name] = str(val)
info[name] = 0
# Create a pax extended header if necessary.
if pax_headers:
buf = self._create_pax_generic_header(pax_headers, XHDTYPE, encoding)
else:
buf = b""
return buf + self._create_header(info, USTAR_FORMAT, "ascii", "replace")
@classmethod
def create_pax_global_header(cls, pax_headers):
"""Return the object as a pax global header block sequence.
"""
return cls._create_pax_generic_header(pax_headers, XGLTYPE, "utf8")
def _posix_split_name(self, name):
"""Split a name longer than 100 chars into a prefix
and a name part.
"""
prefix = name[:LENGTH_PREFIX + 1]
while prefix and prefix[-1] != "/":
prefix = prefix[:-1]
name = name[len(prefix):]
prefix = prefix[:-1]
if not prefix or len(name) > LENGTH_NAME:
raise ValueError("name is too long")
return prefix, name
@staticmethod
def _create_header(info, format, encoding, errors):
"""Return a header block. info is a dictionary with file
information, format must be one of the *_FORMAT constants.
"""
parts = [
stn(info.get("name", ""), 100, encoding, errors),
itn(info.get("mode", 0) & 0o7777, 8, format),
itn(info.get("uid", 0), 8, format),
itn(info.get("gid", 0), 8, format),
itn(info.get("size", 0), 12, format),
itn(info.get("mtime", 0), 12, format),
b" ", # checksum field
info.get("type", REGTYPE),
stn(info.get("linkname", ""), 100, encoding, errors),
info.get("magic", POSIX_MAGIC),
stn(info.get("uname", ""), 32, encoding, errors),
stn(info.get("gname", ""), 32, encoding, errors),
itn(info.get("devmajor", 0), 8, format),
itn(info.get("devminor", 0), 8, format),
stn(info.get("prefix", ""), 155, encoding, errors)
]
buf = struct.pack("%ds" % BLOCKSIZE, b"".join(parts))
chksum = calc_chksums(buf[-BLOCKSIZE:])[0]
buf = buf[:-364] + ("%06o\0" % chksum).encode("ascii") + buf[-357:]
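        # Note (added): 512 - 364 == 148 and 512 - 357 == 155, so the line
        # above rewrites exactly the 8-byte chksum field at offsets 148..155:
        # six octal digits, a NUL, and the original trailing space.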
return buf
@staticmethod
def _create_payload(payload):
"""Return the string payload filled with zero bytes
up to the next 512 byte border.
"""
blocks, remainder = divmod(len(payload), BLOCKSIZE)
if remainder > 0:
payload += (BLOCKSIZE - remainder) * NUL
return payload
@classmethod
def _create_gnu_long_header(cls, name, type, encoding, errors):
"""Return a GNUTYPE_LONGNAME or GNUTYPE_LONGLINK sequence
for name.
"""
name = name.encode(encoding, errors) + NUL
info = {}
info["name"] = "././@LongLink"
info["type"] = type
info["size"] = len(name)
info["magic"] = GNU_MAGIC
# create extended header + name blocks.
return cls._create_header(info, USTAR_FORMAT, encoding, errors) + \
cls._create_payload(name)
@classmethod
def _create_pax_generic_header(cls, pax_headers, type, encoding):
"""Return a POSIX.1-2008 extended or global header sequence
that contains a list of keyword, value pairs. The values
must be strings.
"""
# Check if one of the fields contains surrogate characters and thereby
# forces hdrcharset=BINARY, see _proc_pax() for more information.
binary = False
for keyword, value in pax_headers.items():
try:
value.encode("utf8", "strict")
except UnicodeEncodeError:
binary = True
break
records = b""
if binary:
# Put the hdrcharset field at the beginning of the header.
records += b"21 hdrcharset=BINARY\n"
for keyword, value in pax_headers.items():
keyword = keyword.encode("utf8")
if binary:
# Try to restore the original byte representation of `value'.
                # Needless to say, the encoding must match the string.
value = value.encode(encoding, "surrogateescape")
else:
value = value.encode("utf8")
l = len(keyword) + len(value) + 3 # ' ' + '=' + '\n'
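            # Note (added): the length prefix counts itself, so the loop
            # below iterates to a fixed point; e.g. for "mtime=1350486034"
            # the body is 18 bytes and p converges to 20 (len("20") == 2).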
n = p = 0
while True:
n = l + len(str(p))
if n == p:
break
p = n
records += bytes(str(p), "ascii") + b" " + keyword + b"=" + value + b"\n"
# We use a hardcoded "././@PaxHeader" name like star does
# instead of the one that POSIX recommends.
info = {}
info["name"] = "././@PaxHeader"
info["type"] = type
info["size"] = len(records)
info["magic"] = POSIX_MAGIC
# Create pax header + record blocks.
return cls._create_header(info, USTAR_FORMAT, "ascii", "replace") + \
cls._create_payload(records)
@classmethod
def frombuf(cls, buf, encoding, errors):
"""Construct a TarInfo object from a 512 byte bytes object.
"""
if len(buf) == 0:
raise EmptyHeaderError("empty header")
if len(buf) != BLOCKSIZE:
raise TruncatedHeaderError("truncated header")
if buf.count(NUL) == BLOCKSIZE:
raise EOFHeaderError("end of file header")
chksum = nti(buf[148:156])
if chksum not in calc_chksums(buf):
raise InvalidHeaderError("bad checksum")
obj = cls()
obj.name = nts(buf[0:100], encoding, errors)
obj.mode = nti(buf[100:108])
obj.uid = nti(buf[108:116])
obj.gid = nti(buf[116:124])
obj.size = nti(buf[124:136])
obj.mtime = nti(buf[136:148])
obj.chksum = chksum
obj.type = buf[156:157]
obj.linkname = nts(buf[157:257], encoding, errors)
obj.uname = nts(buf[265:297], encoding, errors)
obj.gname = nts(buf[297:329], encoding, errors)
obj.devmajor = nti(buf[329:337])
obj.devminor = nti(buf[337:345])
prefix = nts(buf[345:500], encoding, errors)
# Old V7 tar format represents a directory as a regular
# file with a trailing slash.
if obj.type == AREGTYPE and obj.name.endswith("/"):
obj.type = DIRTYPE
        # The old GNU sparse format occupies some of the unused
        # space in the buffer for up to 4 sparse structures.
        # Save them for later processing in _proc_sparse().
if obj.type == GNUTYPE_SPARSE:
pos = 386
structs = []
for i in range(4):
try:
offset = nti(buf[pos:pos + 12])
numbytes = nti(buf[pos + 12:pos + 24])
except ValueError:
break
structs.append((offset, numbytes))
pos += 24
isextended = bool(buf[482])
origsize = nti(buf[483:495])
obj._sparse_structs = (structs, isextended, origsize)
# Remove redundant slashes from directories.
if obj.isdir():
obj.name = obj.name.rstrip("/")
# Reconstruct a ustar longname.
if prefix and obj.type not in GNU_TYPES:
obj.name = prefix + "/" + obj.name
return obj
@classmethod
def fromtarfile(cls, tarfile):
"""Return the next TarInfo object from TarFile object
tarfile.
"""
buf = tarfile.fileobj.read(BLOCKSIZE)
obj = cls.frombuf(buf, tarfile.encoding, tarfile.errors)
obj.offset = tarfile.fileobj.tell() - BLOCKSIZE
return obj._proc_member(tarfile)
#--------------------------------------------------------------------------
# The following are methods that are called depending on the type of a
# member. The entry point is _proc_member() which can be overridden in a
# subclass to add custom _proc_*() methods. A _proc_*() method MUST
    # implement the following operations:
# 1. Set self.offset_data to the position where the data blocks begin,
# if there is data that follows.
# 2. Set tarfile.offset to the position where the next member's header will
# begin.
# 3. Return self or another valid TarInfo object.
def _proc_member(self, tarfile):
"""Choose the right processing method depending on
the type and call it.
"""
if self.type in (GNUTYPE_LONGNAME, GNUTYPE_LONGLINK):
return self._proc_gnulong(tarfile)
elif self.type == GNUTYPE_SPARSE:
return self._proc_sparse(tarfile)
elif self.type in (XHDTYPE, XGLTYPE, SOLARIS_XHDTYPE):
return self._proc_pax(tarfile)
else:
return self._proc_builtin(tarfile)
def _proc_builtin(self, tarfile):
"""Process a builtin type or an unknown type which
will be treated as a regular file.
"""
self.offset_data = tarfile.fileobj.tell()
offset = self.offset_data
if self.isreg() or self.type not in SUPPORTED_TYPES:
# Skip the following data blocks.
offset += self._block(self.size)
tarfile.offset = offset
# Patch the TarInfo object with saved global
# header information.
self._apply_pax_info(tarfile.pax_headers, tarfile.encoding, tarfile.errors)
return self
def _proc_gnulong(self, tarfile):
"""Process the blocks that hold a GNU longname
or longlink member.
"""
buf = tarfile.fileobj.read(self._block(self.size))
# Fetch the next header and process it.
try:
next = self.fromtarfile(tarfile)
except HeaderError:
raise SubsequentHeaderError("missing or bad subsequent header")
# Patch the TarInfo object from the next header with
# the longname information.
next.offset = self.offset
if self.type == GNUTYPE_LONGNAME:
next.name = nts(buf, tarfile.encoding, tarfile.errors)
elif self.type == GNUTYPE_LONGLINK:
next.linkname = nts(buf, tarfile.encoding, tarfile.errors)
return next
def _proc_sparse(self, tarfile):
"""Process a GNU sparse header plus extra headers.
"""
# We already collected some sparse structures in frombuf().
structs, isextended, origsize = self._sparse_structs
del self._sparse_structs
# Collect sparse structures from extended header blocks.
while isextended:
buf = tarfile.fileobj.read(BLOCKSIZE)
pos = 0
for i in range(21):
try:
offset = nti(buf[pos:pos + 12])
numbytes = nti(buf[pos + 12:pos + 24])
except ValueError:
break
if offset and numbytes:
structs.append((offset, numbytes))
pos += 24
isextended = bool(buf[504])
self.sparse = structs
self.offset_data = tarfile.fileobj.tell()
tarfile.offset = self.offset_data + self._block(self.size)
self.size = origsize
return self
def _proc_pax(self, tarfile):
"""Process an extended or global header as described in
POSIX.1-2008.
"""
# Read the header information.
buf = tarfile.fileobj.read(self._block(self.size))
# A pax header stores supplemental information for either
# the following file (extended) or all following files
# (global).
if self.type == XGLTYPE:
pax_headers = tarfile.pax_headers
else:
pax_headers = tarfile.pax_headers.copy()
# Check if the pax header contains a hdrcharset field. This tells us
# the encoding of the path, linkpath, uname and gname fields. Normally,
        # these fields are UTF-8 encoded, but since POSIX.1-2008, tar
        # implementations are allowed to store them as raw binary strings if
        # the translation to UTF-8 fails.
match = re.search(br"\d+ hdrcharset=([^\n]+)\n", buf)
if match is not None:
pax_headers["hdrcharset"] = match.group(1).decode("utf8")
# For the time being, we don't care about anything other than "BINARY".
# The only other value that is currently allowed by the standard is
# "ISO-IR 10646 2000 UTF-8" in other words UTF-8.
hdrcharset = pax_headers.get("hdrcharset")
if hdrcharset == "BINARY":
encoding = tarfile.encoding
else:
encoding = "utf8"
        # Parse pax header information. A record looks like this:
# "%d %s=%s\n" % (length, keyword, value). length is the size
# of the complete record including the length field itself and
# the newline. keyword and value are both UTF-8 encoded strings.
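        # Example (added): b"20 atime=1350486034\n" is one complete record,
        # 20 bytes long including the length field itself, the space, the
        # "=" and the trailing newline.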
regex = re.compile(br"(\d+) ([^=]+)=")
pos = 0
while True:
match = regex.match(buf, pos)
if not match:
break
length, keyword = match.groups()
length = int(length)
value = buf[match.end(2) + 1:match.start(1) + length - 1]
# Normally, we could just use "utf8" as the encoding and "strict"
# as the error handler, but we better not take the risk. For
# example, GNU tar <= 1.23 is known to store filenames it cannot
# translate to UTF-8 as raw strings (unfortunately without a
# hdrcharset=BINARY header).
# We first try the strict standard encoding, and if that fails we
# fall back on the user's encoding and error handler.
keyword = self._decode_pax_field(keyword, "utf8", "utf8",
tarfile.errors)
if keyword in PAX_NAME_FIELDS:
value = self._decode_pax_field(value, encoding, tarfile.encoding,
tarfile.errors)
else:
value = self._decode_pax_field(value, "utf8", "utf8",
tarfile.errors)
pax_headers[keyword] = value
pos += length
# Fetch the next header.
try:
next = self.fromtarfile(tarfile)
except HeaderError:
raise SubsequentHeaderError("missing or bad subsequent header")
# Process GNU sparse information.
if "GNU.sparse.map" in pax_headers:
# GNU extended sparse format version 0.1.
self._proc_gnusparse_01(next, pax_headers)
elif "GNU.sparse.size" in pax_headers:
# GNU extended sparse format version 0.0.
self._proc_gnusparse_00(next, pax_headers, buf)
elif pax_headers.get("GNU.sparse.major") == "1" and pax_headers.get("GNU.sparse.minor") == "0":
# GNU extended sparse format version 1.0.
self._proc_gnusparse_10(next, pax_headers, tarfile)
if self.type in (XHDTYPE, SOLARIS_XHDTYPE):
# Patch the TarInfo object with the extended header info.
next._apply_pax_info(pax_headers, tarfile.encoding, tarfile.errors)
next.offset = self.offset
if "size" in pax_headers:
# If the extended header replaces the size field,
# we need to recalculate the offset where the next
# header starts.
offset = next.offset_data
if next.isreg() or next.type not in SUPPORTED_TYPES:
offset += next._block(next.size)
tarfile.offset = offset
return next
def _proc_gnusparse_00(self, next, pax_headers, buf):
"""Process a GNU tar extended sparse header, version 0.0.
"""
offsets = []
for match in re.finditer(br"\d+ GNU.sparse.offset=(\d+)\n", buf):
offsets.append(int(match.group(1)))
numbytes = []
for match in re.finditer(br"\d+ GNU.sparse.numbytes=(\d+)\n", buf):
numbytes.append(int(match.group(1)))
next.sparse = list(zip(offsets, numbytes))
def _proc_gnusparse_01(self, next, pax_headers):
"""Process a GNU tar extended sparse header, version 0.1.
"""
sparse = [int(x) for x in pax_headers["GNU.sparse.map"].split(",")]
next.sparse = list(zip(sparse[::2], sparse[1::2]))
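        # Example (added): a pax value of "0,10,20,5" yields
        # next.sparse == [(0, 10), (20, 5)].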
def _proc_gnusparse_10(self, next, pax_headers, tarfile):
"""Process a GNU tar extended sparse header, version 1.0.
"""
fields = None
sparse = []
buf = tarfile.fileobj.read(BLOCKSIZE)
fields, buf = buf.split(b"\n", 1)
fields = int(fields)
while len(sparse) < fields * 2:
if b"\n" not in buf:
buf += tarfile.fileobj.read(BLOCKSIZE)
number, buf = buf.split(b"\n", 1)
sparse.append(int(number))
next.offset_data = tarfile.fileobj.tell()
next.sparse = list(zip(sparse[::2], sparse[1::2]))
def _apply_pax_info(self, pax_headers, encoding, errors):
"""Replace fields with supplemental information from a previous
pax extended or global header.
"""
for keyword, value in pax_headers.items():
if keyword == "GNU.sparse.name":
setattr(self, "path", value)
elif keyword == "GNU.sparse.size":
setattr(self, "size", int(value))
elif keyword == "GNU.sparse.realsize":
setattr(self, "size", int(value))
elif keyword in PAX_FIELDS:
if keyword in PAX_NUMBER_FIELDS:
try:
value = PAX_NUMBER_FIELDS[keyword](value)
except ValueError:
value = 0
if keyword == "path":
value = value.rstrip("/")
setattr(self, keyword, value)
self.pax_headers = pax_headers.copy()
def _decode_pax_field(self, value, encoding, fallback_encoding, fallback_errors):
"""Decode a single field from a pax record.
"""
try:
return value.decode(encoding, "strict")
except UnicodeDecodeError:
return value.decode(fallback_encoding, fallback_errors)
def _block(self, count):
"""Round up a byte count by BLOCKSIZE and return it,
e.g. _block(834) => 1024.
"""
blocks, remainder = divmod(count, BLOCKSIZE)
if remainder:
blocks += 1
return blocks * BLOCKSIZE
def isreg(self):
return self.type in REGULAR_TYPES
def isfile(self):
return self.isreg()
def isdir(self):
return self.type == DIRTYPE
def issym(self):
return self.type == SYMTYPE
def islnk(self):
return self.type == LNKTYPE
def ischr(self):
return self.type == CHRTYPE
def isblk(self):
return self.type == BLKTYPE
def isfifo(self):
return self.type == FIFOTYPE
def issparse(self):
return self.sparse is not None
def isdev(self):
return self.type in (CHRTYPE, BLKTYPE, FIFOTYPE)
# class TarInfo
class TarFile(object):
"""The TarFile Class provides an interface to tar archives.
"""
debug = 0 # May be set from 0 (no msgs) to 3 (all msgs)
dereference = False # If true, add content of linked file to the
# tar file, else the link.
ignore_zeros = False # If true, skips empty or invalid blocks and
# continues processing.
errorlevel = 1 # If 0, fatal errors only appear in debug
# messages (if debug >= 0). If > 0, errors
# are passed to the caller as exceptions.
format = DEFAULT_FORMAT # The format to use when creating an archive.
encoding = ENCODING # Encoding for 8-bit character strings.
errors = None # Error handler for unicode conversion.
tarinfo = TarInfo # The default TarInfo class to use.
fileobject = ExFileObject # The default ExFileObject class to use.
def __init__(self, name=None, mode="r", fileobj=None, format=None,
tarinfo=None, dereference=None, ignore_zeros=None, encoding=None,
errors="surrogateescape", pax_headers=None, debug=None, errorlevel=None):
"""Open an (uncompressed) tar archive `name'. `mode' is either 'r' to
read from an existing archive, 'a' to append data to an existing
file or 'w' to create a new file overwriting an existing one. `mode'
defaults to 'r'.
If `fileobj' is given, it is used for reading or writing data. If it
can be determined, `mode' is overridden by `fileobj's mode.
           `fileobj' is not closed when TarFile is closed.
"""
if len(mode) > 1 or mode not in "raw":
raise ValueError("mode must be 'r', 'a' or 'w'")
self.mode = mode
self._mode = {"r": "rb", "a": "r+b", "w": "wb"}[mode]
if not fileobj:
if self.mode == "a" and not os.path.exists(name):
# Create nonexistent files in append mode.
self.mode = "w"
self._mode = "wb"
fileobj = bltn_open(name, self._mode)
self._extfileobj = False
else:
if name is None and hasattr(fileobj, "name"):
name = fileobj.name
if hasattr(fileobj, "mode"):
self._mode = fileobj.mode
self._extfileobj = True
self.name = os.path.abspath(name) if name else None
self.fileobj = fileobj
# Init attributes.
if format is not None:
self.format = format
if tarinfo is not None:
self.tarinfo = tarinfo
if dereference is not None:
self.dereference = dereference
if ignore_zeros is not None:
self.ignore_zeros = ignore_zeros
if encoding is not None:
self.encoding = encoding
self.errors = errors
if pax_headers is not None and self.format == PAX_FORMAT:
self.pax_headers = pax_headers
else:
self.pax_headers = {}
if debug is not None:
self.debug = debug
if errorlevel is not None:
self.errorlevel = errorlevel
# Init datastructures.
self.closed = False
self.members = [] # list of members as TarInfo objects
self._loaded = False # flag if all members have been read
self.offset = self.fileobj.tell()
# current position in the archive file
self.inodes = {} # dictionary caching the inodes of
# archive members already added
try:
if self.mode == "r":
self.firstmember = None
self.firstmember = self.next()
if self.mode == "a":
# Move to the end of the archive,
# before the first empty block.
while True:
self.fileobj.seek(self.offset)
try:
tarinfo = self.tarinfo.fromtarfile(self)
self.members.append(tarinfo)
except EOFHeaderError:
self.fileobj.seek(self.offset)
break
except HeaderError as e:
raise ReadError(str(e))
if self.mode in "aw":
self._loaded = True
if self.pax_headers:
buf = self.tarinfo.create_pax_global_header(self.pax_headers.copy())
self.fileobj.write(buf)
self.offset += len(buf)
except:
if not self._extfileobj:
self.fileobj.close()
self.closed = True
raise
#--------------------------------------------------------------------------
# Below are the classmethods which act as alternate constructors to the
# TarFile class. The open() method is the only one that is needed for
# public use; it is the "super"-constructor and is able to select an
# adequate "sub"-constructor for a particular compression using the mapping
# from OPEN_METH.
#
# This concept allows one to subclass TarFile without losing the comfort of
# the super-constructor. A sub-constructor is registered and made available
# by adding it to the mapping in OPEN_METH.
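    #
    # Illustrative usage sketch (added note, not part of the original
    # module; archive and member names are assumed):
    #
    #   with TarFile.open("example.tar.gz", "r:gz") as tf:
    #       tf.extractall("dest")
    #   with TarFile.open("out.tar.bz2", "w:bz2") as tf:
    #       tf.add("somefile.txt")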
@classmethod
def open(cls, name=None, mode="r", fileobj=None, bufsize=RECORDSIZE, **kwargs):
"""Open a tar archive for reading, writing or appending. Return
an appropriate TarFile class.
mode:
'r' or 'r:*' open for reading with transparent compression
'r:' open for reading exclusively uncompressed
'r:gz' open for reading with gzip compression
'r:bz2' open for reading with bzip2 compression
'a' or 'a:' open for appending, creating the file if necessary
'w' or 'w:' open for writing without compression
'w:gz' open for writing with gzip compression
'w:bz2' open for writing with bzip2 compression
'r|*' open a stream of tar blocks with transparent compression
'r|' open an uncompressed stream of tar blocks for reading
'r|gz' open a gzip compressed stream of tar blocks
'r|bz2' open a bzip2 compressed stream of tar blocks
'w|' open an uncompressed stream for writing
'w|gz' open a gzip compressed stream for writing
'w|bz2' open a bzip2 compressed stream for writing
"""
if not name and not fileobj:
raise ValueError("nothing to open")
if mode in ("r", "r:*"):
# Find out which *open() is appropriate for opening the file.
for comptype in cls.OPEN_METH:
func = getattr(cls, cls.OPEN_METH[comptype])
if fileobj is not None:
saved_pos = fileobj.tell()
try:
return func(name, "r", fileobj, **kwargs)
except (ReadError, CompressionError) as e:
if fileobj is not None:
fileobj.seek(saved_pos)
continue
raise ReadError("file could not be opened successfully")
elif ":" in mode:
filemode, comptype = mode.split(":", 1)
filemode = filemode or "r"
comptype = comptype or "tar"
# Select the *open() function according to
# given compression.
if comptype in cls.OPEN_METH:
func = getattr(cls, cls.OPEN_METH[comptype])
else:
raise CompressionError("unknown compression type %r" % comptype)
return func(name, filemode, fileobj, **kwargs)
elif "|" in mode:
filemode, comptype = mode.split("|", 1)
filemode = filemode or "r"
comptype = comptype or "tar"
if filemode not in "rw":
raise ValueError("mode must be 'r' or 'w'")
stream = _Stream(name, filemode, comptype, fileobj, bufsize)
try:
t = cls(name, filemode, stream, **kwargs)
except:
stream.close()
raise
t._extfileobj = False
return t
elif mode in "aw":
return cls.taropen(name, mode, fileobj, **kwargs)
raise ValueError("undiscernible mode")
@classmethod
def taropen(cls, name, mode="r", fileobj=None, **kwargs):
"""Open uncompressed tar archive name for reading or writing.
"""
if len(mode) > 1 or mode not in "raw":
raise ValueError("mode must be 'r', 'a' or 'w'")
return cls(name, mode, fileobj, **kwargs)
@classmethod
def gzopen(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
"""Open gzip compressed tar archive name for reading or writing.
Appending is not allowed.
"""
if len(mode) > 1 or mode not in "rw":
raise ValueError("mode must be 'r' or 'w'")
try:
import gzip
gzip.GzipFile
except (ImportError, AttributeError):
raise CompressionError("gzip module is not available")
extfileobj = fileobj is not None
try:
fileobj = gzip.GzipFile(name, mode + "b", compresslevel, fileobj)
t = cls.taropen(name, mode, fileobj, **kwargs)
except IOError:
if not extfileobj and fileobj is not None:
fileobj.close()
if fileobj is None:
raise
raise ReadError("not a gzip file")
except:
if not extfileobj and fileobj is not None:
fileobj.close()
raise
t._extfileobj = extfileobj
return t
@classmethod
def bz2open(cls, name, mode="r", fileobj=None, compresslevel=9, **kwargs):
"""Open bzip2 compressed tar archive name for reading or writing.
Appending is not allowed.
"""
if len(mode) > 1 or mode not in "rw":
raise ValueError("mode must be 'r' or 'w'.")
try:
import bz2
except ImportError:
raise CompressionError("bz2 module is not available")
if fileobj is not None:
fileobj = _BZ2Proxy(fileobj, mode)
else:
fileobj = bz2.BZ2File(name, mode, compresslevel=compresslevel)
try:
t = cls.taropen(name, mode, fileobj, **kwargs)
except (IOError, EOFError):
fileobj.close()
raise ReadError("not a bzip2 file")
t._extfileobj = False
return t
# All *open() methods are registered here.
OPEN_METH = {
"tar": "taropen", # uncompressed tar
"gz": "gzopen", # gzip compressed tar
"bz2": "bz2open" # bzip2 compressed tar
}
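    # A subclass can register a further sub-constructor by extending this
    # mapping, e.g. (hypothetical 'xz' support, shown only as a sketch):
    #
    #   class XZTarFile(TarFile):
    #       @classmethod
    #       def xzopen(cls, name, mode="r", fileobj=None, **kwargs):
    #           ...
    #       OPEN_METH = dict(TarFile.OPEN_METH, xz="xzopen")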
#--------------------------------------------------------------------------
# The public methods which TarFile provides:
def close(self):
"""Close the TarFile. In write-mode, two finishing zero blocks are
appended to the archive.
"""
if self.closed:
return
if self.mode in "aw":
self.fileobj.write(NUL * (BLOCKSIZE * 2))
self.offset += (BLOCKSIZE * 2)
# fill up the end with zero-blocks
# (like option -b20 for tar does)
blocks, remainder = divmod(self.offset, RECORDSIZE)
if remainder > 0:
self.fileobj.write(NUL * (RECORDSIZE - remainder))
if not self._extfileobj:
self.fileobj.close()
self.closed = True
def getmember(self, name):
"""Return a TarInfo object for member `name'. If `name' can not be
found in the archive, KeyError is raised. If a member occurs more
than once in the archive, its last occurrence is assumed to be the
most up-to-date version.
"""
tarinfo = self._getmember(name)
if tarinfo is None:
raise KeyError("filename %r not found" % name)
return tarinfo
def getmembers(self):
"""Return the members of the archive as a list of TarInfo objects. The
list has the same order as the members in the archive.
"""
self._check()
if not self._loaded: # if we want to obtain a list of
self._load() # all members, we first have to
# scan the whole archive.
return self.members
def getnames(self):
"""Return the members of the archive as a list of their names. It has
the same order as the list returned by getmembers().
"""
return [tarinfo.name for tarinfo in self.getmembers()]
def gettarinfo(self, name=None, arcname=None, fileobj=None):
"""Create a TarInfo object for either the file `name' or the file
object `fileobj' (using os.fstat on its file descriptor). You can
modify some of the TarInfo's attributes before you add it using
addfile(). If given, `arcname' specifies an alternative name for the
file in the archive.
"""
self._check("aw")
# When fileobj is given, replace name by
# fileobj's real name.
if fileobj is not None:
name = fileobj.name
        # Build the name of the member in the archive.
        # Backslashes are converted to forward slashes, and
        # absolute paths are turned into relative paths.
if arcname is None:
arcname = name
drv, arcname = os.path.splitdrive(arcname)
arcname = arcname.replace(os.sep, "/")
arcname = arcname.lstrip("/")
# Now, fill the TarInfo object with
# information specific for the file.
tarinfo = self.tarinfo()
tarinfo.tarfile = self
# Use os.stat or os.lstat, depending on platform
# and if symlinks shall be resolved.
if fileobj is None:
if hasattr(os, "lstat") and not self.dereference:
statres = os.lstat(name)
else:
statres = os.stat(name)
else:
statres = os.fstat(fileobj.fileno())
linkname = ""
stmd = statres.st_mode
if stat.S_ISREG(stmd):
inode = (statres.st_ino, statres.st_dev)
if not self.dereference and statres.st_nlink > 1 and \
inode in self.inodes and arcname != self.inodes[inode]:
# Is it a hardlink to an already
# archived file?
type = LNKTYPE
linkname = self.inodes[inode]
else:
                # The inode is added only if it's valid.
# For win32 it is always 0.
type = REGTYPE
if inode[0]:
self.inodes[inode] = arcname
elif stat.S_ISDIR(stmd):
type = DIRTYPE
elif stat.S_ISFIFO(stmd):
type = FIFOTYPE
elif stat.S_ISLNK(stmd):
type = SYMTYPE
linkname = os.readlink(name)
elif stat.S_ISCHR(stmd):
type = CHRTYPE
elif stat.S_ISBLK(stmd):
type = BLKTYPE
else:
return None
# Fill the TarInfo object with all
# information we can get.
tarinfo.name = arcname
tarinfo.mode = stmd
tarinfo.uid = statres.st_uid
tarinfo.gid = statres.st_gid
if type == REGTYPE:
tarinfo.size = statres.st_size
else:
tarinfo.size = 0
tarinfo.mtime = statres.st_mtime
tarinfo.type = type
tarinfo.linkname = linkname
if pwd:
try:
tarinfo.uname = pwd.getpwuid(tarinfo.uid)[0]
except KeyError:
pass
if grp:
try:
tarinfo.gname = grp.getgrgid(tarinfo.gid)[0]
except KeyError:
pass
if type in (CHRTYPE, BLKTYPE):
if hasattr(os, "major") and hasattr(os, "minor"):
tarinfo.devmajor = os.major(statres.st_rdev)
tarinfo.devminor = os.minor(statres.st_rdev)
return tarinfo
def list(self, verbose=True):
"""Print a table of contents to sys.stdout. If `verbose' is False, only
the names of the members are printed. If it is True, an `ls -l'-like
output is produced.
"""
self._check()
for tarinfo in self:
if verbose:
print(filemode(tarinfo.mode), end=' ')
print("%s/%s" % (tarinfo.uname or tarinfo.uid,
tarinfo.gname or tarinfo.gid), end=' ')
if tarinfo.ischr() or tarinfo.isblk():
print("%10s" % ("%d,%d" \
% (tarinfo.devmajor, tarinfo.devminor)), end=' ')
else:
print("%10d" % tarinfo.size, end=' ')
print("%d-%02d-%02d %02d:%02d:%02d" \
% time.localtime(tarinfo.mtime)[:6], end=' ')
print(tarinfo.name + ("/" if tarinfo.isdir() else ""), end=' ')
if verbose:
if tarinfo.issym():
print("->", tarinfo.linkname, end=' ')
if tarinfo.islnk():
print("link to", tarinfo.linkname, end=' ')
print()
def add(self, name, arcname=None, recursive=True, exclude=None, filter=None):
"""Add the file `name' to the archive. `name' may be any type of file
(directory, fifo, symbolic link, etc.). If given, `arcname'
specifies an alternative name for the file in the archive.
Directories are added recursively by default. This can be avoided by
setting `recursive' to False. `exclude' is a function that should
return True for each filename to be excluded. `filter' is a function
that expects a TarInfo object argument and returns the changed
TarInfo object, if it returns None the TarInfo object will be
excluded from the archive.
"""
self._check("aw")
if arcname is None:
arcname = name
# Exclude pathnames.
if exclude is not None:
import warnings
warnings.warn("use the filter argument instead",
DeprecationWarning, 2)
if exclude(name):
self._dbg(2, "tarfile: Excluded %r" % name)
return
# Skip if somebody tries to archive the archive...
if self.name is not None and os.path.abspath(name) == self.name:
self._dbg(2, "tarfile: Skipped %r" % name)
return
self._dbg(1, name)
# Create a TarInfo object from the file.
tarinfo = self.gettarinfo(name, arcname)
if tarinfo is None:
self._dbg(1, "tarfile: Unsupported type %r" % name)
return
# Change or exclude the TarInfo object.
if filter is not None:
tarinfo = filter(tarinfo)
if tarinfo is None:
self._dbg(2, "tarfile: Excluded %r" % name)
return
# Append the tar header and data to the archive.
if tarinfo.isreg():
f = bltn_open(name, "rb")
self.addfile(tarinfo, f)
f.close()
elif tarinfo.isdir():
self.addfile(tarinfo)
if recursive:
for f in os.listdir(name):
self.add(os.path.join(name, f), os.path.join(arcname, f),
recursive, exclude, filter=filter)
else:
self.addfile(tarinfo)
def addfile(self, tarinfo, fileobj=None):
"""Add the TarInfo object `tarinfo' to the archive. If `fileobj' is
given, tarinfo.size bytes are read from it and added to the archive.
You can create TarInfo objects using gettarinfo().
On Windows platforms, `fileobj' should always be opened with mode
'rb' to avoid irritation about the file size.
"""
self._check("aw")
tarinfo = copy.copy(tarinfo)
buf = tarinfo.tobuf(self.format, self.encoding, self.errors)
self.fileobj.write(buf)
self.offset += len(buf)
# If there's data to follow, append it.
if fileobj is not None:
copyfileobj(fileobj, self.fileobj, tarinfo.size)
blocks, remainder = divmod(tarinfo.size, BLOCKSIZE)
if remainder > 0:
self.fileobj.write(NUL * (BLOCKSIZE - remainder))
blocks += 1
self.offset += blocks * BLOCKSIZE
self.members.append(tarinfo)
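    # Typical gettarinfo()/addfile() pairing (illustrative; `tf' and the
    # file names are assumed):
    #
    #   ti = tf.gettarinfo("data.bin", arcname="pkg/data.bin")
    #   with bltn_open("data.bin", "rb") as f:
    #       tf.addfile(ti, f)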
def extractall(self, path=".", members=None):
"""Extract all members from the archive to the current working
directory and set owner, modification time and permissions on
directories afterwards. `path' specifies a different directory
to extract to. `members' is optional and must be a subset of the
list returned by getmembers().
"""
directories = []
if members is None:
members = self
for tarinfo in members:
if tarinfo.isdir():
# Extract directories with a safe mode.
directories.append(tarinfo)
tarinfo = copy.copy(tarinfo)
tarinfo.mode = 0o700
# Do not set_attrs directories, as we will do that further down
self.extract(tarinfo, path, set_attrs=not tarinfo.isdir())
# Reverse sort directories.
directories.sort(key=lambda a: a.name)
directories.reverse()
# Set correct owner, mtime and filemode on directories.
for tarinfo in directories:
dirpath = os.path.join(path, tarinfo.name)
try:
self.chown(tarinfo, dirpath)
self.utime(tarinfo, dirpath)
self.chmod(tarinfo, dirpath)
except ExtractError as e:
if self.errorlevel > 1:
raise
else:
self._dbg(1, "tarfile: %s" % e)
def extract(self, member, path="", set_attrs=True):
"""Extract a member from the archive to the current working directory,
using its full name. Its file information is extracted as accurately
as possible. `member' may be a filename or a TarInfo object. You can
specify a different directory using `path'. File attributes (owner,
mtime, mode) are set unless `set_attrs' is False.
"""
self._check("r")
if isinstance(member, str):
tarinfo = self.getmember(member)
else:
tarinfo = member
# Prepare the link target for makelink().
if tarinfo.islnk():
tarinfo._link_target = os.path.join(path, tarinfo.linkname)
try:
self._extract_member(tarinfo, os.path.join(path, tarinfo.name),
set_attrs=set_attrs)
except EnvironmentError as e:
if self.errorlevel > 0:
raise
else:
if e.filename is None:
self._dbg(1, "tarfile: %s" % e.strerror)
else:
self._dbg(1, "tarfile: %s %r" % (e.strerror, e.filename))
except ExtractError as e:
if self.errorlevel > 1:
raise
else:
self._dbg(1, "tarfile: %s" % e)
def extractfile(self, member):
"""Extract a member from the archive as a file object. `member' may be
a filename or a TarInfo object. If `member' is a regular file, a
file-like object is returned. If `member' is a link, a file-like
object is constructed from the link's target. If `member' is none of
the above, None is returned.
The file-like object is read-only and provides the following
methods: read(), readline(), readlines(), seek() and tell()
"""
self._check("r")
if isinstance(member, str):
tarinfo = self.getmember(member)
else:
tarinfo = member
if tarinfo.isreg():
return self.fileobject(self, tarinfo)
elif tarinfo.type not in SUPPORTED_TYPES:
# If a member's type is unknown, it is treated as a
# regular file.
return self.fileobject(self, tarinfo)
elif tarinfo.islnk() or tarinfo.issym():
if isinstance(self.fileobj, _Stream):
# A small but ugly workaround for the case that someone tries
# to extract a (sym)link as a file-object from a non-seekable
# stream of tar blocks.
raise StreamError("cannot extract (sym)link as file object")
else:
# A (sym)link's file object is its target's file object.
return self.extractfile(self._find_link_target(tarinfo))
else:
# If there's no data associated with the member (directory, chrdev,
# blkdev, etc.), return None instead of a file object.
return None
def _extract_member(self, tarinfo, targetpath, set_attrs=True):
"""Extract the TarInfo object tarinfo to a physical
file called targetpath.
"""
# Fetch the TarInfo object for the given name
# and build the destination pathname, replacing
        # forward slashes with platform-specific separators.
targetpath = targetpath.rstrip("/")
targetpath = targetpath.replace("/", os.sep)
# Create all upper directories.
upperdirs = os.path.dirname(targetpath)
if upperdirs and not os.path.exists(upperdirs):
# Create directories that are not part of the archive with
# default permissions.
os.makedirs(upperdirs)
if tarinfo.islnk() or tarinfo.issym():
self._dbg(1, "%s -> %s" % (tarinfo.name, tarinfo.linkname))
else:
self._dbg(1, tarinfo.name)
if tarinfo.isreg():
self.makefile(tarinfo, targetpath)
elif tarinfo.isdir():
self.makedir(tarinfo, targetpath)
elif tarinfo.isfifo():
self.makefifo(tarinfo, targetpath)
elif tarinfo.ischr() or tarinfo.isblk():
self.makedev(tarinfo, targetpath)
elif tarinfo.islnk() or tarinfo.issym():
self.makelink(tarinfo, targetpath)
elif tarinfo.type not in SUPPORTED_TYPES:
self.makeunknown(tarinfo, targetpath)
else:
self.makefile(tarinfo, targetpath)
if set_attrs:
self.chown(tarinfo, targetpath)
if not tarinfo.issym():
self.chmod(tarinfo, targetpath)
self.utime(tarinfo, targetpath)
#--------------------------------------------------------------------------
# Below are the different file methods. They are called via
# _extract_member() when extract() is called. They can be replaced in a
# subclass to implement other functionality.
def makedir(self, tarinfo, targetpath):
"""Make a directory called targetpath.
"""
try:
# Use a safe mode for the directory, the real mode is set
# later in _extract_member().
os.mkdir(targetpath, 0o700)
except EnvironmentError as e:
if e.errno != errno.EEXIST:
raise
def makefile(self, tarinfo, targetpath):
"""Make a file called targetpath.
"""
source = self.fileobj
source.seek(tarinfo.offset_data)
target = bltn_open(targetpath, "wb")
if tarinfo.sparse is not None:
for offset, size in tarinfo.sparse:
target.seek(offset)
copyfileobj(source, target, size)
else:
copyfileobj(source, target, tarinfo.size)
target.seek(tarinfo.size)
target.truncate()
target.close()
def makeunknown(self, tarinfo, targetpath):
"""Make a file from a TarInfo object with an unknown type
at targetpath.
"""
self.makefile(tarinfo, targetpath)
self._dbg(1, "tarfile: Unknown file type %r, " \
"extracted as regular file." % tarinfo.type)
def makefifo(self, tarinfo, targetpath):
"""Make a fifo called targetpath.
"""
if hasattr(os, "mkfifo"):
os.mkfifo(targetpath)
else:
raise ExtractError("fifo not supported by system")
def makedev(self, tarinfo, targetpath):
"""Make a character or block device called targetpath.
"""
if not hasattr(os, "mknod") or not hasattr(os, "makedev"):
raise ExtractError("special devices not supported by system")
mode = tarinfo.mode
if tarinfo.isblk():
mode |= stat.S_IFBLK
else:
mode |= stat.S_IFCHR
os.mknod(targetpath, mode,
os.makedev(tarinfo.devmajor, tarinfo.devminor))
def makelink(self, tarinfo, targetpath):
"""Make a (symbolic) link called targetpath. If it cannot be created
(platform limitation), we try to make a copy of the referenced file
instead of a link.
"""
try:
# For systems that support symbolic and hard links.
if tarinfo.issym():
os.symlink(tarinfo.linkname, targetpath)
else:
# See extract().
if os.path.exists(tarinfo._link_target):
os.link(tarinfo._link_target, targetpath)
else:
self._extract_member(self._find_link_target(tarinfo),
targetpath)
        except symlink_exception:
            # If the link could not be created (e.g. no platform support
            # or insufficient privileges), fall back to extracting a copy
            # of the link's target, as the docstring promises.
            try:
                self._extract_member(self._find_link_target(tarinfo),
                                     targetpath)
            except KeyError:
                raise ExtractError("unable to resolve link inside archive")
def chown(self, tarinfo, targetpath):
"""Set owner of targetpath according to tarinfo.
"""
if pwd and hasattr(os, "geteuid") and os.geteuid() == 0:
# We have to be root to do so.
try:
g = grp.getgrnam(tarinfo.gname)[2]
except KeyError:
g = tarinfo.gid
try:
u = pwd.getpwnam(tarinfo.uname)[2]
except KeyError:
u = tarinfo.uid
try:
if tarinfo.issym() and hasattr(os, "lchown"):
os.lchown(targetpath, u, g)
else:
if sys.platform != "os2emx":
os.chown(targetpath, u, g)
except EnvironmentError as e:
raise ExtractError("could not change owner")
def chmod(self, tarinfo, targetpath):
"""Set file permissions of targetpath according to tarinfo.
"""
if hasattr(os, 'chmod'):
try:
os.chmod(targetpath, tarinfo.mode)
except EnvironmentError as e:
raise ExtractError("could not change mode")
def utime(self, tarinfo, targetpath):
"""Set modification time of targetpath according to tarinfo.
"""
if not hasattr(os, 'utime'):
return
try:
os.utime(targetpath, (tarinfo.mtime, tarinfo.mtime))
except EnvironmentError as e:
raise ExtractError("could not change modification time")
#--------------------------------------------------------------------------
def next(self):
"""Return the next member of the archive as a TarInfo object, when
           TarFile is opened for reading. Return None if there are no more
           members available.
"""
self._check("ra")
if self.firstmember is not None:
m = self.firstmember
self.firstmember = None
return m
# Read the next block.
self.fileobj.seek(self.offset)
tarinfo = None
while True:
try:
tarinfo = self.tarinfo.fromtarfile(self)
except EOFHeaderError as e:
if self.ignore_zeros:
self._dbg(2, "0x%X: %s" % (self.offset, e))
self.offset += BLOCKSIZE
continue
except InvalidHeaderError as e:
if self.ignore_zeros:
self._dbg(2, "0x%X: %s" % (self.offset, e))
self.offset += BLOCKSIZE
continue
elif self.offset == 0:
raise ReadError(str(e))
except EmptyHeaderError:
if self.offset == 0:
raise ReadError("empty file")
except TruncatedHeaderError as e:
if self.offset == 0:
raise ReadError(str(e))
except SubsequentHeaderError as e:
raise ReadError(str(e))
break
if tarinfo is not None:
self.members.append(tarinfo)
else:
self._loaded = True
return tarinfo
#--------------------------------------------------------------------------
# Little helper methods:
def _getmember(self, name, tarinfo=None, normalize=False):
"""Find an archive member by name from bottom to top.
If tarinfo is given, it is used as the starting point.
"""
# Ensure that all members have been loaded.
members = self.getmembers()
# Limit the member search list up to tarinfo.
if tarinfo is not None:
members = members[:members.index(tarinfo)]
if normalize:
name = os.path.normpath(name)
for member in reversed(members):
if normalize:
member_name = os.path.normpath(member.name)
else:
member_name = member.name
if name == member_name:
return member
def _load(self):
"""Read through the entire archive file and look for readable
members.
"""
while True:
tarinfo = self.next()
if tarinfo is None:
break
self._loaded = True
def _check(self, mode=None):
"""Check if TarFile is still open, and if the operation's mode
corresponds to TarFile's mode.
"""
if self.closed:
raise IOError("%s is closed" % self.__class__.__name__)
if mode is not None and self.mode not in mode:
raise IOError("bad operation for mode %r" % self.mode)
def _find_link_target(self, tarinfo):
"""Find the target member of a symlink or hardlink member in the
archive.
"""
if tarinfo.issym():
# Always search the entire archive.
linkname = os.path.dirname(tarinfo.name) + "/" + tarinfo.linkname
limit = None
else:
# Search the archive before the link, because a hard link is
# just a reference to an already archived file.
linkname = tarinfo.linkname
limit = tarinfo
member = self._getmember(linkname, tarinfo=limit, normalize=True)
if member is None:
raise KeyError("linkname %r not found" % linkname)
return member
def __iter__(self):
"""Provide an iterator object.
"""
if self._loaded:
return iter(self.members)
else:
return TarIter(self)
def _dbg(self, level, msg):
"""Write debugging output to sys.stderr.
"""
if level <= self.debug:
print(msg, file=sys.stderr)
def __enter__(self):
self._check()
return self
def __exit__(self, type, value, traceback):
if type is None:
self.close()
else:
# An exception occurred. We must not call close() because
# it would try to write end-of-archive blocks and padding.
if not self._extfileobj:
self.fileobj.close()
self.closed = True
# class TarFile
class TarIter(object):
"""Iterator Class.
for tarinfo in TarFile(...):
suite...
"""
def __init__(self, tarfile):
"""Construct a TarIter object.
"""
self.tarfile = tarfile
self.index = 0
def __iter__(self):
"""Return iterator object.
"""
return self
def __next__(self):
"""Return the next item using TarFile's next() method.
When all members have been read, set TarFile as _loaded.
"""
# Fix for SF #1100429: Under rare circumstances it can
# happen that getmembers() is called during iteration,
# which will cause TarIter to stop prematurely.
if not self.tarfile._loaded:
tarinfo = self.tarfile.next()
if not tarinfo:
self.tarfile._loaded = True
raise StopIteration
else:
try:
tarinfo = self.tarfile.members[self.index]
except IndexError:
raise StopIteration
self.index += 1
return tarinfo
next = __next__ # for Python 2.x
#--------------------
# exported functions
#--------------------
def is_tarfile(name):
"""Return True if name points to a tar archive that we
are able to handle, else return False.
"""
try:
t = open(name)
t.close()
return True
except TarError:
return False
bltn_open = open
open = TarFile.open
| Django-locallibrary/env/Lib/site-packages/pip/_vendor/distlib/_backport/tarfile.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/distlib/_backport/tarfile.py",
"repo_id": "Django-locallibrary",
"token_count": 43603
} | 24 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2017 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
from __future__ import unicode_literals
import base64
import codecs
import datetime
import distutils.util
from email import message_from_file
import hashlib
import imp
import json
import logging
import os
import posixpath
import re
import shutil
import sys
import tempfile
import zipfile
from . import __version__, DistlibException
from .compat import sysconfig, ZipFile, fsdecode, text_type, filter
from .database import InstalledDistribution
from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME,
LEGACY_METADATA_FILENAME)
from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache,
cached_property, get_cache_base, read_exports, tempdir)
from .version import NormalizedVersion, UnsupportedVersionError
logger = logging.getLogger(__name__)
cache = None # created when needed
if hasattr(sys, 'pypy_version_info'): # pragma: no cover
IMP_PREFIX = 'pp'
elif sys.platform.startswith('java'): # pragma: no cover
IMP_PREFIX = 'jy'
elif sys.platform == 'cli': # pragma: no cover
IMP_PREFIX = 'ip'
else:
IMP_PREFIX = 'cp'
VER_SUFFIX = sysconfig.get_config_var('py_version_nodot')
if not VER_SUFFIX: # pragma: no cover
VER_SUFFIX = '%s%s' % sys.version_info[:2]
PYVER = 'py' + VER_SUFFIX
IMPVER = IMP_PREFIX + VER_SUFFIX
ARCH = distutils.util.get_platform().replace('-', '_').replace('.', '_')
ABI = sysconfig.get_config_var('SOABI')
if ABI and ABI.startswith('cpython-'):
ABI = ABI.replace('cpython-', 'cp')
else:
def _derive_abi():
parts = ['cp', VER_SUFFIX]
if sysconfig.get_config_var('Py_DEBUG'):
parts.append('d')
if sysconfig.get_config_var('WITH_PYMALLOC'):
parts.append('m')
if sysconfig.get_config_var('Py_UNICODE_SIZE') == 4:
parts.append('u')
return ''.join(parts)
ABI = _derive_abi()
del _derive_abi
FILENAME_RE = re.compile(r'''
(?P<nm>[^-]+)
-(?P<vn>\d+[^-]*)
(-(?P<bn>\d+[^-]*))?
-(?P<py>\w+\d+(\.\w+\d+)*)
-(?P<bi>\w+)
-(?P<ar>\w+(\.\w+)*)
\.whl$
''', re.IGNORECASE | re.VERBOSE)
NAME_VERSION_RE = re.compile(r'''
(?P<nm>[^-]+)
-(?P<vn>\d+[^-]*)
(-(?P<bn>\d+[^-]*))?$
''', re.IGNORECASE | re.VERBOSE)
SHEBANG_RE = re.compile(br'\s*#![^\r\n]*')
SHEBANG_DETAIL_RE = re.compile(br'^(\s*#!("[^"]+"|\S+))\s+(.*)$')
SHEBANG_PYTHON = b'#!python'
SHEBANG_PYTHONW = b'#!pythonw'
if os.sep == '/':
to_posix = lambda o: o
else:
to_posix = lambda o: o.replace(os.sep, '/')
class Mounter(object):
def __init__(self):
self.impure_wheels = {}
self.libs = {}
def add(self, pathname, extensions):
self.impure_wheels[pathname] = extensions
self.libs.update(extensions)
def remove(self, pathname):
extensions = self.impure_wheels.pop(pathname)
for k, v in extensions:
if k in self.libs:
del self.libs[k]
def find_module(self, fullname, path=None):
if fullname in self.libs:
result = self
else:
result = None
return result
def load_module(self, fullname):
if fullname in sys.modules:
result = sys.modules[fullname]
else:
if fullname not in self.libs:
raise ImportError('unable to find extension for %s' % fullname)
result = imp.load_dynamic(fullname, self.libs[fullname])
result.__loader__ = self
parts = fullname.rsplit('.', 1)
if len(parts) > 1:
result.__package__ = parts[0]
return result
_hook = Mounter()
class Wheel(object):
"""
Class to build and install from Wheel files (PEP 427).
"""
wheel_version = (1, 1)
hash_kind = 'sha256'
def __init__(self, filename=None, sign=False, verify=False):
"""
Initialise an instance using a (valid) filename.
"""
self.sign = sign
self.should_verify = verify
self.buildver = ''
self.pyver = [PYVER]
self.abi = ['none']
self.arch = ['any']
self.dirname = os.getcwd()
if filename is None:
self.name = 'dummy'
self.version = '0.1'
self._filename = self.filename
else:
m = NAME_VERSION_RE.match(filename)
if m:
info = m.groupdict('')
self.name = info['nm']
# Reinstate the local version separator
self.version = info['vn'].replace('_', '-')
self.buildver = info['bn']
self._filename = self.filename
else:
dirname, filename = os.path.split(filename)
m = FILENAME_RE.match(filename)
if not m:
raise DistlibException('Invalid name or '
'filename: %r' % filename)
if dirname:
self.dirname = os.path.abspath(dirname)
self._filename = filename
info = m.groupdict('')
self.name = info['nm']
self.version = info['vn']
self.buildver = info['bn']
self.pyver = info['py'].split('.')
self.abi = info['bi'].split('.')
self.arch = info['ar'].split('.')
@property
def filename(self):
"""
Build and return a filename from the various components.
"""
if self.buildver:
buildver = '-' + self.buildver
else:
buildver = ''
pyver = '.'.join(self.pyver)
abi = '.'.join(self.abi)
arch = '.'.join(self.arch)
# replace - with _ as a local version separator
version = self.version.replace('-', '_')
return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver,
pyver, abi, arch)
@property
def exists(self):
path = os.path.join(self.dirname, self.filename)
return os.path.isfile(path)
@property
def tags(self):
for pyver in self.pyver:
for abi in self.abi:
for arch in self.arch:
yield pyver, abi, arch
@cached_property
def metadata(self):
pathname = os.path.join(self.dirname, self.filename)
name_ver = '%s-%s' % (self.name, self.version)
info_dir = '%s.dist-info' % name_ver
wrapper = codecs.getreader('utf-8')
with ZipFile(pathname, 'r') as zf:
wheel_metadata = self.get_wheel_metadata(zf)
wv = wheel_metadata['Wheel-Version'].split('.', 1)
file_version = tuple([int(i) for i in wv])
# if file_version < (1, 1):
# fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME,
# LEGACY_METADATA_FILENAME]
# else:
# fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME]
fns = [WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME]
result = None
for fn in fns:
try:
metadata_filename = posixpath.join(info_dir, fn)
with zf.open(metadata_filename) as bf:
wf = wrapper(bf)
result = Metadata(fileobj=wf)
if result:
break
except KeyError:
pass
if not result:
raise ValueError('Invalid wheel, because metadata is '
'missing: looked in %s' % ', '.join(fns))
return result
def get_wheel_metadata(self, zf):
name_ver = '%s-%s' % (self.name, self.version)
info_dir = '%s.dist-info' % name_ver
metadata_filename = posixpath.join(info_dir, 'WHEEL')
with zf.open(metadata_filename) as bf:
wf = codecs.getreader('utf-8')(bf)
message = message_from_file(wf)
return dict(message)
@cached_property
def info(self):
pathname = os.path.join(self.dirname, self.filename)
with ZipFile(pathname, 'r') as zf:
result = self.get_wheel_metadata(zf)
return result
def process_shebang(self, data):
m = SHEBANG_RE.match(data)
if m:
end = m.end()
shebang, data_after_shebang = data[:end], data[end:]
# Preserve any arguments after the interpreter
if b'pythonw' in shebang.lower():
shebang_python = SHEBANG_PYTHONW
else:
shebang_python = SHEBANG_PYTHON
m = SHEBANG_DETAIL_RE.match(shebang)
if m:
args = b' ' + m.groups()[-1]
else:
args = b''
shebang = shebang_python + args
data = shebang + data_after_shebang
else:
cr = data.find(b'\r')
lf = data.find(b'\n')
if cr < 0 or cr > lf:
term = b'\n'
else:
if data[cr:cr + 2] == b'\r\n':
term = b'\r\n'
else:
term = b'\r'
data = SHEBANG_PYTHON + term + data
return data
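    # Examples of the rewriting above (input bytes assumed):
    #
    #   b'#!/usr/bin/python\nprint(1)\n'    -> b'#!python\nprint(1)\n'
    #   b'#!/usr/bin/python -u\nprint(1)\n' -> b'#!python -u\nprint(1)\n'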
def get_hash(self, data, hash_kind=None):
if hash_kind is None:
hash_kind = self.hash_kind
try:
hasher = getattr(hashlib, hash_kind)
except AttributeError:
raise DistlibException('Unsupported hash algorithm: %r' % hash_kind)
result = hasher(data).digest()
result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii')
return hash_kind, result
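    # The returned pair mirrors the RECORD digest notation,
    # 'sha256=<urlsafe-b64-digest-without-padding>', e.g. for b'foo':
    #   ('sha256', 'LCa0a2j_xo_5m0U8HTBBNBNCLXBkg7-g-YpeiGJm564')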
def write_record(self, records, record_path, base):
records = list(records) # make a copy, as mutated
p = to_posix(os.path.relpath(record_path, base))
records.append((p, '', ''))
with CSVWriter(record_path) as writer:
for row in records:
writer.writerow(row)
def write_records(self, info, libdir, archive_paths):
records = []
distinfo, info_dir = info
hasher = getattr(hashlib, self.hash_kind)
for ap, p in archive_paths:
with open(p, 'rb') as f:
data = f.read()
digest = '%s=%s' % self.get_hash(data)
size = os.path.getsize(p)
records.append((ap, digest, size))
p = os.path.join(distinfo, 'RECORD')
self.write_record(records, p, libdir)
ap = to_posix(os.path.join(info_dir, 'RECORD'))
archive_paths.append((ap, p))
def build_zip(self, pathname, archive_paths):
with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf:
for ap, p in archive_paths:
logger.debug('Wrote %s to %s in wheel', p, ap)
zf.write(p, ap)
def build(self, paths, tags=None, wheel_version=None):
"""
Build a wheel from files in specified paths, and use any specified tags
when determining the name of the wheel.
"""
if tags is None:
tags = {}
libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0]
if libkey == 'platlib':
is_pure = 'false'
default_pyver = [IMPVER]
default_abi = [ABI]
default_arch = [ARCH]
else:
is_pure = 'true'
default_pyver = [PYVER]
default_abi = ['none']
default_arch = ['any']
self.pyver = tags.get('pyver', default_pyver)
self.abi = tags.get('abi', default_abi)
self.arch = tags.get('arch', default_arch)
libdir = paths[libkey]
name_ver = '%s-%s' % (self.name, self.version)
data_dir = '%s.data' % name_ver
info_dir = '%s.dist-info' % name_ver
archive_paths = []
# First, stuff which is not in site-packages
for key in ('data', 'headers', 'scripts'):
if key not in paths:
continue
path = paths[key]
if os.path.isdir(path):
for root, dirs, files in os.walk(path):
for fn in files:
p = fsdecode(os.path.join(root, fn))
rp = os.path.relpath(p, path)
ap = to_posix(os.path.join(data_dir, key, rp))
archive_paths.append((ap, p))
if key == 'scripts' and not p.endswith('.exe'):
with open(p, 'rb') as f:
data = f.read()
data = self.process_shebang(data)
with open(p, 'wb') as f:
f.write(data)
# Now, stuff which is in site-packages, other than the
# distinfo stuff.
path = libdir
distinfo = None
for root, dirs, files in os.walk(path):
if root == path:
# At the top level only, save distinfo for later
# and skip it for now
for i, dn in enumerate(dirs):
dn = fsdecode(dn)
if dn.endswith('.dist-info'):
distinfo = os.path.join(root, dn)
del dirs[i]
break
assert distinfo, '.dist-info directory expected, not found'
for fn in files:
# comment out next suite to leave .pyc files in
if fsdecode(fn).endswith(('.pyc', '.pyo')):
continue
p = os.path.join(root, fn)
rp = to_posix(os.path.relpath(p, path))
archive_paths.append((rp, p))
# Now distinfo. Assumed to be flat, i.e. os.listdir is enough.
files = os.listdir(distinfo)
for fn in files:
if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'):
p = fsdecode(os.path.join(distinfo, fn))
ap = to_posix(os.path.join(info_dir, fn))
archive_paths.append((ap, p))
wheel_metadata = [
'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version),
'Generator: distlib %s' % __version__,
'Root-Is-Purelib: %s' % is_pure,
]
for pyver, abi, arch in self.tags:
wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch))
p = os.path.join(distinfo, 'WHEEL')
with open(p, 'w') as f:
f.write('\n'.join(wheel_metadata))
ap = to_posix(os.path.join(info_dir, 'WHEEL'))
archive_paths.append((ap, p))
# sort the entries by archive path. Not needed by any spec, but it
# keeps the archive listing and RECORD tidier than they would otherwise
# be. Use the number of path segments to keep directory entries together,
# and keep the dist-info stuff at the end.
def sorter(t):
ap = t[0]
n = ap.count('/')
if '.dist-info' in ap:
n += 10000
return (n, ap)
archive_paths = sorted(archive_paths, key=sorter)
# Now, at last, RECORD.
# Paths in here are archive paths - nothing else makes sense.
self.write_records((distinfo, info_dir), libdir, archive_paths)
# Now, ready to build the zip file
pathname = os.path.join(self.dirname, self.filename)
self.build_zip(pathname, archive_paths)
return pathname
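    # Illustrative build call (directory layout assumed; the purelib tree
    # must already contain the .dist-info metadata directory):
    #
    #   w = Wheel('mypkg-1.0')
    #   w.build({'purelib': 'build/lib', 'scripts': 'build/scripts'})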
def skip_entry(self, arcname):
"""
Determine whether an archive entry should be skipped when verifying
or installing.
"""
# The signature file won't be in RECORD,
        # and we don't currently do anything with it.
# We also skip directories, as they won't be in RECORD
# either. See:
#
# https://github.com/pypa/wheel/issues/294
# https://github.com/pypa/wheel/issues/287
# https://github.com/pypa/wheel/pull/289
#
return arcname.endswith(('/', '/RECORD.jws'))
def install(self, paths, maker, **kwargs):
"""
Install a wheel to the specified paths. If kwarg ``warner`` is
specified, it should be a callable, which will be called with two
tuples indicating the wheel version of this software and the wheel
version in the file, if there is a discrepancy in the versions.
        This can be used to issue any warnings or raise any exceptions.
        If kwarg ``lib_only`` is True, only the purelib/platlib files are
        installed, and the headers, scripts, data and dist-info metadata are
        not written. If kwarg ``bytecode_hashed_invalidation`` is True, written
        bytecode will try to use file-hash based invalidation (PEP-552) on
        supported interpreter versions (CPython 2.7+).
        The return value is a :class:`InstalledDistribution` instance unless
        ``lib_only`` is True, in which case the return value is ``None``.
"""
dry_run = maker.dry_run
warner = kwargs.get('warner')
lib_only = kwargs.get('lib_only', False)
bc_hashed_invalidation = kwargs.get('bytecode_hashed_invalidation', False)
pathname = os.path.join(self.dirname, self.filename)
name_ver = '%s-%s' % (self.name, self.version)
data_dir = '%s.data' % name_ver
info_dir = '%s.dist-info' % name_ver
metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME)
wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
record_name = posixpath.join(info_dir, 'RECORD')
wrapper = codecs.getreader('utf-8')
with ZipFile(pathname, 'r') as zf:
with zf.open(wheel_metadata_name) as bwf:
wf = wrapper(bwf)
message = message_from_file(wf)
wv = message['Wheel-Version'].split('.', 1)
file_version = tuple([int(i) for i in wv])
if (file_version != self.wheel_version) and warner:
warner(self.wheel_version, file_version)
if message['Root-Is-Purelib'] == 'true':
libdir = paths['purelib']
else:
libdir = paths['platlib']
records = {}
with zf.open(record_name) as bf:
with CSVReader(stream=bf) as reader:
for row in reader:
p = row[0]
records[p] = row
data_pfx = posixpath.join(data_dir, '')
info_pfx = posixpath.join(info_dir, '')
script_pfx = posixpath.join(data_dir, 'scripts', '')
# make a new instance rather than a copy of maker's,
# as we mutate it
fileop = FileOperator(dry_run=dry_run)
fileop.record = True # so we can rollback if needed
bc = not sys.dont_write_bytecode # Double negatives. Lovely!
outfiles = [] # for RECORD writing
# for script copying/shebang processing
workdir = tempfile.mkdtemp()
# set target dir later
# we default add_launchers to False, as the
# Python Launcher should be used instead
maker.source_dir = workdir
maker.target_dir = None
try:
for zinfo in zf.infolist():
arcname = zinfo.filename
if isinstance(arcname, text_type):
u_arcname = arcname
else:
u_arcname = arcname.decode('utf-8')
if self.skip_entry(u_arcname):
continue
row = records[u_arcname]
if row[2] and str(zinfo.file_size) != row[2]:
raise DistlibException('size mismatch for '
'%s' % u_arcname)
if row[1]:
kind, value = row[1].split('=', 1)
with zf.open(arcname) as bf:
data = bf.read()
_, digest = self.get_hash(data, kind)
if digest != value:
raise DistlibException('digest mismatch for '
'%s' % arcname)
if lib_only and u_arcname.startswith((info_pfx, data_pfx)):
logger.debug('lib_only: skipping %s', u_arcname)
continue
is_script = (u_arcname.startswith(script_pfx)
and not u_arcname.endswith('.exe'))
if u_arcname.startswith(data_pfx):
_, where, rp = u_arcname.split('/', 2)
outfile = os.path.join(paths[where], convert_path(rp))
else:
# meant for site-packages.
if u_arcname in (wheel_metadata_name, record_name):
continue
outfile = os.path.join(libdir, convert_path(u_arcname))
if not is_script:
with zf.open(arcname) as bf:
fileop.copy_stream(bf, outfile)
outfiles.append(outfile)
# Double check the digest of the written file
if not dry_run and row[1]:
with open(outfile, 'rb') as bf:
data = bf.read()
_, newdigest = self.get_hash(data, kind)
if newdigest != digest:
raise DistlibException('digest mismatch '
'on write for '
'%s' % outfile)
if bc and outfile.endswith('.py'):
try:
pyc = fileop.byte_compile(outfile,
hashed_invalidation=bc_hashed_invalidation)
outfiles.append(pyc)
except Exception:
# Don't give up if byte-compilation fails,
# but log it and perhaps warn the user
logger.warning('Byte-compilation failed',
exc_info=True)
else:
fn = os.path.basename(convert_path(arcname))
workname = os.path.join(workdir, fn)
with zf.open(arcname) as bf:
fileop.copy_stream(bf, workname)
dn, fn = os.path.split(outfile)
maker.target_dir = dn
filenames = maker.make(fn)
fileop.set_executable_mode(filenames)
outfiles.extend(filenames)
if lib_only:
logger.debug('lib_only: returning None')
dist = None
else:
# Generate scripts
# Try to get pydist.json so we can see if there are
# any commands to generate. If this fails (e.g. because
# of a legacy wheel), log a warning but don't give up.
commands = None
file_version = self.info['Wheel-Version']
if file_version == '1.0':
# Use legacy info
ep = posixpath.join(info_dir, 'entry_points.txt')
try:
with zf.open(ep) as bwf:
epdata = read_exports(bwf)
commands = {}
for key in ('console', 'gui'):
k = '%s_scripts' % key
if k in epdata:
commands['wrap_%s' % key] = d = {}
for v in epdata[k].values():
s = '%s:%s' % (v.prefix, v.suffix)
if v.flags:
s += ' [%s]' % ','.join(v.flags)
d[v.name] = s
except Exception:
logger.warning('Unable to read legacy script '
'metadata, so cannot generate '
'scripts')
else:
try:
with zf.open(metadata_name) as bwf:
wf = wrapper(bwf)
commands = json.load(wf).get('extensions')
if commands:
commands = commands.get('python.commands')
except Exception:
logger.warning('Unable to read JSON metadata, so '
'cannot generate scripts')
if commands:
console_scripts = commands.get('wrap_console', {})
gui_scripts = commands.get('wrap_gui', {})
if console_scripts or gui_scripts:
script_dir = paths.get('scripts', '')
if not os.path.isdir(script_dir):
raise ValueError('Valid script path not '
'specified')
maker.target_dir = script_dir
for k, v in console_scripts.items():
script = '%s = %s' % (k, v)
filenames = maker.make(script)
fileop.set_executable_mode(filenames)
if gui_scripts:
options = {'gui': True }
for k, v in gui_scripts.items():
script = '%s = %s' % (k, v)
filenames = maker.make(script, options)
fileop.set_executable_mode(filenames)
p = os.path.join(libdir, info_dir)
dist = InstalledDistribution(p)
# Write SHARED
paths = dict(paths) # don't change passed in dict
del paths['purelib']
del paths['platlib']
paths['lib'] = libdir
p = dist.write_shared_locations(paths, dry_run)
if p:
outfiles.append(p)
# Write RECORD
dist.write_installed_files(outfiles, paths['prefix'],
dry_run)
return dist
except Exception: # pragma: no cover
logger.exception('installation failed.')
fileop.rollback()
raise
finally:
shutil.rmtree(workdir)
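    # Sketch of an install call (paths and maker values are assumed):
    #
    #   from distlib.scripts import ScriptMaker
    #   maker = ScriptMaker(None, None)
    #   dist = wheel.install({'purelib': site_packages, 'scripts': bindir,
    #                         'headers': incdir, 'data': datadir,
    #                         'prefix': prefix}, maker)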
def _get_dylib_cache(self):
global cache
if cache is None:
# Use native string to avoid issues on 2.x: see Python #20140.
base = os.path.join(get_cache_base(), str('dylib-cache'),
'%s.%s' % sys.version_info[:2])
cache = Cache(base)
return cache
def _get_extensions(self):
pathname = os.path.join(self.dirname, self.filename)
name_ver = '%s-%s' % (self.name, self.version)
info_dir = '%s.dist-info' % name_ver
arcname = posixpath.join(info_dir, 'EXTENSIONS')
wrapper = codecs.getreader('utf-8')
result = []
with ZipFile(pathname, 'r') as zf:
try:
with zf.open(arcname) as bf:
wf = wrapper(bf)
extensions = json.load(wf)
cache = self._get_dylib_cache()
prefix = cache.prefix_to_dir(pathname)
cache_base = os.path.join(cache.base, prefix)
if not os.path.isdir(cache_base):
os.makedirs(cache_base)
for name, relpath in extensions.items():
dest = os.path.join(cache_base, convert_path(relpath))
if not os.path.exists(dest):
extract = True
else:
file_time = os.stat(dest).st_mtime
file_time = datetime.datetime.fromtimestamp(file_time)
info = zf.getinfo(relpath)
wheel_time = datetime.datetime(*info.date_time)
extract = wheel_time > file_time
if extract:
zf.extract(relpath, cache_base)
result.append((name, dest))
except KeyError:
pass
return result
def is_compatible(self):
"""
Determine if a wheel is compatible with the running system.
"""
return is_compatible(self)
def is_mountable(self):
"""
Determine if a wheel is asserted as mountable by its metadata.
"""
return True # for now - metadata details TBD
def mount(self, append=False):
pathname = os.path.abspath(os.path.join(self.dirname, self.filename))
if not self.is_compatible():
msg = 'Wheel %s not compatible with this Python.' % pathname
raise DistlibException(msg)
if not self.is_mountable():
msg = 'Wheel %s is marked as not mountable.' % pathname
raise DistlibException(msg)
if pathname in sys.path:
logger.debug('%s already in path', pathname)
else:
if append:
sys.path.append(pathname)
else:
sys.path.insert(0, pathname)
extensions = self._get_extensions()
if extensions:
if _hook not in sys.meta_path:
sys.meta_path.append(_hook)
_hook.add(pathname, extensions)
def unmount(self):
pathname = os.path.abspath(os.path.join(self.dirname, self.filename))
if pathname not in sys.path:
logger.debug('%s not in path', pathname)
else:
sys.path.remove(pathname)
if pathname in _hook.impure_wheels:
_hook.remove(pathname)
if not _hook.impure_wheels:
if _hook in sys.meta_path:
sys.meta_path.remove(_hook)
def verify(self):
pathname = os.path.join(self.dirname, self.filename)
name_ver = '%s-%s' % (self.name, self.version)
data_dir = '%s.data' % name_ver
info_dir = '%s.dist-info' % name_ver
metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME)
wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
record_name = posixpath.join(info_dir, 'RECORD')
wrapper = codecs.getreader('utf-8')
with ZipFile(pathname, 'r') as zf:
with zf.open(wheel_metadata_name) as bwf:
wf = wrapper(bwf)
message = message_from_file(wf)
wv = message['Wheel-Version'].split('.', 1)
file_version = tuple([int(i) for i in wv])
# TODO version verification
records = {}
with zf.open(record_name) as bf:
with CSVReader(stream=bf) as reader:
for row in reader:
p = row[0]
records[p] = row
for zinfo in zf.infolist():
arcname = zinfo.filename
if isinstance(arcname, text_type):
u_arcname = arcname
else:
u_arcname = arcname.decode('utf-8')
                # See issue #115: some wheels have '..' in their entries, but
                # only in the filename part, e.g. __main__..py, so the check
                # looks for '..' in the directory portions only.
p = u_arcname.split('/')
if '..' in p:
raise DistlibException('invalid entry in '
'wheel: %r' % u_arcname)
if self.skip_entry(u_arcname):
continue
row = records[u_arcname]
if row[2] and str(zinfo.file_size) != row[2]:
raise DistlibException('size mismatch for '
'%s' % u_arcname)
if row[1]:
kind, value = row[1].split('=', 1)
with zf.open(arcname) as bf:
data = bf.read()
_, digest = self.get_hash(data, kind)
if digest != value:
raise DistlibException('digest mismatch for '
'%s' % arcname)
def update(self, modifier, dest_dir=None, **kwargs):
"""
Update the contents of a wheel in a generic way. The modifier should
be a callable which expects a dictionary argument: its keys are
archive-entry paths, and its values are absolute filesystem paths
        where the contents of the corresponding archive entries can be found. The
modifier is free to change the contents of the files pointed to, add
new entries and remove entries, before returning. This method will
extract the entire contents of the wheel to a temporary location, call
the modifier, and then use the passed (and possibly updated)
dictionary to write a new wheel. If ``dest_dir`` is specified, the new
wheel is written there -- otherwise, the original wheel is overwritten.
The modifier should return True if it updated the wheel, else False.
This method returns the same value the modifier returns.
"""
def get_version(path_map, info_dir):
version = path = None
key = '%s/%s' % (info_dir, LEGACY_METADATA_FILENAME)
if key not in path_map:
key = '%s/PKG-INFO' % info_dir
if key in path_map:
path = path_map[key]
version = Metadata(path=path).version
return version, path
def update_version(version, path):
updated = None
try:
v = NormalizedVersion(version)
i = version.find('-')
if i < 0:
updated = '%s+1' % version
else:
parts = [int(s) for s in version[i + 1:].split('.')]
parts[-1] += 1
updated = '%s+%s' % (version[:i],
'.'.join(str(i) for i in parts))
except UnsupportedVersionError:
logger.debug('Cannot update non-compliant (PEP-440) '
'version %r', version)
if updated:
md = Metadata(path=path)
md.version = updated
legacy = path.endswith(LEGACY_METADATA_FILENAME)
md.write(path=path, legacy=legacy)
logger.debug('Version updated from %r to %r', version,
updated)
pathname = os.path.join(self.dirname, self.filename)
name_ver = '%s-%s' % (self.name, self.version)
info_dir = '%s.dist-info' % name_ver
record_name = posixpath.join(info_dir, 'RECORD')
with tempdir() as workdir:
with ZipFile(pathname, 'r') as zf:
path_map = {}
for zinfo in zf.infolist():
arcname = zinfo.filename
if isinstance(arcname, text_type):
u_arcname = arcname
else:
u_arcname = arcname.decode('utf-8')
if u_arcname == record_name:
continue
if '..' in u_arcname:
raise DistlibException('invalid entry in '
'wheel: %r' % u_arcname)
zf.extract(zinfo, workdir)
path = os.path.join(workdir, convert_path(u_arcname))
path_map[u_arcname] = path
# Remember the version.
original_version, _ = get_version(path_map, info_dir)
# Files extracted. Call the modifier.
modified = modifier(path_map, **kwargs)
if modified:
# Something changed - need to build a new wheel.
current_version, path = get_version(path_map, info_dir)
if current_version and (current_version == original_version):
# Add or update local version to signify changes.
update_version(current_version, path)
# Decide where the new wheel goes.
if dest_dir is None:
fd, newpath = tempfile.mkstemp(suffix='.whl',
prefix='wheel-update-',
dir=workdir)
os.close(fd)
else:
if not os.path.isdir(dest_dir):
raise DistlibException('Not a directory: %r' % dest_dir)
newpath = os.path.join(dest_dir, self.filename)
archive_paths = list(path_map.items())
distinfo = os.path.join(workdir, info_dir)
info = distinfo, info_dir
self.write_records(info, workdir, archive_paths)
self.build_zip(newpath, archive_paths)
if dest_dir is None:
shutil.copyfile(newpath, pathname)
return modified
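# Sketch of a modifier callable for update() (paths and names assumed):
#
#   def patch(path_map):
#       with open(path_map['mypkg/__init__.py'], 'a') as f:
#           f.write('\nPATCHED = True\n')
#       return True
#
#   Wheel('mypkg-1.0-py3-none-any.whl').update(patch)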
def compatible_tags():
"""
Return (pyver, abi, arch) tuples compatible with this Python.
"""
versions = [VER_SUFFIX]
major = VER_SUFFIX[0]
    for minor in range(sys.version_info[1] - 1, -1, -1):
versions.append(''.join([major, str(minor)]))
abis = []
for suffix, _, _ in imp.get_suffixes():
if suffix.startswith('.abi'):
abis.append(suffix.split('.', 2)[1])
abis.sort()
if ABI != 'none':
abis.insert(0, ABI)
abis.append('none')
result = []
arches = [ARCH]
if sys.platform == 'darwin':
m = re.match(r'(\w+)_(\d+)_(\d+)_(\w+)$', ARCH)
if m:
name, major, minor, arch = m.groups()
minor = int(minor)
matches = [arch]
if arch in ('i386', 'ppc'):
matches.append('fat')
if arch in ('i386', 'ppc', 'x86_64'):
matches.append('fat3')
if arch in ('ppc64', 'x86_64'):
matches.append('fat64')
if arch in ('i386', 'x86_64'):
matches.append('intel')
if arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'):
matches.append('universal')
while minor >= 0:
for match in matches:
s = '%s_%s_%s_%s' % (name, major, minor, match)
if s != ARCH: # already there
arches.append(s)
minor -= 1
# Most specific - our Python version, ABI and arch
for abi in abis:
for arch in arches:
result.append((''.join((IMP_PREFIX, versions[0])), abi, arch))
# where no ABI / arch dependency, but IMP_PREFIX dependency
for i, version in enumerate(versions):
result.append((''.join((IMP_PREFIX, version)), 'none', 'any'))
if i == 0:
result.append((''.join((IMP_PREFIX, version[0])), 'none', 'any'))
# no IMP_PREFIX, ABI or arch dependency
for i, version in enumerate(versions):
result.append((''.join(('py', version)), 'none', 'any'))
if i == 0:
result.append((''.join(('py', version[0])), 'none', 'any'))
return set(result)
COMPATIBLE_TAGS = compatible_tags()
del compatible_tags
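# COMPATIBLE_TAGS is a set of (pyver, abi, arch) tuples; schematically it
# contains entries such as (IMP_PREFIX + VER_SUFFIX, ABI, ARCH),
# (IMP_PREFIX + VER_SUFFIX, 'none', 'any') and, on Python 3,
# ('py3', 'none', 'any').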
def is_compatible(wheel, tags=None):
if not isinstance(wheel, Wheel):
wheel = Wheel(wheel) # assume it's a filename
result = False
if tags is None:
tags = COMPATIBLE_TAGS
for ver, abi, arch in tags:
if ver in wheel.pyver and abi in wheel.abi and arch in wheel.arch:
result = True
break
return result
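# Illustrative check (filename assumed): a pure-Python wheel tagged
# py3-none-any matches ('py3', 'none', 'any') above, so on any CPython 3.x:
#
#   is_compatible('foo-1.0-py3-none-any.whl')  # -> True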
| Django-locallibrary/env/Lib/site-packages/pip/_vendor/distlib/wheel.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/distlib/wheel.py",
"repo_id": "Django-locallibrary",
"token_count": 22357
} | 25 |
from __future__ import absolute_import, division, unicode_literals
class Filter(object):
def __init__(self, source):
self.source = source
def __iter__(self):
return iter(self.source)
def __getattr__(self, name):
return getattr(self.source, name)
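# Minimal subclass sketch (illustrative; the real filters live in the
# sibling modules of this package):
#
#   class UppercaseText(Filter):
#       def __iter__(self):
#           for token in Filter.__iter__(self):
#               if token['type'] in ('Characters', 'SpaceCharacters'):
#                   token['data'] = token['data'].upper()
#               yield token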
| Django-locallibrary/env/Lib/site-packages/pip/_vendor/html5lib/filters/base.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/html5lib/filters/base.py",
"repo_id": "Django-locallibrary",
"token_count": 108
} | 26 |
from __future__ import absolute_import, division, unicode_literals
from xml.dom import Node
from . import base
class TreeWalker(base.NonRecursiveTreeWalker):
def getNodeDetails(self, node):
if node.nodeType == Node.DOCUMENT_TYPE_NODE:
return base.DOCTYPE, node.name, node.publicId, node.systemId
elif node.nodeType in (Node.TEXT_NODE, Node.CDATA_SECTION_NODE):
return base.TEXT, node.nodeValue
elif node.nodeType == Node.ELEMENT_NODE:
attrs = {}
for attr in list(node.attributes.keys()):
attr = node.getAttributeNode(attr)
if attr.namespaceURI:
attrs[(attr.namespaceURI, attr.localName)] = attr.value
else:
attrs[(None, attr.name)] = attr.value
return (base.ELEMENT, node.namespaceURI, node.nodeName,
attrs, node.hasChildNodes())
elif node.nodeType == Node.COMMENT_NODE:
return base.COMMENT, node.nodeValue
elif node.nodeType in (Node.DOCUMENT_NODE, Node.DOCUMENT_FRAGMENT_NODE):
return (base.DOCUMENT,)
else:
return base.UNKNOWN, node.nodeType
def getFirstChild(self, node):
return node.firstChild
def getNextSibling(self, node):
return node.nextSibling
def getParentNode(self, node):
return node.parentNode
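# Illustrative use via html5lib's dispatcher (names assumed):
#
#   import html5lib
#   from xml.dom import minidom
#   walker = html5lib.getTreeWalker('dom')
#   tokens = list(walker(minidom.parseString('<p>hi</p>')))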
| Django-locallibrary/env/Lib/site-packages/pip/_vendor/html5lib/treewalkers/dom.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/html5lib/treewalkers/dom.py",
"repo_id": "Django-locallibrary",
"token_count": 659
} | 27 |
# This file is automatically generated by tools/idna-data
__version__ = "13.0.0"
scripts = {
'Greek': (
0x37000000374,
0x37500000378,
0x37a0000037e,
0x37f00000380,
0x38400000385,
0x38600000387,
0x3880000038b,
0x38c0000038d,
0x38e000003a2,
0x3a3000003e2,
0x3f000000400,
0x1d2600001d2b,
0x1d5d00001d62,
0x1d6600001d6b,
0x1dbf00001dc0,
0x1f0000001f16,
0x1f1800001f1e,
0x1f2000001f46,
0x1f4800001f4e,
0x1f5000001f58,
0x1f5900001f5a,
0x1f5b00001f5c,
0x1f5d00001f5e,
0x1f5f00001f7e,
0x1f8000001fb5,
0x1fb600001fc5,
0x1fc600001fd4,
0x1fd600001fdc,
0x1fdd00001ff0,
0x1ff200001ff5,
0x1ff600001fff,
0x212600002127,
0xab650000ab66,
0x101400001018f,
0x101a0000101a1,
0x1d2000001d246,
),
'Han': (
0x2e8000002e9a,
0x2e9b00002ef4,
0x2f0000002fd6,
0x300500003006,
0x300700003008,
0x30210000302a,
0x30380000303c,
0x340000004dc0,
0x4e0000009ffd,
0xf9000000fa6e,
0xfa700000fada,
0x16ff000016ff2,
0x200000002a6de,
0x2a7000002b735,
0x2b7400002b81e,
0x2b8200002cea2,
0x2ceb00002ebe1,
0x2f8000002fa1e,
0x300000003134b,
),
'Hebrew': (
0x591000005c8,
0x5d0000005eb,
0x5ef000005f5,
0xfb1d0000fb37,
0xfb380000fb3d,
0xfb3e0000fb3f,
0xfb400000fb42,
0xfb430000fb45,
0xfb460000fb50,
),
'Hiragana': (
0x304100003097,
0x309d000030a0,
0x1b0010001b11f,
0x1b1500001b153,
0x1f2000001f201,
),
'Katakana': (
0x30a1000030fb,
0x30fd00003100,
0x31f000003200,
0x32d0000032ff,
0x330000003358,
0xff660000ff70,
0xff710000ff9e,
0x1b0000001b001,
0x1b1640001b168,
),
}
joining_types = {
0x600: 85,
0x601: 85,
0x602: 85,
0x603: 85,
0x604: 85,
0x605: 85,
0x608: 85,
0x60b: 85,
0x620: 68,
0x621: 85,
0x622: 82,
0x623: 82,
0x624: 82,
0x625: 82,
0x626: 68,
0x627: 82,
0x628: 68,
0x629: 82,
0x62a: 68,
0x62b: 68,
0x62c: 68,
0x62d: 68,
0x62e: 68,
0x62f: 82,
0x630: 82,
0x631: 82,
0x632: 82,
0x633: 68,
0x634: 68,
0x635: 68,
0x636: 68,
0x637: 68,
0x638: 68,
0x639: 68,
0x63a: 68,
0x63b: 68,
0x63c: 68,
0x63d: 68,
0x63e: 68,
0x63f: 68,
0x640: 67,
0x641: 68,
0x642: 68,
0x643: 68,
0x644: 68,
0x645: 68,
0x646: 68,
0x647: 68,
0x648: 82,
0x649: 68,
0x64a: 68,
0x66e: 68,
0x66f: 68,
0x671: 82,
0x672: 82,
0x673: 82,
0x674: 85,
0x675: 82,
0x676: 82,
0x677: 82,
0x678: 68,
0x679: 68,
0x67a: 68,
0x67b: 68,
0x67c: 68,
0x67d: 68,
0x67e: 68,
0x67f: 68,
0x680: 68,
0x681: 68,
0x682: 68,
0x683: 68,
0x684: 68,
0x685: 68,
0x686: 68,
0x687: 68,
0x688: 82,
0x689: 82,
0x68a: 82,
0x68b: 82,
0x68c: 82,
0x68d: 82,
0x68e: 82,
0x68f: 82,
0x690: 82,
0x691: 82,
0x692: 82,
0x693: 82,
0x694: 82,
0x695: 82,
0x696: 82,
0x697: 82,
0x698: 82,
0x699: 82,
0x69a: 68,
0x69b: 68,
0x69c: 68,
0x69d: 68,
0x69e: 68,
0x69f: 68,
0x6a0: 68,
0x6a1: 68,
0x6a2: 68,
0x6a3: 68,
0x6a4: 68,
0x6a5: 68,
0x6a6: 68,
0x6a7: 68,
0x6a8: 68,
0x6a9: 68,
0x6aa: 68,
0x6ab: 68,
0x6ac: 68,
0x6ad: 68,
0x6ae: 68,
0x6af: 68,
0x6b0: 68,
0x6b1: 68,
0x6b2: 68,
0x6b3: 68,
0x6b4: 68,
0x6b5: 68,
0x6b6: 68,
0x6b7: 68,
0x6b8: 68,
0x6b9: 68,
0x6ba: 68,
0x6bb: 68,
0x6bc: 68,
0x6bd: 68,
0x6be: 68,
0x6bf: 68,
0x6c0: 82,
0x6c1: 68,
0x6c2: 68,
0x6c3: 82,
0x6c4: 82,
0x6c5: 82,
0x6c6: 82,
0x6c7: 82,
0x6c8: 82,
0x6c9: 82,
0x6ca: 82,
0x6cb: 82,
0x6cc: 68,
0x6cd: 82,
0x6ce: 68,
0x6cf: 82,
0x6d0: 68,
0x6d1: 68,
0x6d2: 82,
0x6d3: 82,
0x6d5: 82,
0x6dd: 85,
0x6ee: 82,
0x6ef: 82,
0x6fa: 68,
0x6fb: 68,
0x6fc: 68,
0x6ff: 68,
0x70f: 84,
0x710: 82,
0x712: 68,
0x713: 68,
0x714: 68,
0x715: 82,
0x716: 82,
0x717: 82,
0x718: 82,
0x719: 82,
0x71a: 68,
0x71b: 68,
0x71c: 68,
0x71d: 68,
0x71e: 82,
0x71f: 68,
0x720: 68,
0x721: 68,
0x722: 68,
0x723: 68,
0x724: 68,
0x725: 68,
0x726: 68,
0x727: 68,
0x728: 82,
0x729: 68,
0x72a: 82,
0x72b: 68,
0x72c: 82,
0x72d: 68,
0x72e: 68,
0x72f: 82,
0x74d: 82,
0x74e: 68,
0x74f: 68,
0x750: 68,
0x751: 68,
0x752: 68,
0x753: 68,
0x754: 68,
0x755: 68,
0x756: 68,
0x757: 68,
0x758: 68,
0x759: 82,
0x75a: 82,
0x75b: 82,
0x75c: 68,
0x75d: 68,
0x75e: 68,
0x75f: 68,
0x760: 68,
0x761: 68,
0x762: 68,
0x763: 68,
0x764: 68,
0x765: 68,
0x766: 68,
0x767: 68,
0x768: 68,
0x769: 68,
0x76a: 68,
0x76b: 82,
0x76c: 82,
0x76d: 68,
0x76e: 68,
0x76f: 68,
0x770: 68,
0x771: 82,
0x772: 68,
0x773: 82,
0x774: 82,
0x775: 68,
0x776: 68,
0x777: 68,
0x778: 82,
0x779: 82,
0x77a: 68,
0x77b: 68,
0x77c: 68,
0x77d: 68,
0x77e: 68,
0x77f: 68,
0x7ca: 68,
0x7cb: 68,
0x7cc: 68,
0x7cd: 68,
0x7ce: 68,
0x7cf: 68,
0x7d0: 68,
0x7d1: 68,
0x7d2: 68,
0x7d3: 68,
0x7d4: 68,
0x7d5: 68,
0x7d6: 68,
0x7d7: 68,
0x7d8: 68,
0x7d9: 68,
0x7da: 68,
0x7db: 68,
0x7dc: 68,
0x7dd: 68,
0x7de: 68,
0x7df: 68,
0x7e0: 68,
0x7e1: 68,
0x7e2: 68,
0x7e3: 68,
0x7e4: 68,
0x7e5: 68,
0x7e6: 68,
0x7e7: 68,
0x7e8: 68,
0x7e9: 68,
0x7ea: 68,
0x7fa: 67,
0x840: 82,
0x841: 68,
0x842: 68,
0x843: 68,
0x844: 68,
0x845: 68,
0x846: 82,
0x847: 82,
0x848: 68,
0x849: 82,
0x84a: 68,
0x84b: 68,
0x84c: 68,
0x84d: 68,
0x84e: 68,
0x84f: 68,
0x850: 68,
0x851: 68,
0x852: 68,
0x853: 68,
0x854: 82,
0x855: 68,
0x856: 82,
0x857: 82,
0x858: 82,
0x860: 68,
0x861: 85,
0x862: 68,
0x863: 68,
0x864: 68,
0x865: 68,
0x866: 85,
0x867: 82,
0x868: 68,
0x869: 82,
0x86a: 82,
0x8a0: 68,
0x8a1: 68,
0x8a2: 68,
0x8a3: 68,
0x8a4: 68,
0x8a5: 68,
0x8a6: 68,
0x8a7: 68,
0x8a8: 68,
0x8a9: 68,
0x8aa: 82,
0x8ab: 82,
0x8ac: 82,
0x8ad: 85,
0x8ae: 82,
0x8af: 68,
0x8b0: 68,
0x8b1: 82,
0x8b2: 82,
0x8b3: 68,
0x8b4: 68,
0x8b6: 68,
0x8b7: 68,
0x8b8: 68,
0x8b9: 82,
0x8ba: 68,
0x8bb: 68,
0x8bc: 68,
0x8bd: 68,
0x8be: 68,
0x8bf: 68,
0x8c0: 68,
0x8c1: 68,
0x8c2: 68,
0x8c3: 68,
0x8c4: 68,
0x8c5: 68,
0x8c6: 68,
0x8c7: 68,
0x8e2: 85,
0x1806: 85,
0x1807: 68,
0x180a: 67,
0x180e: 85,
0x1820: 68,
0x1821: 68,
0x1822: 68,
0x1823: 68,
0x1824: 68,
0x1825: 68,
0x1826: 68,
0x1827: 68,
0x1828: 68,
0x1829: 68,
0x182a: 68,
0x182b: 68,
0x182c: 68,
0x182d: 68,
0x182e: 68,
0x182f: 68,
0x1830: 68,
0x1831: 68,
0x1832: 68,
0x1833: 68,
0x1834: 68,
0x1835: 68,
0x1836: 68,
0x1837: 68,
0x1838: 68,
0x1839: 68,
0x183a: 68,
0x183b: 68,
0x183c: 68,
0x183d: 68,
0x183e: 68,
0x183f: 68,
0x1840: 68,
0x1841: 68,
0x1842: 68,
0x1843: 68,
0x1844: 68,
0x1845: 68,
0x1846: 68,
0x1847: 68,
0x1848: 68,
0x1849: 68,
0x184a: 68,
0x184b: 68,
0x184c: 68,
0x184d: 68,
0x184e: 68,
0x184f: 68,
0x1850: 68,
0x1851: 68,
0x1852: 68,
0x1853: 68,
0x1854: 68,
0x1855: 68,
0x1856: 68,
0x1857: 68,
0x1858: 68,
0x1859: 68,
0x185a: 68,
0x185b: 68,
0x185c: 68,
0x185d: 68,
0x185e: 68,
0x185f: 68,
0x1860: 68,
0x1861: 68,
0x1862: 68,
0x1863: 68,
0x1864: 68,
0x1865: 68,
0x1866: 68,
0x1867: 68,
0x1868: 68,
0x1869: 68,
0x186a: 68,
0x186b: 68,
0x186c: 68,
0x186d: 68,
0x186e: 68,
0x186f: 68,
0x1870: 68,
0x1871: 68,
0x1872: 68,
0x1873: 68,
0x1874: 68,
0x1875: 68,
0x1876: 68,
0x1877: 68,
0x1878: 68,
0x1880: 85,
0x1881: 85,
0x1882: 85,
0x1883: 85,
0x1884: 85,
0x1885: 84,
0x1886: 84,
0x1887: 68,
0x1888: 68,
0x1889: 68,
0x188a: 68,
0x188b: 68,
0x188c: 68,
0x188d: 68,
0x188e: 68,
0x188f: 68,
0x1890: 68,
0x1891: 68,
0x1892: 68,
0x1893: 68,
0x1894: 68,
0x1895: 68,
0x1896: 68,
0x1897: 68,
0x1898: 68,
0x1899: 68,
0x189a: 68,
0x189b: 68,
0x189c: 68,
0x189d: 68,
0x189e: 68,
0x189f: 68,
0x18a0: 68,
0x18a1: 68,
0x18a2: 68,
0x18a3: 68,
0x18a4: 68,
0x18a5: 68,
0x18a6: 68,
0x18a7: 68,
0x18a8: 68,
0x18aa: 68,
0x200c: 85,
0x200d: 67,
0x202f: 85,
0x2066: 85,
0x2067: 85,
0x2068: 85,
0x2069: 85,
0xa840: 68,
0xa841: 68,
0xa842: 68,
0xa843: 68,
0xa844: 68,
0xa845: 68,
0xa846: 68,
0xa847: 68,
0xa848: 68,
0xa849: 68,
0xa84a: 68,
0xa84b: 68,
0xa84c: 68,
0xa84d: 68,
0xa84e: 68,
0xa84f: 68,
0xa850: 68,
0xa851: 68,
0xa852: 68,
0xa853: 68,
0xa854: 68,
0xa855: 68,
0xa856: 68,
0xa857: 68,
0xa858: 68,
0xa859: 68,
0xa85a: 68,
0xa85b: 68,
0xa85c: 68,
0xa85d: 68,
0xa85e: 68,
0xa85f: 68,
0xa860: 68,
0xa861: 68,
0xa862: 68,
0xa863: 68,
0xa864: 68,
0xa865: 68,
0xa866: 68,
0xa867: 68,
0xa868: 68,
0xa869: 68,
0xa86a: 68,
0xa86b: 68,
0xa86c: 68,
0xa86d: 68,
0xa86e: 68,
0xa86f: 68,
0xa870: 68,
0xa871: 68,
0xa872: 76,
0xa873: 85,
0x10ac0: 68,
0x10ac1: 68,
0x10ac2: 68,
0x10ac3: 68,
0x10ac4: 68,
0x10ac5: 82,
0x10ac6: 85,
0x10ac7: 82,
0x10ac8: 85,
0x10ac9: 82,
0x10aca: 82,
0x10acb: 85,
0x10acc: 85,
0x10acd: 76,
0x10ace: 82,
0x10acf: 82,
0x10ad0: 82,
0x10ad1: 82,
0x10ad2: 82,
0x10ad3: 68,
0x10ad4: 68,
0x10ad5: 68,
0x10ad6: 68,
0x10ad7: 76,
0x10ad8: 68,
0x10ad9: 68,
0x10ada: 68,
0x10adb: 68,
0x10adc: 68,
0x10add: 82,
0x10ade: 68,
0x10adf: 68,
0x10ae0: 68,
0x10ae1: 82,
0x10ae2: 85,
0x10ae3: 85,
0x10ae4: 82,
0x10aeb: 68,
0x10aec: 68,
0x10aed: 68,
0x10aee: 68,
0x10aef: 82,
0x10b80: 68,
0x10b81: 82,
0x10b82: 68,
0x10b83: 82,
0x10b84: 82,
0x10b85: 82,
0x10b86: 68,
0x10b87: 68,
0x10b88: 68,
0x10b89: 82,
0x10b8a: 68,
0x10b8b: 68,
0x10b8c: 82,
0x10b8d: 68,
0x10b8e: 82,
0x10b8f: 82,
0x10b90: 68,
0x10b91: 82,
0x10ba9: 82,
0x10baa: 82,
0x10bab: 82,
0x10bac: 82,
0x10bad: 68,
0x10bae: 68,
0x10baf: 85,
0x10d00: 76,
0x10d01: 68,
0x10d02: 68,
0x10d03: 68,
0x10d04: 68,
0x10d05: 68,
0x10d06: 68,
0x10d07: 68,
0x10d08: 68,
0x10d09: 68,
0x10d0a: 68,
0x10d0b: 68,
0x10d0c: 68,
0x10d0d: 68,
0x10d0e: 68,
0x10d0f: 68,
0x10d10: 68,
0x10d11: 68,
0x10d12: 68,
0x10d13: 68,
0x10d14: 68,
0x10d15: 68,
0x10d16: 68,
0x10d17: 68,
0x10d18: 68,
0x10d19: 68,
0x10d1a: 68,
0x10d1b: 68,
0x10d1c: 68,
0x10d1d: 68,
0x10d1e: 68,
0x10d1f: 68,
0x10d20: 68,
0x10d21: 68,
0x10d22: 82,
0x10d23: 68,
0x10f30: 68,
0x10f31: 68,
0x10f32: 68,
0x10f33: 82,
0x10f34: 68,
0x10f35: 68,
0x10f36: 68,
0x10f37: 68,
0x10f38: 68,
0x10f39: 68,
0x10f3a: 68,
0x10f3b: 68,
0x10f3c: 68,
0x10f3d: 68,
0x10f3e: 68,
0x10f3f: 68,
0x10f40: 68,
0x10f41: 68,
0x10f42: 68,
0x10f43: 68,
0x10f44: 68,
0x10f45: 85,
0x10f51: 68,
0x10f52: 68,
0x10f53: 68,
0x10f54: 82,
0x10fb0: 68,
0x10fb1: 85,
0x10fb2: 68,
0x10fb3: 68,
0x10fb4: 82,
0x10fb5: 82,
0x10fb6: 82,
0x10fb7: 85,
0x10fb8: 68,
0x10fb9: 82,
0x10fba: 82,
0x10fbb: 68,
0x10fbc: 68,
0x10fbd: 82,
0x10fbe: 68,
0x10fbf: 68,
0x10fc0: 85,
0x10fc1: 68,
0x10fc2: 82,
0x10fc3: 82,
0x10fc4: 68,
0x10fc5: 85,
0x10fc6: 85,
0x10fc7: 85,
0x10fc8: 85,
0x10fc9: 82,
0x10fca: 68,
0x10fcb: 76,
0x110bd: 85,
0x110cd: 85,
0x1e900: 68,
0x1e901: 68,
0x1e902: 68,
0x1e903: 68,
0x1e904: 68,
0x1e905: 68,
0x1e906: 68,
0x1e907: 68,
0x1e908: 68,
0x1e909: 68,
0x1e90a: 68,
0x1e90b: 68,
0x1e90c: 68,
0x1e90d: 68,
0x1e90e: 68,
0x1e90f: 68,
0x1e910: 68,
0x1e911: 68,
0x1e912: 68,
0x1e913: 68,
0x1e914: 68,
0x1e915: 68,
0x1e916: 68,
0x1e917: 68,
0x1e918: 68,
0x1e919: 68,
0x1e91a: 68,
0x1e91b: 68,
0x1e91c: 68,
0x1e91d: 68,
0x1e91e: 68,
0x1e91f: 68,
0x1e920: 68,
0x1e921: 68,
0x1e922: 68,
0x1e923: 68,
0x1e924: 68,
0x1e925: 68,
0x1e926: 68,
0x1e927: 68,
0x1e928: 68,
0x1e929: 68,
0x1e92a: 68,
0x1e92b: 68,
0x1e92c: 68,
0x1e92d: 68,
0x1e92e: 68,
0x1e92f: 68,
0x1e930: 68,
0x1e931: 68,
0x1e932: 68,
0x1e933: 68,
0x1e934: 68,
0x1e935: 68,
0x1e936: 68,
0x1e937: 68,
0x1e938: 68,
0x1e939: 68,
0x1e93a: 68,
0x1e93b: 68,
0x1e93c: 68,
0x1e93d: 68,
0x1e93e: 68,
0x1e93f: 68,
0x1e940: 68,
0x1e941: 68,
0x1e942: 68,
0x1e943: 68,
0x1e94b: 84,
}
codepoint_classes = {
'PVALID': (
0x2d0000002e,
0x300000003a,
0x610000007b,
0xdf000000f7,
0xf800000100,
0x10100000102,
0x10300000104,
0x10500000106,
0x10700000108,
0x1090000010a,
0x10b0000010c,
0x10d0000010e,
0x10f00000110,
0x11100000112,
0x11300000114,
0x11500000116,
0x11700000118,
0x1190000011a,
0x11b0000011c,
0x11d0000011e,
0x11f00000120,
0x12100000122,
0x12300000124,
0x12500000126,
0x12700000128,
0x1290000012a,
0x12b0000012c,
0x12d0000012e,
0x12f00000130,
0x13100000132,
0x13500000136,
0x13700000139,
0x13a0000013b,
0x13c0000013d,
0x13e0000013f,
0x14200000143,
0x14400000145,
0x14600000147,
0x14800000149,
0x14b0000014c,
0x14d0000014e,
0x14f00000150,
0x15100000152,
0x15300000154,
0x15500000156,
0x15700000158,
0x1590000015a,
0x15b0000015c,
0x15d0000015e,
0x15f00000160,
0x16100000162,
0x16300000164,
0x16500000166,
0x16700000168,
0x1690000016a,
0x16b0000016c,
0x16d0000016e,
0x16f00000170,
0x17100000172,
0x17300000174,
0x17500000176,
0x17700000178,
0x17a0000017b,
0x17c0000017d,
0x17e0000017f,
0x18000000181,
0x18300000184,
0x18500000186,
0x18800000189,
0x18c0000018e,
0x19200000193,
0x19500000196,
0x1990000019c,
0x19e0000019f,
0x1a1000001a2,
0x1a3000001a4,
0x1a5000001a6,
0x1a8000001a9,
0x1aa000001ac,
0x1ad000001ae,
0x1b0000001b1,
0x1b4000001b5,
0x1b6000001b7,
0x1b9000001bc,
0x1bd000001c4,
0x1ce000001cf,
0x1d0000001d1,
0x1d2000001d3,
0x1d4000001d5,
0x1d6000001d7,
0x1d8000001d9,
0x1da000001db,
0x1dc000001de,
0x1df000001e0,
0x1e1000001e2,
0x1e3000001e4,
0x1e5000001e6,
0x1e7000001e8,
0x1e9000001ea,
0x1eb000001ec,
0x1ed000001ee,
0x1ef000001f1,
0x1f5000001f6,
0x1f9000001fa,
0x1fb000001fc,
0x1fd000001fe,
0x1ff00000200,
0x20100000202,
0x20300000204,
0x20500000206,
0x20700000208,
0x2090000020a,
0x20b0000020c,
0x20d0000020e,
0x20f00000210,
0x21100000212,
0x21300000214,
0x21500000216,
0x21700000218,
0x2190000021a,
0x21b0000021c,
0x21d0000021e,
0x21f00000220,
0x22100000222,
0x22300000224,
0x22500000226,
0x22700000228,
0x2290000022a,
0x22b0000022c,
0x22d0000022e,
0x22f00000230,
0x23100000232,
0x2330000023a,
0x23c0000023d,
0x23f00000241,
0x24200000243,
0x24700000248,
0x2490000024a,
0x24b0000024c,
0x24d0000024e,
0x24f000002b0,
0x2b9000002c2,
0x2c6000002d2,
0x2ec000002ed,
0x2ee000002ef,
0x30000000340,
0x34200000343,
0x3460000034f,
0x35000000370,
0x37100000372,
0x37300000374,
0x37700000378,
0x37b0000037e,
0x39000000391,
0x3ac000003cf,
0x3d7000003d8,
0x3d9000003da,
0x3db000003dc,
0x3dd000003de,
0x3df000003e0,
0x3e1000003e2,
0x3e3000003e4,
0x3e5000003e6,
0x3e7000003e8,
0x3e9000003ea,
0x3eb000003ec,
0x3ed000003ee,
0x3ef000003f0,
0x3f3000003f4,
0x3f8000003f9,
0x3fb000003fd,
0x43000000460,
0x46100000462,
0x46300000464,
0x46500000466,
0x46700000468,
0x4690000046a,
0x46b0000046c,
0x46d0000046e,
0x46f00000470,
0x47100000472,
0x47300000474,
0x47500000476,
0x47700000478,
0x4790000047a,
0x47b0000047c,
0x47d0000047e,
0x47f00000480,
0x48100000482,
0x48300000488,
0x48b0000048c,
0x48d0000048e,
0x48f00000490,
0x49100000492,
0x49300000494,
0x49500000496,
0x49700000498,
0x4990000049a,
0x49b0000049c,
0x49d0000049e,
0x49f000004a0,
0x4a1000004a2,
0x4a3000004a4,
0x4a5000004a6,
0x4a7000004a8,
0x4a9000004aa,
0x4ab000004ac,
0x4ad000004ae,
0x4af000004b0,
0x4b1000004b2,
0x4b3000004b4,
0x4b5000004b6,
0x4b7000004b8,
0x4b9000004ba,
0x4bb000004bc,
0x4bd000004be,
0x4bf000004c0,
0x4c2000004c3,
0x4c4000004c5,
0x4c6000004c7,
0x4c8000004c9,
0x4ca000004cb,
0x4cc000004cd,
0x4ce000004d0,
0x4d1000004d2,
0x4d3000004d4,
0x4d5000004d6,
0x4d7000004d8,
0x4d9000004da,
0x4db000004dc,
0x4dd000004de,
0x4df000004e0,
0x4e1000004e2,
0x4e3000004e4,
0x4e5000004e6,
0x4e7000004e8,
0x4e9000004ea,
0x4eb000004ec,
0x4ed000004ee,
0x4ef000004f0,
0x4f1000004f2,
0x4f3000004f4,
0x4f5000004f6,
0x4f7000004f8,
0x4f9000004fa,
0x4fb000004fc,
0x4fd000004fe,
0x4ff00000500,
0x50100000502,
0x50300000504,
0x50500000506,
0x50700000508,
0x5090000050a,
0x50b0000050c,
0x50d0000050e,
0x50f00000510,
0x51100000512,
0x51300000514,
0x51500000516,
0x51700000518,
0x5190000051a,
0x51b0000051c,
0x51d0000051e,
0x51f00000520,
0x52100000522,
0x52300000524,
0x52500000526,
0x52700000528,
0x5290000052a,
0x52b0000052c,
0x52d0000052e,
0x52f00000530,
0x5590000055a,
0x56000000587,
0x58800000589,
0x591000005be,
0x5bf000005c0,
0x5c1000005c3,
0x5c4000005c6,
0x5c7000005c8,
0x5d0000005eb,
0x5ef000005f3,
0x6100000061b,
0x62000000640,
0x64100000660,
0x66e00000675,
0x679000006d4,
0x6d5000006dd,
0x6df000006e9,
0x6ea000006f0,
0x6fa00000700,
0x7100000074b,
0x74d000007b2,
0x7c0000007f6,
0x7fd000007fe,
0x8000000082e,
0x8400000085c,
0x8600000086b,
0x8a0000008b5,
0x8b6000008c8,
0x8d3000008e2,
0x8e300000958,
0x96000000964,
0x96600000970,
0x97100000984,
0x9850000098d,
0x98f00000991,
0x993000009a9,
0x9aa000009b1,
0x9b2000009b3,
0x9b6000009ba,
0x9bc000009c5,
0x9c7000009c9,
0x9cb000009cf,
0x9d7000009d8,
0x9e0000009e4,
0x9e6000009f2,
0x9fc000009fd,
0x9fe000009ff,
0xa0100000a04,
0xa0500000a0b,
0xa0f00000a11,
0xa1300000a29,
0xa2a00000a31,
0xa3200000a33,
0xa3500000a36,
0xa3800000a3a,
0xa3c00000a3d,
0xa3e00000a43,
0xa4700000a49,
0xa4b00000a4e,
0xa5100000a52,
0xa5c00000a5d,
0xa6600000a76,
0xa8100000a84,
0xa8500000a8e,
0xa8f00000a92,
0xa9300000aa9,
0xaaa00000ab1,
0xab200000ab4,
0xab500000aba,
0xabc00000ac6,
0xac700000aca,
0xacb00000ace,
0xad000000ad1,
0xae000000ae4,
0xae600000af0,
0xaf900000b00,
0xb0100000b04,
0xb0500000b0d,
0xb0f00000b11,
0xb1300000b29,
0xb2a00000b31,
0xb3200000b34,
0xb3500000b3a,
0xb3c00000b45,
0xb4700000b49,
0xb4b00000b4e,
0xb5500000b58,
0xb5f00000b64,
0xb6600000b70,
0xb7100000b72,
0xb8200000b84,
0xb8500000b8b,
0xb8e00000b91,
0xb9200000b96,
0xb9900000b9b,
0xb9c00000b9d,
0xb9e00000ba0,
0xba300000ba5,
0xba800000bab,
0xbae00000bba,
0xbbe00000bc3,
0xbc600000bc9,
0xbca00000bce,
0xbd000000bd1,
0xbd700000bd8,
0xbe600000bf0,
0xc0000000c0d,
0xc0e00000c11,
0xc1200000c29,
0xc2a00000c3a,
0xc3d00000c45,
0xc4600000c49,
0xc4a00000c4e,
0xc5500000c57,
0xc5800000c5b,
0xc6000000c64,
0xc6600000c70,
0xc8000000c84,
0xc8500000c8d,
0xc8e00000c91,
0xc9200000ca9,
0xcaa00000cb4,
0xcb500000cba,
0xcbc00000cc5,
0xcc600000cc9,
0xcca00000cce,
0xcd500000cd7,
0xcde00000cdf,
0xce000000ce4,
0xce600000cf0,
0xcf100000cf3,
0xd0000000d0d,
0xd0e00000d11,
0xd1200000d45,
0xd4600000d49,
0xd4a00000d4f,
0xd5400000d58,
0xd5f00000d64,
0xd6600000d70,
0xd7a00000d80,
0xd8100000d84,
0xd8500000d97,
0xd9a00000db2,
0xdb300000dbc,
0xdbd00000dbe,
0xdc000000dc7,
0xdca00000dcb,
0xdcf00000dd5,
0xdd600000dd7,
0xdd800000de0,
0xde600000df0,
0xdf200000df4,
0xe0100000e33,
0xe3400000e3b,
0xe4000000e4f,
0xe5000000e5a,
0xe8100000e83,
0xe8400000e85,
0xe8600000e8b,
0xe8c00000ea4,
0xea500000ea6,
0xea700000eb3,
0xeb400000ebe,
0xec000000ec5,
0xec600000ec7,
0xec800000ece,
0xed000000eda,
0xede00000ee0,
0xf0000000f01,
0xf0b00000f0c,
0xf1800000f1a,
0xf2000000f2a,
0xf3500000f36,
0xf3700000f38,
0xf3900000f3a,
0xf3e00000f43,
0xf4400000f48,
0xf4900000f4d,
0xf4e00000f52,
0xf5300000f57,
0xf5800000f5c,
0xf5d00000f69,
0xf6a00000f6d,
0xf7100000f73,
0xf7400000f75,
0xf7a00000f81,
0xf8200000f85,
0xf8600000f93,
0xf9400000f98,
0xf9900000f9d,
0xf9e00000fa2,
0xfa300000fa7,
0xfa800000fac,
0xfad00000fb9,
0xfba00000fbd,
0xfc600000fc7,
0x10000000104a,
0x10500000109e,
0x10d0000010fb,
0x10fd00001100,
0x120000001249,
0x124a0000124e,
0x125000001257,
0x125800001259,
0x125a0000125e,
0x126000001289,
0x128a0000128e,
0x1290000012b1,
0x12b2000012b6,
0x12b8000012bf,
0x12c0000012c1,
0x12c2000012c6,
0x12c8000012d7,
0x12d800001311,
0x131200001316,
0x13180000135b,
0x135d00001360,
0x138000001390,
0x13a0000013f6,
0x14010000166d,
0x166f00001680,
0x16810000169b,
0x16a0000016eb,
0x16f1000016f9,
0x17000000170d,
0x170e00001715,
0x172000001735,
0x174000001754,
0x17600000176d,
0x176e00001771,
0x177200001774,
0x1780000017b4,
0x17b6000017d4,
0x17d7000017d8,
0x17dc000017de,
0x17e0000017ea,
0x18100000181a,
0x182000001879,
0x1880000018ab,
0x18b0000018f6,
0x19000000191f,
0x19200000192c,
0x19300000193c,
0x19460000196e,
0x197000001975,
0x1980000019ac,
0x19b0000019ca,
0x19d0000019da,
0x1a0000001a1c,
0x1a2000001a5f,
0x1a6000001a7d,
0x1a7f00001a8a,
0x1a9000001a9a,
0x1aa700001aa8,
0x1ab000001abe,
0x1abf00001ac1,
0x1b0000001b4c,
0x1b5000001b5a,
0x1b6b00001b74,
0x1b8000001bf4,
0x1c0000001c38,
0x1c4000001c4a,
0x1c4d00001c7e,
0x1cd000001cd3,
0x1cd400001cfb,
0x1d0000001d2c,
0x1d2f00001d30,
0x1d3b00001d3c,
0x1d4e00001d4f,
0x1d6b00001d78,
0x1d7900001d9b,
0x1dc000001dfa,
0x1dfb00001e00,
0x1e0100001e02,
0x1e0300001e04,
0x1e0500001e06,
0x1e0700001e08,
0x1e0900001e0a,
0x1e0b00001e0c,
0x1e0d00001e0e,
0x1e0f00001e10,
0x1e1100001e12,
0x1e1300001e14,
0x1e1500001e16,
0x1e1700001e18,
0x1e1900001e1a,
0x1e1b00001e1c,
0x1e1d00001e1e,
0x1e1f00001e20,
0x1e2100001e22,
0x1e2300001e24,
0x1e2500001e26,
0x1e2700001e28,
0x1e2900001e2a,
0x1e2b00001e2c,
0x1e2d00001e2e,
0x1e2f00001e30,
0x1e3100001e32,
0x1e3300001e34,
0x1e3500001e36,
0x1e3700001e38,
0x1e3900001e3a,
0x1e3b00001e3c,
0x1e3d00001e3e,
0x1e3f00001e40,
0x1e4100001e42,
0x1e4300001e44,
0x1e4500001e46,
0x1e4700001e48,
0x1e4900001e4a,
0x1e4b00001e4c,
0x1e4d00001e4e,
0x1e4f00001e50,
0x1e5100001e52,
0x1e5300001e54,
0x1e5500001e56,
0x1e5700001e58,
0x1e5900001e5a,
0x1e5b00001e5c,
0x1e5d00001e5e,
0x1e5f00001e60,
0x1e6100001e62,
0x1e6300001e64,
0x1e6500001e66,
0x1e6700001e68,
0x1e6900001e6a,
0x1e6b00001e6c,
0x1e6d00001e6e,
0x1e6f00001e70,
0x1e7100001e72,
0x1e7300001e74,
0x1e7500001e76,
0x1e7700001e78,
0x1e7900001e7a,
0x1e7b00001e7c,
0x1e7d00001e7e,
0x1e7f00001e80,
0x1e8100001e82,
0x1e8300001e84,
0x1e8500001e86,
0x1e8700001e88,
0x1e8900001e8a,
0x1e8b00001e8c,
0x1e8d00001e8e,
0x1e8f00001e90,
0x1e9100001e92,
0x1e9300001e94,
0x1e9500001e9a,
0x1e9c00001e9e,
0x1e9f00001ea0,
0x1ea100001ea2,
0x1ea300001ea4,
0x1ea500001ea6,
0x1ea700001ea8,
0x1ea900001eaa,
0x1eab00001eac,
0x1ead00001eae,
0x1eaf00001eb0,
0x1eb100001eb2,
0x1eb300001eb4,
0x1eb500001eb6,
0x1eb700001eb8,
0x1eb900001eba,
0x1ebb00001ebc,
0x1ebd00001ebe,
0x1ebf00001ec0,
0x1ec100001ec2,
0x1ec300001ec4,
0x1ec500001ec6,
0x1ec700001ec8,
0x1ec900001eca,
0x1ecb00001ecc,
0x1ecd00001ece,
0x1ecf00001ed0,
0x1ed100001ed2,
0x1ed300001ed4,
0x1ed500001ed6,
0x1ed700001ed8,
0x1ed900001eda,
0x1edb00001edc,
0x1edd00001ede,
0x1edf00001ee0,
0x1ee100001ee2,
0x1ee300001ee4,
0x1ee500001ee6,
0x1ee700001ee8,
0x1ee900001eea,
0x1eeb00001eec,
0x1eed00001eee,
0x1eef00001ef0,
0x1ef100001ef2,
0x1ef300001ef4,
0x1ef500001ef6,
0x1ef700001ef8,
0x1ef900001efa,
0x1efb00001efc,
0x1efd00001efe,
0x1eff00001f08,
0x1f1000001f16,
0x1f2000001f28,
0x1f3000001f38,
0x1f4000001f46,
0x1f5000001f58,
0x1f6000001f68,
0x1f7000001f71,
0x1f7200001f73,
0x1f7400001f75,
0x1f7600001f77,
0x1f7800001f79,
0x1f7a00001f7b,
0x1f7c00001f7d,
0x1fb000001fb2,
0x1fb600001fb7,
0x1fc600001fc7,
0x1fd000001fd3,
0x1fd600001fd8,
0x1fe000001fe3,
0x1fe400001fe8,
0x1ff600001ff7,
0x214e0000214f,
0x218400002185,
0x2c3000002c5f,
0x2c6100002c62,
0x2c6500002c67,
0x2c6800002c69,
0x2c6a00002c6b,
0x2c6c00002c6d,
0x2c7100002c72,
0x2c7300002c75,
0x2c7600002c7c,
0x2c8100002c82,
0x2c8300002c84,
0x2c8500002c86,
0x2c8700002c88,
0x2c8900002c8a,
0x2c8b00002c8c,
0x2c8d00002c8e,
0x2c8f00002c90,
0x2c9100002c92,
0x2c9300002c94,
0x2c9500002c96,
0x2c9700002c98,
0x2c9900002c9a,
0x2c9b00002c9c,
0x2c9d00002c9e,
0x2c9f00002ca0,
0x2ca100002ca2,
0x2ca300002ca4,
0x2ca500002ca6,
0x2ca700002ca8,
0x2ca900002caa,
0x2cab00002cac,
0x2cad00002cae,
0x2caf00002cb0,
0x2cb100002cb2,
0x2cb300002cb4,
0x2cb500002cb6,
0x2cb700002cb8,
0x2cb900002cba,
0x2cbb00002cbc,
0x2cbd00002cbe,
0x2cbf00002cc0,
0x2cc100002cc2,
0x2cc300002cc4,
0x2cc500002cc6,
0x2cc700002cc8,
0x2cc900002cca,
0x2ccb00002ccc,
0x2ccd00002cce,
0x2ccf00002cd0,
0x2cd100002cd2,
0x2cd300002cd4,
0x2cd500002cd6,
0x2cd700002cd8,
0x2cd900002cda,
0x2cdb00002cdc,
0x2cdd00002cde,
0x2cdf00002ce0,
0x2ce100002ce2,
0x2ce300002ce5,
0x2cec00002ced,
0x2cee00002cf2,
0x2cf300002cf4,
0x2d0000002d26,
0x2d2700002d28,
0x2d2d00002d2e,
0x2d3000002d68,
0x2d7f00002d97,
0x2da000002da7,
0x2da800002daf,
0x2db000002db7,
0x2db800002dbf,
0x2dc000002dc7,
0x2dc800002dcf,
0x2dd000002dd7,
0x2dd800002ddf,
0x2de000002e00,
0x2e2f00002e30,
0x300500003008,
0x302a0000302e,
0x303c0000303d,
0x304100003097,
0x30990000309b,
0x309d0000309f,
0x30a1000030fb,
0x30fc000030ff,
0x310500003130,
0x31a0000031c0,
0x31f000003200,
0x340000004dc0,
0x4e0000009ffd,
0xa0000000a48d,
0xa4d00000a4fe,
0xa5000000a60d,
0xa6100000a62c,
0xa6410000a642,
0xa6430000a644,
0xa6450000a646,
0xa6470000a648,
0xa6490000a64a,
0xa64b0000a64c,
0xa64d0000a64e,
0xa64f0000a650,
0xa6510000a652,
0xa6530000a654,
0xa6550000a656,
0xa6570000a658,
0xa6590000a65a,
0xa65b0000a65c,
0xa65d0000a65e,
0xa65f0000a660,
0xa6610000a662,
0xa6630000a664,
0xa6650000a666,
0xa6670000a668,
0xa6690000a66a,
0xa66b0000a66c,
0xa66d0000a670,
0xa6740000a67e,
0xa67f0000a680,
0xa6810000a682,
0xa6830000a684,
0xa6850000a686,
0xa6870000a688,
0xa6890000a68a,
0xa68b0000a68c,
0xa68d0000a68e,
0xa68f0000a690,
0xa6910000a692,
0xa6930000a694,
0xa6950000a696,
0xa6970000a698,
0xa6990000a69a,
0xa69b0000a69c,
0xa69e0000a6e6,
0xa6f00000a6f2,
0xa7170000a720,
0xa7230000a724,
0xa7250000a726,
0xa7270000a728,
0xa7290000a72a,
0xa72b0000a72c,
0xa72d0000a72e,
0xa72f0000a732,
0xa7330000a734,
0xa7350000a736,
0xa7370000a738,
0xa7390000a73a,
0xa73b0000a73c,
0xa73d0000a73e,
0xa73f0000a740,
0xa7410000a742,
0xa7430000a744,
0xa7450000a746,
0xa7470000a748,
0xa7490000a74a,
0xa74b0000a74c,
0xa74d0000a74e,
0xa74f0000a750,
0xa7510000a752,
0xa7530000a754,
0xa7550000a756,
0xa7570000a758,
0xa7590000a75a,
0xa75b0000a75c,
0xa75d0000a75e,
0xa75f0000a760,
0xa7610000a762,
0xa7630000a764,
0xa7650000a766,
0xa7670000a768,
0xa7690000a76a,
0xa76b0000a76c,
0xa76d0000a76e,
0xa76f0000a770,
0xa7710000a779,
0xa77a0000a77b,
0xa77c0000a77d,
0xa77f0000a780,
0xa7810000a782,
0xa7830000a784,
0xa7850000a786,
0xa7870000a789,
0xa78c0000a78d,
0xa78e0000a790,
0xa7910000a792,
0xa7930000a796,
0xa7970000a798,
0xa7990000a79a,
0xa79b0000a79c,
0xa79d0000a79e,
0xa79f0000a7a0,
0xa7a10000a7a2,
0xa7a30000a7a4,
0xa7a50000a7a6,
0xa7a70000a7a8,
0xa7a90000a7aa,
0xa7af0000a7b0,
0xa7b50000a7b6,
0xa7b70000a7b8,
0xa7b90000a7ba,
0xa7bb0000a7bc,
0xa7bd0000a7be,
0xa7bf0000a7c0,
0xa7c30000a7c4,
0xa7c80000a7c9,
0xa7ca0000a7cb,
0xa7f60000a7f8,
0xa7fa0000a828,
0xa82c0000a82d,
0xa8400000a874,
0xa8800000a8c6,
0xa8d00000a8da,
0xa8e00000a8f8,
0xa8fb0000a8fc,
0xa8fd0000a92e,
0xa9300000a954,
0xa9800000a9c1,
0xa9cf0000a9da,
0xa9e00000a9ff,
0xaa000000aa37,
0xaa400000aa4e,
0xaa500000aa5a,
0xaa600000aa77,
0xaa7a0000aac3,
0xaadb0000aade,
0xaae00000aaf0,
0xaaf20000aaf7,
0xab010000ab07,
0xab090000ab0f,
0xab110000ab17,
0xab200000ab27,
0xab280000ab2f,
0xab300000ab5b,
0xab600000ab6a,
0xabc00000abeb,
0xabec0000abee,
0xabf00000abfa,
0xac000000d7a4,
0xfa0e0000fa10,
0xfa110000fa12,
0xfa130000fa15,
0xfa1f0000fa20,
0xfa210000fa22,
0xfa230000fa25,
0xfa270000fa2a,
0xfb1e0000fb1f,
0xfe200000fe30,
0xfe730000fe74,
0x100000001000c,
0x1000d00010027,
0x100280001003b,
0x1003c0001003e,
0x1003f0001004e,
0x100500001005e,
0x10080000100fb,
0x101fd000101fe,
0x102800001029d,
0x102a0000102d1,
0x102e0000102e1,
0x1030000010320,
0x1032d00010341,
0x103420001034a,
0x103500001037b,
0x103800001039e,
0x103a0000103c4,
0x103c8000103d0,
0x104280001049e,
0x104a0000104aa,
0x104d8000104fc,
0x1050000010528,
0x1053000010564,
0x1060000010737,
0x1074000010756,
0x1076000010768,
0x1080000010806,
0x1080800010809,
0x1080a00010836,
0x1083700010839,
0x1083c0001083d,
0x1083f00010856,
0x1086000010877,
0x108800001089f,
0x108e0000108f3,
0x108f4000108f6,
0x1090000010916,
0x109200001093a,
0x10980000109b8,
0x109be000109c0,
0x10a0000010a04,
0x10a0500010a07,
0x10a0c00010a14,
0x10a1500010a18,
0x10a1900010a36,
0x10a3800010a3b,
0x10a3f00010a40,
0x10a6000010a7d,
0x10a8000010a9d,
0x10ac000010ac8,
0x10ac900010ae7,
0x10b0000010b36,
0x10b4000010b56,
0x10b6000010b73,
0x10b8000010b92,
0x10c0000010c49,
0x10cc000010cf3,
0x10d0000010d28,
0x10d3000010d3a,
0x10e8000010eaa,
0x10eab00010ead,
0x10eb000010eb2,
0x10f0000010f1d,
0x10f2700010f28,
0x10f3000010f51,
0x10fb000010fc5,
0x10fe000010ff7,
0x1100000011047,
0x1106600011070,
0x1107f000110bb,
0x110d0000110e9,
0x110f0000110fa,
0x1110000011135,
0x1113600011140,
0x1114400011148,
0x1115000011174,
0x1117600011177,
0x11180000111c5,
0x111c9000111cd,
0x111ce000111db,
0x111dc000111dd,
0x1120000011212,
0x1121300011238,
0x1123e0001123f,
0x1128000011287,
0x1128800011289,
0x1128a0001128e,
0x1128f0001129e,
0x1129f000112a9,
0x112b0000112eb,
0x112f0000112fa,
0x1130000011304,
0x113050001130d,
0x1130f00011311,
0x1131300011329,
0x1132a00011331,
0x1133200011334,
0x113350001133a,
0x1133b00011345,
0x1134700011349,
0x1134b0001134e,
0x1135000011351,
0x1135700011358,
0x1135d00011364,
0x113660001136d,
0x1137000011375,
0x114000001144b,
0x114500001145a,
0x1145e00011462,
0x11480000114c6,
0x114c7000114c8,
0x114d0000114da,
0x11580000115b6,
0x115b8000115c1,
0x115d8000115de,
0x1160000011641,
0x1164400011645,
0x116500001165a,
0x11680000116b9,
0x116c0000116ca,
0x117000001171b,
0x1171d0001172c,
0x117300001173a,
0x118000001183b,
0x118c0000118ea,
0x118ff00011907,
0x119090001190a,
0x1190c00011914,
0x1191500011917,
0x1191800011936,
0x1193700011939,
0x1193b00011944,
0x119500001195a,
0x119a0000119a8,
0x119aa000119d8,
0x119da000119e2,
0x119e3000119e5,
0x11a0000011a3f,
0x11a4700011a48,
0x11a5000011a9a,
0x11a9d00011a9e,
0x11ac000011af9,
0x11c0000011c09,
0x11c0a00011c37,
0x11c3800011c41,
0x11c5000011c5a,
0x11c7200011c90,
0x11c9200011ca8,
0x11ca900011cb7,
0x11d0000011d07,
0x11d0800011d0a,
0x11d0b00011d37,
0x11d3a00011d3b,
0x11d3c00011d3e,
0x11d3f00011d48,
0x11d5000011d5a,
0x11d6000011d66,
0x11d6700011d69,
0x11d6a00011d8f,
0x11d9000011d92,
0x11d9300011d99,
0x11da000011daa,
0x11ee000011ef7,
0x11fb000011fb1,
0x120000001239a,
0x1248000012544,
0x130000001342f,
0x1440000014647,
0x1680000016a39,
0x16a4000016a5f,
0x16a6000016a6a,
0x16ad000016aee,
0x16af000016af5,
0x16b0000016b37,
0x16b4000016b44,
0x16b5000016b5a,
0x16b6300016b78,
0x16b7d00016b90,
0x16e6000016e80,
0x16f0000016f4b,
0x16f4f00016f88,
0x16f8f00016fa0,
0x16fe000016fe2,
0x16fe300016fe5,
0x16ff000016ff2,
0x17000000187f8,
0x1880000018cd6,
0x18d0000018d09,
0x1b0000001b11f,
0x1b1500001b153,
0x1b1640001b168,
0x1b1700001b2fc,
0x1bc000001bc6b,
0x1bc700001bc7d,
0x1bc800001bc89,
0x1bc900001bc9a,
0x1bc9d0001bc9f,
0x1da000001da37,
0x1da3b0001da6d,
0x1da750001da76,
0x1da840001da85,
0x1da9b0001daa0,
0x1daa10001dab0,
0x1e0000001e007,
0x1e0080001e019,
0x1e01b0001e022,
0x1e0230001e025,
0x1e0260001e02b,
0x1e1000001e12d,
0x1e1300001e13e,
0x1e1400001e14a,
0x1e14e0001e14f,
0x1e2c00001e2fa,
0x1e8000001e8c5,
0x1e8d00001e8d7,
0x1e9220001e94c,
0x1e9500001e95a,
0x1fbf00001fbfa,
0x200000002a6de,
0x2a7000002b735,
0x2b7400002b81e,
0x2b8200002cea2,
0x2ceb00002ebe1,
0x300000003134b,
),
'CONTEXTJ': (
0x200c0000200e,
),
'CONTEXTO': (
0xb7000000b8,
0x37500000376,
0x5f3000005f5,
0x6600000066a,
0x6f0000006fa,
0x30fb000030fc,
),
}
| Django-locallibrary/env/Lib/site-packages/pip/_vendor/idna/idnadata.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/idna/idnadata.py",
"repo_id": "Django-locallibrary",
"token_count": 29918
} | 28 |
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
from .__about__ import (
__author__,
__copyright__,
__email__,
__license__,
__summary__,
__title__,
__uri__,
__version__,
)
__all__ = [
"__title__",
"__summary__",
"__uri__",
"__version__",
"__author__",
"__email__",
"__license__",
"__copyright__",
]
| Django-locallibrary/env/Lib/site-packages/pip/_vendor/packaging/__init__.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/packaging/__init__.py",
"repo_id": "Django-locallibrary",
"token_count": 218
} | 29 |
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import string
import re
from pip._vendor.pyparsing import stringStart, stringEnd, originalTextFor, ParseException
from pip._vendor.pyparsing import ZeroOrMore, Word, Optional, Regex, Combine
from pip._vendor.pyparsing import Literal as L # noqa
from pip._vendor.six.moves.urllib import parse as urlparse
from ._typing import TYPE_CHECKING
from .markers import MARKER_EXPR, Marker
from .specifiers import LegacySpecifier, Specifier, SpecifierSet
if TYPE_CHECKING: # pragma: no cover
from typing import List
class InvalidRequirement(ValueError):
"""
    An invalid requirement was found; users should refer to PEP 508.
"""
ALPHANUM = Word(string.ascii_letters + string.digits)
LBRACKET = L("[").suppress()
RBRACKET = L("]").suppress()
LPAREN = L("(").suppress()
RPAREN = L(")").suppress()
COMMA = L(",").suppress()
SEMICOLON = L(";").suppress()
AT = L("@").suppress()
PUNCTUATION = Word("-_.")
IDENTIFIER_END = ALPHANUM | (ZeroOrMore(PUNCTUATION) + ALPHANUM)
IDENTIFIER = Combine(ALPHANUM + ZeroOrMore(IDENTIFIER_END))
NAME = IDENTIFIER("name")
EXTRA = IDENTIFIER
URI = Regex(r"[^ ]+")("url")
URL = AT + URI
EXTRAS_LIST = EXTRA + ZeroOrMore(COMMA + EXTRA)
EXTRAS = (LBRACKET + Optional(EXTRAS_LIST) + RBRACKET)("extras")
VERSION_PEP440 = Regex(Specifier._regex_str, re.VERBOSE | re.IGNORECASE)
VERSION_LEGACY = Regex(LegacySpecifier._regex_str, re.VERBOSE | re.IGNORECASE)
VERSION_ONE = VERSION_PEP440 ^ VERSION_LEGACY
VERSION_MANY = Combine(
VERSION_ONE + ZeroOrMore(COMMA + VERSION_ONE), joinString=",", adjacent=False
)("_raw_spec")
_VERSION_SPEC = Optional(((LPAREN + VERSION_MANY + RPAREN) | VERSION_MANY))
_VERSION_SPEC.setParseAction(lambda s, l, t: t._raw_spec or "")
VERSION_SPEC = originalTextFor(_VERSION_SPEC)("specifier")
VERSION_SPEC.setParseAction(lambda s, l, t: t[1])
MARKER_EXPR = originalTextFor(MARKER_EXPR())("marker")
MARKER_EXPR.setParseAction(
lambda s, l, t: Marker(s[t._original_start : t._original_end])
)
MARKER_SEPARATOR = SEMICOLON
MARKER = MARKER_SEPARATOR + MARKER_EXPR
VERSION_AND_MARKER = VERSION_SPEC + Optional(MARKER)
URL_AND_MARKER = URL + Optional(MARKER)
NAMED_REQUIREMENT = NAME + Optional(EXTRAS) + (URL_AND_MARKER | VERSION_AND_MARKER)
REQUIREMENT = stringStart + NAMED_REQUIREMENT + stringEnd
# pyparsing isn't thread safe during initialization, so we do it eagerly, see
# issue #104
REQUIREMENT.parseString("x[]")
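# Illustrative strings the grammar above accepts (a sketch, per PEP 508):
#     "name"
#     "name[extra1,extra2]>=1.0,<2.0"
#     'name @ https://example.com/pkg.tar.gz ; python_version < "3.8"'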
class Requirement(object):
"""Parse a requirement.
Parse a given requirement string into its parts, such as name, specifier,
URL, and extras. Raises InvalidRequirement on a badly-formed requirement
string.
"""
# TODO: Can we test whether something is contained within a requirement?
# If so how do we do that? Do we need to test against the _name_ of
# the thing as well as the version? What about the markers?
# TODO: Can we normalize the name and extra name?
def __init__(self, requirement_string):
# type: (str) -> None
try:
req = REQUIREMENT.parseString(requirement_string)
except ParseException as e:
raise InvalidRequirement(
'Parse error at "{0!r}": {1}'.format(
requirement_string[e.loc : e.loc + 8], e.msg
)
)
self.name = req.name
if req.url:
parsed_url = urlparse.urlparse(req.url)
if parsed_url.scheme == "file":
if urlparse.urlunparse(parsed_url) != req.url:
raise InvalidRequirement("Invalid URL given")
elif not (parsed_url.scheme and parsed_url.netloc) or (
not parsed_url.scheme and not parsed_url.netloc
):
raise InvalidRequirement("Invalid URL: {0}".format(req.url))
self.url = req.url
else:
self.url = None
self.extras = set(req.extras.asList() if req.extras else [])
self.specifier = SpecifierSet(req.specifier)
self.marker = req.marker if req.marker else None
def __str__(self):
# type: () -> str
parts = [self.name] # type: List[str]
if self.extras:
parts.append("[{0}]".format(",".join(sorted(self.extras))))
if self.specifier:
parts.append(str(self.specifier))
        if self.url:
            parts.append("@ {0}".format(self.url))
            # separate the URL from a following marker with a space
            if self.marker:
                parts.append(" ")
        if self.marker:
            parts.append("; {0}".format(self.marker))
return "".join(parts)
def __repr__(self):
# type: () -> str
return "<Requirement({0!r})>".format(str(self))
| Django-locallibrary/env/Lib/site-packages/pip/_vendor/packaging/requirements.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/packaging/requirements.py",
"repo_id": "Django-locallibrary",
"token_count": 2034
} | 30 |
"""This is invoked in a subprocess to call the build backend hooks.
It expects:
- Command line args: hook_name, control_dir
- Environment variables:
PEP517_BUILD_BACKEND=entry.point:spec
PEP517_BACKEND_PATH=paths (separated with os.pathsep)
- control_dir/input.json:
- {"kwargs": {...}}
Results:
- control_dir/output.json
- {"return_val": ...}
"""
from glob import glob
from importlib import import_module
import json
import os
import os.path
from os.path import join as pjoin
import re
import shutil
import sys
import traceback
# This file is run as a script, and `import compat` is not zip-safe, so we
# include write_json() and read_json() from compat.py.
#
# Handle reading and writing JSON in UTF-8, on Python 3 and 2.
if sys.version_info[0] >= 3:
# Python 3
def write_json(obj, path, **kwargs):
with open(path, 'w', encoding='utf-8') as f:
json.dump(obj, f, **kwargs)
def read_json(path):
with open(path, 'r', encoding='utf-8') as f:
return json.load(f)
else:
# Python 2
def write_json(obj, path, **kwargs):
with open(path, 'wb') as f:
json.dump(obj, f, encoding='utf-8', **kwargs)
def read_json(path):
with open(path, 'rb') as f:
return json.load(f)
class BackendUnavailable(Exception):
"""Raised if we cannot import the backend"""
def __init__(self, traceback):
self.traceback = traceback
class BackendInvalid(Exception):
"""Raised if the backend is invalid"""
def __init__(self, message):
self.message = message
class HookMissing(Exception):
"""Raised if a hook is missing and we are not executing the fallback"""
def contained_in(filename, directory):
"""Test if a file is located within the given directory."""
filename = os.path.normcase(os.path.abspath(filename))
directory = os.path.normcase(os.path.abspath(directory))
return os.path.commonprefix([filename, directory]) == directory
def _build_backend():
"""Find and load the build backend"""
# Add in-tree backend directories to the front of sys.path.
backend_path = os.environ.get('PEP517_BACKEND_PATH')
if backend_path:
extra_pathitems = backend_path.split(os.pathsep)
sys.path[:0] = extra_pathitems
ep = os.environ['PEP517_BUILD_BACKEND']
mod_path, _, obj_path = ep.partition(':')
try:
obj = import_module(mod_path)
except ImportError:
raise BackendUnavailable(traceback.format_exc())
if backend_path:
if not any(
contained_in(obj.__file__, path)
for path in extra_pathitems
):
raise BackendInvalid("Backend was not loaded from backend-path")
if obj_path:
for path_part in obj_path.split('.'):
obj = getattr(obj, path_part)
return obj
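# For example (assumed values): PEP517_BUILD_BACKEND=setuptools.build_meta
# imports and returns the ``setuptools.build_meta`` module, while an entry
# point such as ``mypkg.backend:hooks`` would also walk the ``hooks``
# attribute after the colon.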
def get_requires_for_build_wheel(config_settings):
"""Invoke the optional get_requires_for_build_wheel hook
Returns [] if the hook is not defined.
"""
backend = _build_backend()
try:
hook = backend.get_requires_for_build_wheel
except AttributeError:
return []
else:
return hook(config_settings)
def prepare_metadata_for_build_wheel(
metadata_directory, config_settings, _allow_fallback):
"""Invoke optional prepare_metadata_for_build_wheel
Implements a fallback by building a wheel if the hook isn't defined,
unless _allow_fallback is False in which case HookMissing is raised.
"""
backend = _build_backend()
try:
hook = backend.prepare_metadata_for_build_wheel
except AttributeError:
if not _allow_fallback:
raise HookMissing()
return _get_wheel_metadata_from_wheel(backend, metadata_directory,
config_settings)
else:
return hook(metadata_directory, config_settings)
WHEEL_BUILT_MARKER = 'PEP517_ALREADY_BUILT_WHEEL'
def _dist_info_files(whl_zip):
"""Identify the .dist-info folder inside a wheel ZipFile."""
res = []
for path in whl_zip.namelist():
m = re.match(r'[^/\\]+-[^/\\]+\.dist-info/', path)
if m:
res.append(path)
if res:
return res
raise Exception("No .dist-info folder found in wheel")
def _get_wheel_metadata_from_wheel(
backend, metadata_directory, config_settings):
"""Build a wheel and extract the metadata from it.
    Fallback for when the build backend does not
    define the 'prepare_metadata_for_build_wheel' hook.
"""
from zipfile import ZipFile
whl_basename = backend.build_wheel(metadata_directory, config_settings)
with open(os.path.join(metadata_directory, WHEEL_BUILT_MARKER), 'wb'):
pass # Touch marker file
whl_file = os.path.join(metadata_directory, whl_basename)
with ZipFile(whl_file) as zipf:
dist_info = _dist_info_files(zipf)
zipf.extractall(path=metadata_directory, members=dist_info)
return dist_info[0].split('/')[0]
def _find_already_built_wheel(metadata_directory):
"""Check for a wheel already built during the get_wheel_metadata hook.
"""
if not metadata_directory:
return None
metadata_parent = os.path.dirname(metadata_directory)
if not os.path.isfile(pjoin(metadata_parent, WHEEL_BUILT_MARKER)):
return None
whl_files = glob(os.path.join(metadata_parent, '*.whl'))
if not whl_files:
print('Found wheel built marker, but no .whl files')
return None
if len(whl_files) > 1:
print('Found multiple .whl files; unspecified behaviour. '
'Will call build_wheel.')
return None
# Exactly one .whl file
return whl_files[0]
def build_wheel(wheel_directory, config_settings, metadata_directory=None):
"""Invoke the mandatory build_wheel hook.
If a wheel was already built in the
prepare_metadata_for_build_wheel fallback, this
will copy it rather than rebuilding the wheel.
"""
prebuilt_whl = _find_already_built_wheel(metadata_directory)
if prebuilt_whl:
shutil.copy2(prebuilt_whl, wheel_directory)
return os.path.basename(prebuilt_whl)
return _build_backend().build_wheel(wheel_directory, config_settings,
metadata_directory)
def get_requires_for_build_sdist(config_settings):
"""Invoke the optional get_requires_for_build_wheel hook
Returns [] if the hook is not defined.
"""
backend = _build_backend()
try:
hook = backend.get_requires_for_build_sdist
except AttributeError:
return []
else:
return hook(config_settings)
class _DummyException(Exception):
"""Nothing should ever raise this exception"""
class GotUnsupportedOperation(Exception):
"""For internal use when backend raises UnsupportedOperation"""
def __init__(self, traceback):
self.traceback = traceback
def build_sdist(sdist_directory, config_settings):
"""Invoke the mandatory build_sdist hook."""
backend = _build_backend()
try:
return backend.build_sdist(sdist_directory, config_settings)
except getattr(backend, 'UnsupportedOperation', _DummyException):
raise GotUnsupportedOperation(traceback.format_exc())
HOOK_NAMES = {
'get_requires_for_build_wheel',
'prepare_metadata_for_build_wheel',
'build_wheel',
'get_requires_for_build_sdist',
'build_sdist',
}
def main():
if len(sys.argv) < 3:
sys.exit("Needs args: hook_name, control_dir")
hook_name = sys.argv[1]
control_dir = sys.argv[2]
if hook_name not in HOOK_NAMES:
sys.exit("Unknown hook: %s" % hook_name)
hook = globals()[hook_name]
hook_input = read_json(pjoin(control_dir, 'input.json'))
json_out = {'unsupported': False, 'return_val': None}
try:
json_out['return_val'] = hook(**hook_input['kwargs'])
except BackendUnavailable as e:
json_out['no_backend'] = True
json_out['traceback'] = e.traceback
except BackendInvalid as e:
json_out['backend_invalid'] = True
json_out['backend_error'] = e.message
except GotUnsupportedOperation as e:
json_out['unsupported'] = True
json_out['traceback'] = e.traceback
except HookMissing:
json_out['hook_missing'] = True
write_json(json_out, pjoin(control_dir, 'output.json'), indent=2)
if __name__ == '__main__':
main()
| Django-locallibrary/env/Lib/site-packages/pip/_vendor/pep517/_in_process.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/pep517/_in_process.py",
"repo_id": "Django-locallibrary",
"token_count": 3325
} | 31 |
# -*- coding: utf-8 -*-
"""
requests.cookies
~~~~~~~~~~~~~~~~
Compatibility code to be able to use `cookielib.CookieJar` with requests.
requests.utils imports from here, so be careful with imports.
"""
import copy
import time
import calendar
from ._internal_utils import to_native_string
from .compat import cookielib, urlparse, urlunparse, Morsel, MutableMapping
try:
import threading
except ImportError:
import dummy_threading as threading
class MockRequest(object):
"""Wraps a `requests.Request` to mimic a `urllib2.Request`.
The code in `cookielib.CookieJar` expects this interface in order to correctly
manage cookie policies, i.e., determine whether a cookie can be set, given the
domains of the request and the cookie.
The original request object is read-only. The client is responsible for collecting
the new headers via `get_new_headers()` and interpreting them appropriately. You
probably want `get_cookie_header`, defined below.
"""
def __init__(self, request):
self._r = request
self._new_headers = {}
self.type = urlparse(self._r.url).scheme
def get_type(self):
return self.type
def get_host(self):
return urlparse(self._r.url).netloc
def get_origin_req_host(self):
return self.get_host()
def get_full_url(self):
# Only return the response's URL if the user hadn't set the Host
# header
if not self._r.headers.get('Host'):
return self._r.url
# If they did set it, retrieve it and reconstruct the expected domain
host = to_native_string(self._r.headers['Host'], encoding='utf-8')
parsed = urlparse(self._r.url)
# Reconstruct the URL as we expect it
return urlunparse([
parsed.scheme, host, parsed.path, parsed.params, parsed.query,
parsed.fragment
])
def is_unverifiable(self):
return True
def has_header(self, name):
return name in self._r.headers or name in self._new_headers
def get_header(self, name, default=None):
return self._r.headers.get(name, self._new_headers.get(name, default))
def add_header(self, key, val):
"""cookielib has no legitimate use for this method; add it back if you find one."""
raise NotImplementedError("Cookie headers should be added with add_unredirected_header()")
def add_unredirected_header(self, name, value):
self._new_headers[name] = value
def get_new_headers(self):
return self._new_headers
@property
def unverifiable(self):
return self.is_unverifiable()
@property
def origin_req_host(self):
return self.get_origin_req_host()
@property
def host(self):
return self.get_host()
class MockResponse(object):
"""Wraps a `httplib.HTTPMessage` to mimic a `urllib.addinfourl`.
...what? Basically, expose the parsed HTTP headers from the server response
the way `cookielib` expects to see them.
"""
def __init__(self, headers):
"""Make a MockResponse for `cookielib` to read.
:param headers: a httplib.HTTPMessage or analogous carrying the headers
"""
self._headers = headers
def info(self):
return self._headers
    def getheaders(self, name):
        return self._headers.getheaders(name)
def extract_cookies_to_jar(jar, request, response):
"""Extract the cookies from the response into a CookieJar.
:param jar: cookielib.CookieJar (not necessarily a RequestsCookieJar)
:param request: our own requests.Request object
:param response: urllib3.HTTPResponse object
"""
if not (hasattr(response, '_original_response') and
response._original_response):
return
# the _original_response field is the wrapped httplib.HTTPResponse object,
req = MockRequest(request)
# pull out the HTTPMessage with the headers and put it in the mock:
res = MockResponse(response._original_response.msg)
jar.extract_cookies(res, req)
def get_cookie_header(jar, request):
"""
Produce an appropriate Cookie header string to be sent with `request`, or None.
:rtype: str
"""
r = MockRequest(request)
jar.add_cookie_header(r)
return r.get_new_headers().get('Cookie')
def remove_cookie_by_name(cookiejar, name, domain=None, path=None):
"""Unsets a cookie by name, by default over all domains and paths.
Wraps CookieJar.clear(), is O(n).
"""
clearables = []
for cookie in cookiejar:
if cookie.name != name:
continue
if domain is not None and domain != cookie.domain:
continue
if path is not None and path != cookie.path:
continue
clearables.append((cookie.domain, cookie.path, cookie.name))
for domain, path, name in clearables:
cookiejar.clear(domain, path, name)
class CookieConflictError(RuntimeError):
"""There are two cookies that meet the criteria specified in the cookie jar.
Use .get and .set and include domain and path args in order to be more specific.
"""
class RequestsCookieJar(cookielib.CookieJar, MutableMapping):
"""Compatibility class; is a cookielib.CookieJar, but exposes a dict
interface.
This is the CookieJar we create by default for requests and sessions that
don't specify one, since some clients may expect response.cookies and
session.cookies to support dict operations.
Requests does not use the dict interface internally; it's just for
compatibility with external client code. All requests code should work
out of the box with externally provided instances of ``CookieJar``, e.g.
``LWPCookieJar`` and ``FileCookieJar``.
Unlike a regular CookieJar, this class is pickleable.
.. warning:: dictionary operations that are normally O(1) may be O(n).
"""
def get(self, name, default=None, domain=None, path=None):
"""Dict-like get() that also supports optional domain and path args in
order to resolve naming collisions from using one cookie jar over
multiple domains.
.. warning:: operation is O(n), not O(1).
"""
try:
return self._find_no_duplicates(name, domain, path)
except KeyError:
return default
def set(self, name, value, **kwargs):
"""Dict-like set() that also supports optional domain and path args in
order to resolve naming collisions from using one cookie jar over
multiple domains.
"""
# support client code that unsets cookies by assignment of a None value:
if value is None:
remove_cookie_by_name(self, name, domain=kwargs.get('domain'), path=kwargs.get('path'))
return
if isinstance(value, Morsel):
c = morsel_to_cookie(value)
else:
c = create_cookie(name, value, **kwargs)
self.set_cookie(c)
return c
def iterkeys(self):
"""Dict-like iterkeys() that returns an iterator of names of cookies
from the jar.
.. seealso:: itervalues() and iteritems().
"""
for cookie in iter(self):
yield cookie.name
def keys(self):
"""Dict-like keys() that returns a list of names of cookies from the
jar.
.. seealso:: values() and items().
"""
return list(self.iterkeys())
def itervalues(self):
"""Dict-like itervalues() that returns an iterator of values of cookies
from the jar.
.. seealso:: iterkeys() and iteritems().
"""
for cookie in iter(self):
yield cookie.value
def values(self):
"""Dict-like values() that returns a list of values of cookies from the
jar.
.. seealso:: keys() and items().
"""
return list(self.itervalues())
def iteritems(self):
"""Dict-like iteritems() that returns an iterator of name-value tuples
from the jar.
.. seealso:: iterkeys() and itervalues().
"""
for cookie in iter(self):
yield cookie.name, cookie.value
def items(self):
"""Dict-like items() that returns a list of name-value tuples from the
jar. Allows client-code to call ``dict(RequestsCookieJar)`` and get a
vanilla python dict of key value pairs.
.. seealso:: keys() and values().
"""
return list(self.iteritems())
def list_domains(self):
"""Utility method to list all the domains in the jar."""
domains = []
for cookie in iter(self):
if cookie.domain not in domains:
domains.append(cookie.domain)
return domains
def list_paths(self):
"""Utility method to list all the paths in the jar."""
paths = []
for cookie in iter(self):
if cookie.path not in paths:
paths.append(cookie.path)
return paths
def multiple_domains(self):
"""Returns True if there are multiple domains in the jar.
Returns False otherwise.
:rtype: bool
"""
domains = []
for cookie in iter(self):
if cookie.domain is not None and cookie.domain in domains:
return True
domains.append(cookie.domain)
return False # there is only one domain in jar
def get_dict(self, domain=None, path=None):
"""Takes as an argument an optional domain and path and returns a plain
old Python dict of name-value pairs of cookies that meet the
requirements.
:rtype: dict
"""
dictionary = {}
for cookie in iter(self):
if (
(domain is None or cookie.domain == domain) and
(path is None or cookie.path == path)
):
dictionary[cookie.name] = cookie.value
return dictionary
def __contains__(self, name):
try:
return super(RequestsCookieJar, self).__contains__(name)
except CookieConflictError:
return True
def __getitem__(self, name):
"""Dict-like __getitem__() for compatibility with client code. Throws
        an exception if there is more than one cookie with that name. In that case,
use the more explicit get() method instead.
.. warning:: operation is O(n), not O(1).
"""
return self._find_no_duplicates(name)
def __setitem__(self, name, value):
"""Dict-like __setitem__ for compatibility with client code. Throws
        an exception if there is already a cookie of that name in the jar. In that
case, use the more explicit set() method instead.
"""
self.set(name, value)
def __delitem__(self, name):
"""Deletes a cookie given a name. Wraps ``cookielib.CookieJar``'s
``remove_cookie_by_name()``.
"""
remove_cookie_by_name(self, name)
def set_cookie(self, cookie, *args, **kwargs):
if hasattr(cookie.value, 'startswith') and cookie.value.startswith('"') and cookie.value.endswith('"'):
cookie.value = cookie.value.replace('\\"', '')
return super(RequestsCookieJar, self).set_cookie(cookie, *args, **kwargs)
def update(self, other):
"""Updates this jar with cookies from another CookieJar or dict-like"""
if isinstance(other, cookielib.CookieJar):
for cookie in other:
self.set_cookie(copy.copy(cookie))
else:
super(RequestsCookieJar, self).update(other)
def _find(self, name, domain=None, path=None):
"""Requests uses this method internally to get cookie values.
If there are conflicting cookies, _find arbitrarily chooses one.
See _find_no_duplicates if you want an exception thrown if there are
conflicting cookies.
:param name: a string containing name of cookie
:param domain: (optional) string containing domain of cookie
:param path: (optional) string containing path of cookie
:return: cookie.value
"""
for cookie in iter(self):
if cookie.name == name:
if domain is None or cookie.domain == domain:
if path is None or cookie.path == path:
return cookie.value
raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
def _find_no_duplicates(self, name, domain=None, path=None):
"""Both ``__get_item__`` and ``get`` call this function: it's never
used elsewhere in Requests.
:param name: a string containing name of cookie
:param domain: (optional) string containing domain of cookie
:param path: (optional) string containing path of cookie
:raises KeyError: if cookie is not found
:raises CookieConflictError: if there are multiple cookies
that match name and optionally domain and path
:return: cookie.value
"""
toReturn = None
for cookie in iter(self):
if cookie.name == name:
if domain is None or cookie.domain == domain:
if path is None or cookie.path == path:
if toReturn is not None: # if there are multiple cookies that meet passed in criteria
raise CookieConflictError('There are multiple cookies with name, %r' % (name))
toReturn = cookie.value # we will eventually return this as long as no cookie conflict
if toReturn:
return toReturn
raise KeyError('name=%r, domain=%r, path=%r' % (name, domain, path))
def __getstate__(self):
"""Unlike a normal CookieJar, this class is pickleable."""
state = self.__dict__.copy()
# remove the unpickleable RLock object
state.pop('_cookies_lock')
return state
def __setstate__(self, state):
"""Unlike a normal CookieJar, this class is pickleable."""
self.__dict__.update(state)
if '_cookies_lock' not in self.__dict__:
self._cookies_lock = threading.RLock()
def copy(self):
"""Return a copy of this RequestsCookieJar."""
new_cj = RequestsCookieJar()
new_cj.set_policy(self.get_policy())
new_cj.update(self)
return new_cj
def get_policy(self):
"""Return the CookiePolicy instance used."""
return self._policy
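# Typical dict-style use (an illustrative sketch):
#     jar = RequestsCookieJar()
#     jar.set('token', 'abc', domain='example.com', path='/')
#     jar['token']   -> 'abc'
#     dict(jar)      -> {'token': 'abc'}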
def _copy_cookie_jar(jar):
if jar is None:
return None
if hasattr(jar, 'copy'):
# We're dealing with an instance of RequestsCookieJar
return jar.copy()
# We're dealing with a generic CookieJar instance
new_jar = copy.copy(jar)
new_jar.clear()
for cookie in jar:
new_jar.set_cookie(copy.copy(cookie))
return new_jar
def create_cookie(name, value, **kwargs):
"""Make a cookie from underspecified parameters.
By default, the pair of `name` and `value` will be set for the domain ''
and sent on every request (this is sometimes called a "supercookie").
"""
result = {
'version': 0,
'name': name,
'value': value,
'port': None,
'domain': '',
'path': '/',
'secure': False,
'expires': None,
'discard': True,
'comment': None,
'comment_url': None,
'rest': {'HttpOnly': None},
'rfc2109': False,
}
badargs = set(kwargs) - set(result)
if badargs:
err = 'create_cookie() got unexpected keyword arguments: %s'
raise TypeError(err % list(badargs))
result.update(kwargs)
result['port_specified'] = bool(result['port'])
result['domain_specified'] = bool(result['domain'])
result['domain_initial_dot'] = result['domain'].startswith('.')
result['path_specified'] = bool(result['path'])
return cookielib.Cookie(**result)
def morsel_to_cookie(morsel):
"""Convert a Morsel object into a Cookie containing the one k/v pair."""
expires = None
if morsel['max-age']:
try:
expires = int(time.time() + int(morsel['max-age']))
except ValueError:
raise TypeError('max-age: %s must be integer' % morsel['max-age'])
elif morsel['expires']:
time_template = '%a, %d-%b-%Y %H:%M:%S GMT'
expires = calendar.timegm(
time.strptime(morsel['expires'], time_template)
)
return create_cookie(
comment=morsel['comment'],
comment_url=bool(morsel['comment']),
discard=False,
domain=morsel['domain'],
expires=expires,
name=morsel.key,
path=morsel['path'],
port=None,
rest={'HttpOnly': morsel['httponly']},
rfc2109=False,
secure=bool(morsel['secure']),
value=morsel.value,
version=morsel['version'] or 0,
)
def cookiejar_from_dict(cookie_dict, cookiejar=None, overwrite=True):
"""Returns a CookieJar from a key/value dictionary.
:param cookie_dict: Dict of key/values to insert into CookieJar.
:param cookiejar: (optional) A cookiejar to add the cookies to.
:param overwrite: (optional) If False, will not replace cookies
already in the jar with new ones.
:rtype: CookieJar
"""
if cookiejar is None:
cookiejar = RequestsCookieJar()
if cookie_dict is not None:
names_from_jar = [cookie.name for cookie in cookiejar]
for name in cookie_dict:
if overwrite or (name not in names_from_jar):
cookiejar.set_cookie(create_cookie(name, cookie_dict[name]))
return cookiejar
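# Minimal sketch of intended use:
#     jar = cookiejar_from_dict({'session': 'abc123'})
#     jar.get('session')  -> 'abc123'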
def merge_cookies(cookiejar, cookies):
"""Add cookies to cookiejar and returns a merged CookieJar.
:param cookiejar: CookieJar object to add the cookies to.
:param cookies: Dictionary or CookieJar object to be added.
:rtype: CookieJar
"""
if not isinstance(cookiejar, cookielib.CookieJar):
raise ValueError('You can only merge into CookieJar')
if isinstance(cookies, dict):
cookiejar = cookiejar_from_dict(
cookies, cookiejar=cookiejar, overwrite=False)
elif isinstance(cookies, cookielib.CookieJar):
try:
cookiejar.update(cookies)
except AttributeError:
for cookie_in_jar in cookies:
cookiejar.set_cookie(cookie_in_jar)
return cookiejar
| Django-locallibrary/env/Lib/site-packages/pip/_vendor/requests/cookies.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/requests/cookies.py",
"repo_id": "Django-locallibrary",
"token_count": 7300
} | 32 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2005-2010 ActiveState Software Inc.
# Copyright (c) 2013 Eddy Petrișor
"""Utilities for determining application-specific dirs.
See <http://github.com/ActiveState/appdirs> for details and usage.
"""
# Dev Notes:
# - MSDN on where to store app data files:
# http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120
# - Mac OS X: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html
# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html
__version_info__ = (1, 4, 3)
__version__ = '.'.join(map(str, __version_info__))
import sys
import os
PY3 = sys.version_info[0] == 3
if PY3:
unicode = str
if sys.platform.startswith('java'):
import platform
os_name = platform.java_ver()[3][0]
if os_name.startswith('Windows'): # "Windows XP", "Windows 7", etc.
system = 'win32'
elif os_name.startswith('Mac'): # "Mac OS X", etc.
system = 'darwin'
else: # "Linux", "SunOS", "FreeBSD", etc.
# Setting this to "linux2" is not ideal, but only Windows or Mac
# are actually checked for and the rest of the module expects
# *sys.platform* style strings.
system = 'linux2'
else:
system = sys.platform
def user_data_dir(appname=None, appauthor=None, version=None, roaming=False):
r"""Return full path to the user-specific data dir for this application.
"appname" is the name of application.
If None, just the system directory is returned.
"appauthor" (only used on Windows) is the name of the
appauthor or distributing body for this application. Typically
it is the owning company name. This falls back to appname. You may
pass False to disable it.
"version" is an optional version path element to append to the
path. You might want to use this if you want multiple versions
of your app to be able to run independently. If used, this
would typically be "<major>.<minor>".
Only applied when appname is present.
"roaming" (boolean, default False) can be set True to use the Windows
roaming appdata directory. That means that for users on a Windows
network setup for roaming profiles, this user data will be
sync'd on login. See
<http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
for a discussion of issues.
Typical user data directories are:
Mac OS X: ~/Library/Application Support/<AppName>
Unix: ~/.local/share/<AppName> # or in $XDG_DATA_HOME, if defined
        Win XP (not roaming): C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>
        Win XP (roaming): C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName>
Win 7 (not roaming): C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>
Win 7 (roaming): C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName>
For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
That means, by default "~/.local/share/<AppName>".
"""
if system == "win32":
if appauthor is None:
appauthor = appname
        const = "CSIDL_APPDATA" if roaming else "CSIDL_LOCAL_APPDATA"
path = os.path.normpath(_get_win_folder(const))
if appname:
if appauthor is not False:
path = os.path.join(path, appauthor, appname)
else:
path = os.path.join(path, appname)
elif system == 'darwin':
path = os.path.expanduser('~/Library/Application Support/')
if appname:
path = os.path.join(path, appname)
else:
path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share"))
if appname:
path = os.path.join(path, appname)
if appname and version:
path = os.path.join(path, version)
return path
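# Illustrative sketch (results depend on the host; the values below are
# assumptions for a default Linux and Windows setup):
#
#     user_data_dir("MyApp", "MyCompany")
#     # Linux   -> '/home/<user>/.local/share/MyApp'
#     user_data_dir("MyApp", "MyCompany", version="1.0", roaming=True)
#     # Windows -> 'C:\\Users\\<user>\\AppData\\Roaming\\MyCompany\\MyApp\\1.0'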
def site_data_dir(appname=None, appauthor=None, version=None, multipath=False):
r"""Return full path to the user-shared data dir for this application.
"appname" is the name of application.
If None, just the system directory is returned.
"appauthor" (only used on Windows) is the name of the
appauthor or distributing body for this application. Typically
it is the owning company name. This falls back to appname. You may
pass False to disable it.
"version" is an optional version path element to append to the
path. You might want to use this if you want multiple versions
of your app to be able to run independently. If used, this
would typically be "<major>.<minor>".
Only applied when appname is present.
"multipath" is an optional parameter only applicable to *nix
which indicates that the entire list of data dirs should be
returned. By default, the first item from XDG_DATA_DIRS is
returned, or '/usr/local/share/<AppName>',
if XDG_DATA_DIRS is not set
Typical site data directories are:
Mac OS X: /Library/Application Support/<AppName>
Unix: /usr/local/share/<AppName> or /usr/share/<AppName>
Win XP: C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName>
Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
Win 7: C:\ProgramData\<AppAuthor>\<AppName> # Hidden, but writeable on Win 7.
For Unix, this is using the $XDG_DATA_DIRS[0] default.
WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
"""
if system == "win32":
if appauthor is None:
appauthor = appname
path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
if appname:
if appauthor is not False:
path = os.path.join(path, appauthor, appname)
else:
path = os.path.join(path, appname)
elif system == 'darwin':
path = os.path.expanduser('/Library/Application Support')
if appname:
path = os.path.join(path, appname)
else:
# XDG default for $XDG_DATA_DIRS
# only first, if multipath is False
path = os.getenv('XDG_DATA_DIRS',
os.pathsep.join(['/usr/local/share', '/usr/share']))
pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
if appname:
if version:
appname = os.path.join(appname, version)
pathlist = [os.sep.join([x, appname]) for x in pathlist]
if multipath:
path = os.pathsep.join(pathlist)
else:
path = pathlist[0]
return path
if appname and version:
path = os.path.join(path, version)
return path
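# Illustrative sketch (assumes a *nix host with XDG_DATA_DIRS unset):
#
#     site_data_dir("MyApp")
#     # -> '/usr/local/share/MyApp'
#     site_data_dir("MyApp", multipath=True)
#     # -> '/usr/local/share/MyApp:/usr/share/MyApp'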
def user_config_dir(appname=None, appauthor=None, version=None, roaming=False):
r"""Return full path to the user-specific config dir for this application.
"appname" is the name of application.
If None, just the system directory is returned.
"appauthor" (only used on Windows) is the name of the
appauthor or distributing body for this application. Typically
it is the owning company name. This falls back to appname. You may
pass False to disable it.
"version" is an optional version path element to append to the
path. You might want to use this if you want multiple versions
of your app to be able to run independently. If used, this
would typically be "<major>.<minor>".
Only applied when appname is present.
"roaming" (boolean, default False) can be set True to use the Windows
roaming appdata directory. That means that for users on a Windows
network setup for roaming profiles, this user data will be
sync'd on login. See
<http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
for a discussion of issues.
Typical user config directories are:
Mac OS X: same as user_data_dir
Unix: ~/.config/<AppName> # or in $XDG_CONFIG_HOME, if defined
Win *: same as user_data_dir
For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
That means, by default "~/.config/<AppName>".
"""
if system in ["win32", "darwin"]:
path = user_data_dir(appname, appauthor, None, roaming)
else:
path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config"))
if appname:
path = os.path.join(path, appname)
if appname and version:
path = os.path.join(path, version)
return path
def site_config_dir(appname=None, appauthor=None, version=None, multipath=False):
r"""Return full path to the user-shared data dir for this application.
"appname" is the name of application.
If None, just the system directory is returned.
"appauthor" (only used on Windows) is the name of the
appauthor or distributing body for this application. Typically
it is the owning company name. This falls back to appname. You may
pass False to disable it.
"version" is an optional version path element to append to the
path. You might want to use this if you want multiple versions
of your app to be able to run independently. If used, this
would typically be "<major>.<minor>".
Only applied when appname is present.
"multipath" is an optional parameter only applicable to *nix
which indicates that the entire list of config dirs should be
returned. By default, the first item from XDG_CONFIG_DIRS is
returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set
Typical site config directories are:
Mac OS X: same as site_data_dir
Unix: /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in
$XDG_CONFIG_DIRS
Win *: same as site_data_dir
Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False
WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
"""
if system in ["win32", "darwin"]:
path = site_data_dir(appname, appauthor)
if appname and version:
path = os.path.join(path, version)
else:
# XDG default for $XDG_CONFIG_DIRS
# only first, if multipath is False
path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
if appname:
if version:
appname = os.path.join(appname, version)
pathlist = [os.sep.join([x, appname]) for x in pathlist]
if multipath:
path = os.pathsep.join(pathlist)
else:
path = pathlist[0]
return path
def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True):
r"""Return full path to the user-specific cache dir for this application.
"appname" is the name of application.
If None, just the system directory is returned.
"appauthor" (only used on Windows) is the name of the
appauthor or distributing body for this application. Typically
it is the owning company name. This falls back to appname. You may
pass False to disable it.
"version" is an optional version path element to append to the
path. You might want to use this if you want multiple versions
of your app to be able to run independently. If used, this
would typically be "<major>.<minor>".
Only applied when appname is present.
"opinion" (boolean) can be False to disable the appending of
"Cache" to the base app data dir for Windows. See
discussion below.
Typical user cache directories are:
Mac OS X: ~/Library/Caches/<AppName>
Unix: ~/.cache/<AppName> (XDG default)
Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache
Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache
On Windows the only suggestion in the MSDN docs is that local settings go in
the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming
app data dir (the default returned by `user_data_dir` above). Apps typically
put cache data somewhere *under* the given dir here. Some examples:
...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
...\Acme\SuperApp\Cache\1.0
OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
This can be disabled with the `opinion=False` option.
"""
if system == "win32":
if appauthor is None:
appauthor = appname
path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
if appname:
if appauthor is not False:
path = os.path.join(path, appauthor, appname)
else:
path = os.path.join(path, appname)
if opinion:
path = os.path.join(path, "Cache")
elif system == 'darwin':
path = os.path.expanduser('~/Library/Caches')
if appname:
path = os.path.join(path, appname)
else:
path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
if appname:
path = os.path.join(path, appname)
if appname and version:
path = os.path.join(path, version)
return path
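# Illustrative sketch (assumption): on Windows the trailing "Cache" segment
# is the opinionated default and can be disabled.
#
#     user_cache_dir("MyApp", "MyCompany")
#     # -> 'C:\\Users\\<user>\\AppData\\Local\\MyCompany\\MyApp\\Cache'
#     user_cache_dir("MyApp", "MyCompany", opinion=False)
#     # -> 'C:\\Users\\<user>\\AppData\\Local\\MyCompany\\MyApp'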
def user_state_dir(appname=None, appauthor=None, version=None, roaming=False):
r"""Return full path to the user-specific state dir for this application.
"appname" is the name of application.
If None, just the system directory is returned.
"appauthor" (only used on Windows) is the name of the
appauthor or distributing body for this application. Typically
it is the owning company name. This falls back to appname. You may
pass False to disable it.
"version" is an optional version path element to append to the
path. You might want to use this if you want multiple versions
of your app to be able to run independently. If used, this
would typically be "<major>.<minor>".
Only applied when appname is present.
"roaming" (boolean, default False) can be set True to use the Windows
roaming appdata directory. That means that for users on a Windows
network setup for roaming profiles, this user data will be
sync'd on login. See
<http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
for a discussion of issues.
Typical user state directories are:
Mac OS X: same as user_data_dir
Unix: ~/.local/state/<AppName> # or in $XDG_STATE_HOME, if defined
Win *: same as user_data_dir
For Unix, we follow this Debian proposal <https://wiki.debian.org/XDGBaseDirectorySpecification#state>
to extend the XDG spec and support $XDG_STATE_HOME.
That means, by default "~/.local/state/<AppName>".
"""
if system in ["win32", "darwin"]:
path = user_data_dir(appname, appauthor, None, roaming)
else:
path = os.getenv('XDG_STATE_HOME', os.path.expanduser("~/.local/state"))
if appname:
path = os.path.join(path, appname)
if appname and version:
path = os.path.join(path, version)
return path
def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
r"""Return full path to the user-specific log dir for this application.
"appname" is the name of application.
If None, just the system directory is returned.
"appauthor" (only used on Windows) is the name of the
appauthor or distributing body for this application. Typically
it is the owning company name. This falls back to appname. You may
pass False to disable it.
"version" is an optional version path element to append to the
path. You might want to use this if you want multiple versions
of your app to be able to run independently. If used, this
would typically be "<major>.<minor>".
Only applied when appname is present.
"opinion" (boolean) can be False to disable the appending of
"Logs" to the base app data dir for Windows, and "log" to the
base cache dir for Unix. See discussion below.
Typical user log directories are:
Mac OS X: ~/Library/Logs/<AppName>
Unix: ~/.cache/<AppName>/log # or under $XDG_CACHE_HOME if defined
Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs
Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs
On Windows the only suggestion in the MSDN docs is that local settings
go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in
examples of what some windows apps use for a logs dir.)
OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA`
value for Windows and appends "log" to the user cache dir for Unix.
This can be disabled with the `opinion=False` option.
"""
if system == "darwin":
path = os.path.join(
os.path.expanduser('~/Library/Logs'),
appname)
elif system == "win32":
path = user_data_dir(appname, appauthor, version)
version = False
if opinion:
path = os.path.join(path, "Logs")
else:
path = user_cache_dir(appname, appauthor, version)
version = False
if opinion:
path = os.path.join(path, "log")
if appname and version:
path = os.path.join(path, version)
return path
class AppDirs(object):
"""Convenience wrapper for getting application dirs."""
def __init__(self, appname=None, appauthor=None, version=None,
roaming=False, multipath=False):
self.appname = appname
self.appauthor = appauthor
self.version = version
self.roaming = roaming
self.multipath = multipath
@property
def user_data_dir(self):
return user_data_dir(self.appname, self.appauthor,
version=self.version, roaming=self.roaming)
@property
def site_data_dir(self):
return site_data_dir(self.appname, self.appauthor,
version=self.version, multipath=self.multipath)
@property
def user_config_dir(self):
return user_config_dir(self.appname, self.appauthor,
version=self.version, roaming=self.roaming)
@property
def site_config_dir(self):
return site_config_dir(self.appname, self.appauthor,
version=self.version, multipath=self.multipath)
@property
def user_cache_dir(self):
return user_cache_dir(self.appname, self.appauthor,
version=self.version)
@property
def user_state_dir(self):
return user_state_dir(self.appname, self.appauthor,
version=self.version)
@property
def user_log_dir(self):
return user_log_dir(self.appname, self.appauthor,
version=self.version)
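# Illustrative sketch (assumption): AppDirs bundles the module-level
# functions so appname/appauthor/version are supplied once.
#
#     dirs = AppDirs("MyApp", "MyCompany", version="1.0")
#     dirs.user_data_dir   # == user_data_dir("MyApp", "MyCompany", version="1.0")
#     dirs.user_cache_dir  # == user_cache_dir("MyApp", "MyCompany", version="1.0")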
#---- internal support stuff
def _get_win_folder_from_registry(csidl_name):
"""This is a fallback technique at best. I'm not sure if using the
registry for this guarantees us the correct answer for all CSIDL_*
names.
"""
if PY3:
import winreg as _winreg
else:
import _winreg
shell_folder_name = {
"CSIDL_APPDATA": "AppData",
"CSIDL_COMMON_APPDATA": "Common AppData",
"CSIDL_LOCAL_APPDATA": "Local AppData",
}[csidl_name]
key = _winreg.OpenKey(
_winreg.HKEY_CURRENT_USER,
r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
)
dir, type = _winreg.QueryValueEx(key, shell_folder_name)
return dir
def _get_win_folder_with_pywin32(csidl_name):
from win32com.shell import shellcon, shell
dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0)
# Try to make this a unicode path because SHGetFolderPath does
# not return unicode strings when there is unicode data in the
# path.
try:
dir = unicode(dir)
# Downgrade to short path name if have highbit chars. See
# <http://bugs.activestate.com/show_bug.cgi?id=85099>.
has_high_char = False
for c in dir:
if ord(c) > 255:
has_high_char = True
break
if has_high_char:
try:
import win32api
dir = win32api.GetShortPathName(dir)
except ImportError:
pass
except UnicodeError:
pass
return dir
def _get_win_folder_with_ctypes(csidl_name):
import ctypes
csidl_const = {
"CSIDL_APPDATA": 26,
"CSIDL_COMMON_APPDATA": 35,
"CSIDL_LOCAL_APPDATA": 28,
}[csidl_name]
buf = ctypes.create_unicode_buffer(1024)
ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)
# Downgrade to short path name if have highbit chars. See
# <http://bugs.activestate.com/show_bug.cgi?id=85099>.
has_high_char = False
for c in buf:
if ord(c) > 255:
has_high_char = True
break
if has_high_char:
buf2 = ctypes.create_unicode_buffer(1024)
if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
buf = buf2
return buf.value
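# Illustrative sketch (Windows-only; the result shown is an assumption for a
# typical install):
#
#     _get_win_folder_with_ctypes("CSIDL_LOCAL_APPDATA")
#     # -> 'C:\\Users\\<user>\\AppData\\Local'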
def _get_win_folder_with_jna(csidl_name):
import array
from com.sun import jna
from com.sun.jna.platform import win32
buf_size = win32.WinDef.MAX_PATH * 2
buf = array.zeros('c', buf_size)
shell = win32.Shell32.INSTANCE
shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf)
dir = jna.Native.toString(buf.tostring()).rstrip("\0")
# Downgrade to short path name if have highbit chars. See
# <http://bugs.activestate.com/show_bug.cgi?id=85099>.
has_high_char = False
for c in dir:
if ord(c) > 255:
has_high_char = True
break
if has_high_char:
buf = array.zeros('c', buf_size)
kernel = win32.Kernel32.INSTANCE
if kernel.GetShortPathName(dir, buf, buf_size):
dir = jna.Native.toString(buf.tostring()).rstrip("\0")
return dir
if system == "win32":
try:
import win32com.shell
_get_win_folder = _get_win_folder_with_pywin32
except ImportError:
try:
from ctypes import windll
_get_win_folder = _get_win_folder_with_ctypes
except ImportError:
try:
import com.sun.jna
_get_win_folder = _get_win_folder_with_jna
except ImportError:
_get_win_folder = _get_win_folder_from_registry
#---- self test code
if __name__ == "__main__":
appname = "MyApp"
appauthor = "MyCompany"
props = ("user_data_dir",
"user_config_dir",
"user_cache_dir",
"user_state_dir",
"user_log_dir",
"site_data_dir",
"site_config_dir")
print("-- app dirs %s --" % __version__)
print("-- app dirs (with optional 'version')")
dirs = AppDirs(appname, appauthor, version="1.0")
for prop in props:
print("%s: %s" % (prop, getattr(dirs, prop)))
print("\n-- app dirs (without optional 'version')")
dirs = AppDirs(appname, appauthor)
for prop in props:
print("%s: %s" % (prop, getattr(dirs, prop)))
print("\n-- app dirs (without optional 'appauthor')")
dirs = AppDirs(appname)
for prop in props:
print("%s: %s" % (prop, getattr(dirs, prop)))
print("\n-- app dirs (with disabled 'appauthor')")
dirs = AppDirs(appname, appauthor=False)
for prop in props:
print("%s: %s" % (prop, getattr(dirs, prop)))
| Django-locallibrary/env/Lib/site-packages/pkg_resources/_vendor/appdirs.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pkg_resources/_vendor/appdirs.py",
"repo_id": "Django-locallibrary",
"token_count": 10465
} | 33 |
"""distutils._msvccompiler
Contains MSVCCompiler, an implementation of the abstract CCompiler class
for Microsoft Visual Studio 2015.
The module is compatible with VS 2015 and later. You can find legacy support
for older versions in distutils.msvc9compiler and distutils.msvccompiler.
"""
# Written by Perry Stoll
# hacked by Robin Becker and Thomas Heller to do a better job of
# finding DevStudio (through the registry)
# ported to VS 2005 and VS 2008 by Christian Heimes
# ported to VS 2015 by Steve Dower
import os
import subprocess
import contextlib
with contextlib.suppress(ImportError):
import winreg
from distutils.errors import DistutilsExecError, DistutilsPlatformError, \
CompileError, LibError, LinkError
from distutils.ccompiler import CCompiler, gen_lib_options
from distutils import log
from distutils.util import get_platform
from itertools import count
def _find_vc2015():
try:
key = winreg.OpenKeyEx(
winreg.HKEY_LOCAL_MACHINE,
r"Software\Microsoft\VisualStudio\SxS\VC7",
access=winreg.KEY_READ | winreg.KEY_WOW64_32KEY
)
except OSError:
log.debug("Visual C++ is not registered")
return None, None
best_version = 0
best_dir = None
with key:
for i in count():
try:
v, vc_dir, vt = winreg.EnumValue(key, i)
except OSError:
break
if v and vt == winreg.REG_SZ and os.path.isdir(vc_dir):
try:
version = int(float(v))
except (ValueError, TypeError):
continue
if version >= 14 and version > best_version:
best_version, best_dir = version, vc_dir
return best_version, best_dir
def _find_vc2017():
"""Returns "15, path" based on the result of invoking vswhere.exe
If no install is found, returns "None, None"
The version is returned to avoid unnecessarily changing the function
result. It may be ignored when the path is not None.
If vswhere.exe is not available, by definition, VS 2017 is not
installed.
"""
root = os.environ.get("ProgramFiles(x86)") or os.environ.get("ProgramFiles")
if not root:
return None, None
try:
path = subprocess.check_output([
os.path.join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"),
"-latest",
"-prerelease",
"-requires", "Microsoft.VisualStudio.Component.VC.Tools.x86.x64",
"-property", "installationPath",
"-products", "*",
], encoding="mbcs", errors="strict").strip()
except (subprocess.CalledProcessError, OSError, UnicodeDecodeError):
return None, None
path = os.path.join(path, "VC", "Auxiliary", "Build")
if os.path.isdir(path):
return 15, path
return None, None
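# Illustrative sketch (assumption): vswhere.exe prints the installation root,
# from which the vcvarsall directory is derived, e.g.
#
#     C:\Program Files (x86)\Microsoft Visual Studio\2017\Community
#     -> ...\2017\Community\VC\Auxiliary\Build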
PLAT_SPEC_TO_RUNTIME = {
'x86' : 'x86',
'x86_amd64' : 'x64',
'x86_arm' : 'arm',
'x86_arm64' : 'arm64'
}
def _find_vcvarsall(plat_spec):
# bpo-38597: Removed vcruntime return value
_, best_dir = _find_vc2017()
if not best_dir:
best_version, best_dir = _find_vc2015()
if not best_dir:
log.debug("No suitable Visual C++ version found")
return None, None
vcvarsall = os.path.join(best_dir, "vcvarsall.bat")
if not os.path.isfile(vcvarsall):
log.debug("%s cannot be found", vcvarsall)
return None, None
return vcvarsall, None
def _get_vc_env(plat_spec):
if os.getenv("DISTUTILS_USE_SDK"):
return {
key.lower(): value
for key, value in os.environ.items()
}
vcvarsall, _ = _find_vcvarsall(plat_spec)
if not vcvarsall:
raise DistutilsPlatformError("Unable to find vcvarsall.bat")
try:
out = subprocess.check_output(
'cmd /u /c "{}" {} && set'.format(vcvarsall, plat_spec),
stderr=subprocess.STDOUT,
).decode('utf-16le', errors='replace')
except subprocess.CalledProcessError as exc:
log.error(exc.output)
raise DistutilsPlatformError("Error executing {}"
.format(exc.cmd))
env = {
key.lower(): value
for key, _, value in
(line.partition('=') for line in out.splitlines())
if key and value
}
return env
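# Illustrative sketch (assumption): the mapping returned by _get_vc_env is
# the environment produced by vcvarsall.bat with lower-cased keys, e.g.
#
#     env = _get_vc_env('x86_amd64')
#     env['path']     # compiler tool directories prepended to PATH
#     env['include']  # MSVC and Windows SDK header directories
#     env['lib']      # matching library directories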
def _find_exe(exe, paths=None):
"""Return path to an MSVC executable program.
Tries to find the program in several places: first, one of the
MSVC program search paths from the registry; next, the directories
in the PATH environment variable. If any of those work, return an
absolute path that is known to exist. If none of them work, just
return the original program name, 'exe'.
"""
if not paths:
        # default to '' so an unset PATH doesn't raise AttributeError
        paths = os.getenv('path', '').split(os.pathsep)
for p in paths:
fn = os.path.join(os.path.abspath(p), exe)
if os.path.isfile(fn):
return fn
return exe
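# Illustrative sketch (assumption):
#
#     cl = _find_exe("cl.exe", env.get('path', '').split(os.pathsep))
#     # -> absolute path to cl.exe if present on those paths, else "cl.exe"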
# A map keyed by get_platform() return values to values accepted by
# 'vcvarsall.bat'. Always cross-compile from x86 to work with the
# lighter-weight MSVC installs that do not include native 64-bit tools.
PLAT_TO_VCVARS = {
'win32' : 'x86',
'win-amd64' : 'x86_amd64',
'win-arm32' : 'x86_arm',
'win-arm64' : 'x86_arm64'
}
class MSVCCompiler(CCompiler) :
"""Concrete class that implements an interface to Microsoft Visual C++,
as defined by the CCompiler abstract class."""
compiler_type = 'msvc'
# Just set this so CCompiler's constructor doesn't barf. We currently
# don't use the 'set_executables()' bureaucracy provided by CCompiler,
# as it really isn't necessary for this sort of single-compiler class.
# Would be nice to have a consistent interface with UnixCCompiler,
# though, so it's worth thinking about.
executables = {}
# Private class data (need to distinguish C from C++ source for compiler)
_c_extensions = ['.c']
_cpp_extensions = ['.cc', '.cpp', '.cxx']
_rc_extensions = ['.rc']
_mc_extensions = ['.mc']
# Needed for the filename generation methods provided by the
# base class, CCompiler.
src_extensions = (_c_extensions + _cpp_extensions +
_rc_extensions + _mc_extensions)
res_extension = '.res'
obj_extension = '.obj'
static_lib_extension = '.lib'
shared_lib_extension = '.dll'
static_lib_format = shared_lib_format = '%s%s'
exe_extension = '.exe'
def __init__(self, verbose=0, dry_run=0, force=0):
        CCompiler.__init__(self, verbose, dry_run, force)
# target platform (.plat_name is consistent with 'bdist')
self.plat_name = None
self.initialized = False
def initialize(self, plat_name=None):
# multi-init means we would need to check platform same each time...
assert not self.initialized, "don't init multiple times"
if plat_name is None:
plat_name = get_platform()
# sanity check for platforms to prevent obscure errors later.
if plat_name not in PLAT_TO_VCVARS:
raise DistutilsPlatformError("--plat-name must be one of {}"
.format(tuple(PLAT_TO_VCVARS)))
# Get the vcvarsall.bat spec for the requested platform.
plat_spec = PLAT_TO_VCVARS[plat_name]
vc_env = _get_vc_env(plat_spec)
if not vc_env:
raise DistutilsPlatformError("Unable to find a compatible "
"Visual Studio installation.")
self._paths = vc_env.get('path', '')
paths = self._paths.split(os.pathsep)
self.cc = _find_exe("cl.exe", paths)
self.linker = _find_exe("link.exe", paths)
self.lib = _find_exe("lib.exe", paths)
self.rc = _find_exe("rc.exe", paths) # resource compiler
self.mc = _find_exe("mc.exe", paths) # message compiler
        self.mt = _find_exe("mt.exe", paths) # manifest tool (not a compiler)
for dir in vc_env.get('include', '').split(os.pathsep):
if dir:
self.add_include_dir(dir.rstrip(os.sep))
for dir in vc_env.get('lib', '').split(os.pathsep):
if dir:
self.add_library_dir(dir.rstrip(os.sep))
self.preprocess_options = None
# bpo-38597: Always compile with dynamic linking
# Future releases of Python 3.x will include all past
# versions of vcruntime*.dll for compatibility.
self.compile_options = [
'/nologo', '/Ox', '/W3', '/GL', '/DNDEBUG', '/MD'
]
self.compile_options_debug = [
'/nologo', '/Od', '/MDd', '/Zi', '/W3', '/D_DEBUG'
]
ldflags = [
'/nologo', '/INCREMENTAL:NO', '/LTCG'
]
ldflags_debug = [
'/nologo', '/INCREMENTAL:NO', '/LTCG', '/DEBUG:FULL'
]
self.ldflags_exe = [*ldflags, '/MANIFEST:EMBED,ID=1']
self.ldflags_exe_debug = [*ldflags_debug, '/MANIFEST:EMBED,ID=1']
self.ldflags_shared = [*ldflags, '/DLL', '/MANIFEST:EMBED,ID=2', '/MANIFESTUAC:NO']
self.ldflags_shared_debug = [*ldflags_debug, '/DLL', '/MANIFEST:EMBED,ID=2', '/MANIFESTUAC:NO']
self.ldflags_static = [*ldflags]
self.ldflags_static_debug = [*ldflags_debug]
self._ldflags = {
(CCompiler.EXECUTABLE, None): self.ldflags_exe,
(CCompiler.EXECUTABLE, False): self.ldflags_exe,
(CCompiler.EXECUTABLE, True): self.ldflags_exe_debug,
(CCompiler.SHARED_OBJECT, None): self.ldflags_shared,
(CCompiler.SHARED_OBJECT, False): self.ldflags_shared,
(CCompiler.SHARED_OBJECT, True): self.ldflags_shared_debug,
(CCompiler.SHARED_LIBRARY, None): self.ldflags_static,
(CCompiler.SHARED_LIBRARY, False): self.ldflags_static,
(CCompiler.SHARED_LIBRARY, True): self.ldflags_static_debug,
}
self.initialized = True
# -- Worker methods ------------------------------------------------
def object_filenames(self,
source_filenames,
strip_dir=0,
output_dir=''):
ext_map = {
**{ext: self.obj_extension for ext in self.src_extensions},
**{ext: self.res_extension for ext in self._rc_extensions + self._mc_extensions},
}
output_dir = output_dir or ''
def make_out_path(p):
base, ext = os.path.splitext(p)
if strip_dir:
base = os.path.basename(base)
else:
_, base = os.path.splitdrive(base)
if base.startswith((os.path.sep, os.path.altsep)):
base = base[1:]
try:
# XXX: This may produce absurdly long paths. We should check
# the length of the result and trim base until we fit within
# 260 characters.
return os.path.join(output_dir, base + ext_map[ext])
except LookupError:
# Better to raise an exception instead of silently continuing
# and later complain about sources and targets having
# different lengths
raise CompileError("Don't know how to compile {}".format(p))
return list(map(make_out_path, source_filenames))
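    # Illustrative sketch (assumption): extension-driven output mapping.
    #
    #     compiler.object_filenames(['foo.c', 'bar.rc'], output_dir='build')
    #     # -> ['build\\foo.obj', 'build\\bar.res']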
def compile(self, sources,
output_dir=None, macros=None, include_dirs=None, debug=0,
extra_preargs=None, extra_postargs=None, depends=None):
if not self.initialized:
self.initialize()
compile_info = self._setup_compile(output_dir, macros, include_dirs,
sources, depends, extra_postargs)
macros, objects, extra_postargs, pp_opts, build = compile_info
compile_opts = extra_preargs or []
compile_opts.append('/c')
if debug:
compile_opts.extend(self.compile_options_debug)
else:
compile_opts.extend(self.compile_options)
add_cpp_opts = False
for obj in objects:
try:
src, ext = build[obj]
except KeyError:
continue
if debug:
# pass the full pathname to MSVC in debug mode,
# this allows the debugger to find the source file
# without asking the user to browse for it
src = os.path.abspath(src)
if ext in self._c_extensions:
input_opt = "/Tc" + src
elif ext in self._cpp_extensions:
input_opt = "/Tp" + src
add_cpp_opts = True
elif ext in self._rc_extensions:
# compile .RC to .RES file
input_opt = src
output_opt = "/fo" + obj
try:
self.spawn([self.rc] + pp_opts + [output_opt, input_opt])
except DistutilsExecError as msg:
raise CompileError(msg)
continue
elif ext in self._mc_extensions:
# Compile .MC to .RC file to .RES file.
# * '-h dir' specifies the directory for the
# generated include file
# * '-r dir' specifies the target directory of the
# generated RC file and the binary message resource
# it includes
#
# For now (since there are no options to change this),
# we use the source-directory for the include file and
# the build directory for the RC file and message
# resources. This works at least for win32all.
h_dir = os.path.dirname(src)
rc_dir = os.path.dirname(obj)
try:
# first compile .MC to .RC and .H file
self.spawn([self.mc, '-h', h_dir, '-r', rc_dir, src])
                    base, _ = os.path.splitext(os.path.basename(src))
rc_file = os.path.join(rc_dir, base + '.rc')
# then compile .RC to .RES file
self.spawn([self.rc, "/fo" + obj, rc_file])
except DistutilsExecError as msg:
raise CompileError(msg)
continue
else:
# how to handle this file?
raise CompileError("Don't know how to compile {} to {}"
.format(src, obj))
args = [self.cc] + compile_opts + pp_opts
if add_cpp_opts:
args.append('/EHsc')
args.append(input_opt)
args.append("/Fo" + obj)
args.extend(extra_postargs)
try:
self.spawn(args)
except DistutilsExecError as msg:
raise CompileError(msg)
return objects
def create_static_lib(self,
objects,
output_libname,
output_dir=None,
debug=0,
target_lang=None):
if not self.initialized:
self.initialize()
objects, output_dir = self._fix_object_args(objects, output_dir)
output_filename = self.library_filename(output_libname,
output_dir=output_dir)
if self._need_link(objects, output_filename):
lib_args = objects + ['/OUT:' + output_filename]
if debug:
pass # XXX what goes here?
try:
log.debug('Executing "%s" %s', self.lib, ' '.join(lib_args))
self.spawn([self.lib] + lib_args)
except DistutilsExecError as msg:
raise LibError(msg)
else:
log.debug("skipping %s (up-to-date)", output_filename)
def link(self,
target_desc,
objects,
output_filename,
output_dir=None,
libraries=None,
library_dirs=None,
runtime_library_dirs=None,
export_symbols=None,
debug=0,
extra_preargs=None,
extra_postargs=None,
build_temp=None,
target_lang=None):
if not self.initialized:
self.initialize()
objects, output_dir = self._fix_object_args(objects, output_dir)
fixed_args = self._fix_lib_args(libraries, library_dirs,
runtime_library_dirs)
libraries, library_dirs, runtime_library_dirs = fixed_args
if runtime_library_dirs:
self.warn("I don't know what to do with 'runtime_library_dirs': "
+ str(runtime_library_dirs))
lib_opts = gen_lib_options(self,
library_dirs, runtime_library_dirs,
libraries)
if output_dir is not None:
output_filename = os.path.join(output_dir, output_filename)
if self._need_link(objects, output_filename):
ldflags = self._ldflags[target_desc, debug]
export_opts = ["/EXPORT:" + sym for sym in (export_symbols or [])]
ld_args = (ldflags + lib_opts + export_opts +
objects + ['/OUT:' + output_filename])
# The MSVC linker generates .lib and .exp files, which cannot be
# suppressed by any linker switches. The .lib files may even be
# needed! Make sure they are generated in the temporary build
# directory. Since they have different names for debug and release
# builds, they can go into the same directory.
build_temp = os.path.dirname(objects[0])
if export_symbols is not None:
(dll_name, dll_ext) = os.path.splitext(
os.path.basename(output_filename))
implib_file = os.path.join(
build_temp,
self.library_filename(dll_name))
                ld_args.append('/IMPLIB:' + implib_file)
if extra_preargs:
ld_args[:0] = extra_preargs
if extra_postargs:
ld_args.extend(extra_postargs)
output_dir = os.path.dirname(os.path.abspath(output_filename))
self.mkpath(output_dir)
try:
log.debug('Executing "%s" %s', self.linker, ' '.join(ld_args))
self.spawn([self.linker] + ld_args)
except DistutilsExecError as msg:
raise LinkError(msg)
else:
log.debug("skipping %s (up-to-date)", output_filename)
def spawn(self, cmd):
env = dict(os.environ, PATH=self._paths)
return super().spawn(cmd, env=env)
# -- Miscellaneous methods -----------------------------------------
# These are all used by the 'gen_lib_options() function, in
# ccompiler.py.
def library_dir_option(self, dir):
return "/LIBPATH:" + dir
def runtime_library_dir_option(self, dir):
raise DistutilsPlatformError(
"don't know how to set runtime library search path for MSVC")
def library_option(self, lib):
return self.library_filename(lib)
def find_library_file(self, dirs, lib, debug=0):
# Prefer a debugging library if found (and requested), but deal
# with it if we don't have one.
if debug:
try_names = [lib + "_d", lib]
else:
try_names = [lib]
for dir in dirs:
for name in try_names:
libfile = os.path.join(dir, self.library_filename(name))
if os.path.isfile(libfile):
return libfile
else:
# Oops, didn't find it in *any* of 'dirs'
return None
| Django-locallibrary/env/Lib/site-packages/setuptools/_distutils/_msvccompiler.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/setuptools/_distutils/_msvccompiler.py",
"repo_id": "Django-locallibrary",
"token_count": 9518
} | 34 |
"""distutils.command.bdist_rpm
Implements the Distutils 'bdist_rpm' command (create RPM source and binary
distributions)."""
import subprocess, sys, os
from distutils.core import Command
from distutils.debug import DEBUG
from distutils.file_util import write_file
from distutils.errors import *
from distutils.sysconfig import get_python_version
from distutils import log
class bdist_rpm(Command):
description = "create an RPM distribution"
user_options = [
('bdist-base=', None,
"base directory for creating built distributions"),
('rpm-base=', None,
"base directory for creating RPMs (defaults to \"rpm\" under "
"--bdist-base; must be specified for RPM 2)"),
('dist-dir=', 'd',
"directory to put final RPM files in "
"(and .spec files if --spec-only)"),
('python=', None,
"path to Python interpreter to hard-code in the .spec file "
"(default: \"python\")"),
('fix-python', None,
"hard-code the exact path to the current Python interpreter in "
"the .spec file"),
('spec-only', None,
"only regenerate spec file"),
('source-only', None,
"only generate source RPM"),
('binary-only', None,
"only generate binary RPM"),
('use-bzip2', None,
"use bzip2 instead of gzip to create source distribution"),
# More meta-data: too RPM-specific to put in the setup script,
# but needs to go in the .spec file -- so we make these options
# to "bdist_rpm". The idea is that packagers would put this
# info in setup.cfg, although they are of course free to
# supply it on the command line.
('distribution-name=', None,
"name of the (Linux) distribution to which this "
"RPM applies (*not* the name of the module distribution!)"),
('group=', None,
"package classification [default: \"Development/Libraries\"]"),
('release=', None,
"RPM release number"),
('serial=', None,
"RPM serial number"),
('vendor=', None,
"RPM \"vendor\" (eg. \"Joe Blow <[email protected]>\") "
"[default: maintainer or author from setup script]"),
('packager=', None,
"RPM packager (eg. \"Jane Doe <[email protected]>\") "
"[default: vendor]"),
('doc-files=', None,
"list of documentation files (space or comma-separated)"),
('changelog=', None,
"RPM changelog"),
('icon=', None,
"name of icon file"),
('provides=', None,
"capabilities provided by this package"),
('requires=', None,
"capabilities required by this package"),
('conflicts=', None,
"capabilities which conflict with this package"),
('build-requires=', None,
"capabilities required to build this package"),
('obsoletes=', None,
"capabilities made obsolete by this package"),
('no-autoreq', None,
"do not automatically calculate dependencies"),
# Actions to take when building RPM
('keep-temp', 'k',
"don't clean up RPM build directory"),
('no-keep-temp', None,
"clean up RPM build directory [default]"),
('use-rpm-opt-flags', None,
"compile with RPM_OPT_FLAGS when building from source RPM"),
('no-rpm-opt-flags', None,
"do not pass any RPM CFLAGS to compiler"),
('rpm3-mode', None,
"RPM 3 compatibility mode (default)"),
('rpm2-mode', None,
"RPM 2 compatibility mode"),
# Add the hooks necessary for specifying custom scripts
('prep-script=', None,
"Specify a script for the PREP phase of RPM building"),
('build-script=', None,
"Specify a script for the BUILD phase of RPM building"),
('pre-install=', None,
"Specify a script for the pre-INSTALL phase of RPM building"),
('install-script=', None,
"Specify a script for the INSTALL phase of RPM building"),
('post-install=', None,
"Specify a script for the post-INSTALL phase of RPM building"),
('pre-uninstall=', None,
"Specify a script for the pre-UNINSTALL phase of RPM building"),
('post-uninstall=', None,
"Specify a script for the post-UNINSTALL phase of RPM building"),
('clean-script=', None,
"Specify a script for the CLEAN phase of RPM building"),
('verify-script=', None,
"Specify a script for the VERIFY phase of the RPM build"),
# Allow a packager to explicitly force an architecture
('force-arch=', None,
"Force an architecture onto the RPM build process"),
('quiet', 'q',
"Run the INSTALL phase of RPM building in quiet mode"),
]
boolean_options = ['keep-temp', 'use-rpm-opt-flags', 'rpm3-mode',
'no-autoreq', 'quiet']
negative_opt = {'no-keep-temp': 'keep-temp',
'no-rpm-opt-flags': 'use-rpm-opt-flags',
'rpm2-mode': 'rpm3-mode'}
def initialize_options(self):
self.bdist_base = None
self.rpm_base = None
self.dist_dir = None
self.python = None
self.fix_python = None
self.spec_only = None
self.binary_only = None
self.source_only = None
self.use_bzip2 = None
self.distribution_name = None
self.group = None
self.release = None
self.serial = None
self.vendor = None
self.packager = None
self.doc_files = None
self.changelog = None
self.icon = None
self.prep_script = None
self.build_script = None
self.install_script = None
self.clean_script = None
self.verify_script = None
self.pre_install = None
self.post_install = None
self.pre_uninstall = None
self.post_uninstall = None
self.prep = None
self.provides = None
self.requires = None
self.conflicts = None
self.build_requires = None
self.obsoletes = None
self.keep_temp = 0
self.use_rpm_opt_flags = 1
self.rpm3_mode = 1
self.no_autoreq = 0
self.force_arch = None
self.quiet = 0
def finalize_options(self):
self.set_undefined_options('bdist', ('bdist_base', 'bdist_base'))
if self.rpm_base is None:
if not self.rpm3_mode:
raise DistutilsOptionError(
"you must specify --rpm-base in RPM 2 mode")
self.rpm_base = os.path.join(self.bdist_base, "rpm")
if self.python is None:
if self.fix_python:
self.python = sys.executable
else:
self.python = "python3"
elif self.fix_python:
raise DistutilsOptionError(
"--python and --fix-python are mutually exclusive options")
if os.name != 'posix':
raise DistutilsPlatformError("don't know how to create RPM "
"distributions on platform %s" % os.name)
if self.binary_only and self.source_only:
raise DistutilsOptionError(
"cannot supply both '--source-only' and '--binary-only'")
# don't pass CFLAGS to pure python distributions
if not self.distribution.has_ext_modules():
self.use_rpm_opt_flags = 0
self.set_undefined_options('bdist', ('dist_dir', 'dist_dir'))
self.finalize_package_data()
def finalize_package_data(self):
self.ensure_string('group', "Development/Libraries")
self.ensure_string('vendor',
"%s <%s>" % (self.distribution.get_contact(),
self.distribution.get_contact_email()))
self.ensure_string('packager')
self.ensure_string_list('doc_files')
if isinstance(self.doc_files, list):
for readme in ('README', 'README.txt'):
if os.path.exists(readme) and readme not in self.doc_files:
self.doc_files.append(readme)
self.ensure_string('release', "1")
self.ensure_string('serial') # should it be an int?
self.ensure_string('distribution_name')
self.ensure_string('changelog')
# Format changelog correctly
self.changelog = self._format_changelog(self.changelog)
self.ensure_filename('icon')
self.ensure_filename('prep_script')
self.ensure_filename('build_script')
self.ensure_filename('install_script')
self.ensure_filename('clean_script')
self.ensure_filename('verify_script')
self.ensure_filename('pre_install')
self.ensure_filename('post_install')
self.ensure_filename('pre_uninstall')
self.ensure_filename('post_uninstall')
# XXX don't forget we punted on summaries and descriptions -- they
# should be handled here eventually!
# Now *this* is some meta-data that belongs in the setup script...
self.ensure_string_list('provides')
self.ensure_string_list('requires')
self.ensure_string_list('conflicts')
self.ensure_string_list('build_requires')
self.ensure_string_list('obsoletes')
self.ensure_string('force_arch')
def run(self):
if DEBUG:
print("before _get_package_data():")
print("vendor =", self.vendor)
print("packager =", self.packager)
print("doc_files =", self.doc_files)
print("changelog =", self.changelog)
# make directories
if self.spec_only:
spec_dir = self.dist_dir
self.mkpath(spec_dir)
else:
rpm_dir = {}
for d in ('SOURCES', 'SPECS', 'BUILD', 'RPMS', 'SRPMS'):
rpm_dir[d] = os.path.join(self.rpm_base, d)
self.mkpath(rpm_dir[d])
spec_dir = rpm_dir['SPECS']
        # Spec file goes into 'dist_dir' if '--spec-only' specified,
# build/rpm.<plat> otherwise.
spec_path = os.path.join(spec_dir,
"%s.spec" % self.distribution.get_name())
self.execute(write_file,
(spec_path,
self._make_spec_file()),
"writing '%s'" % spec_path)
if self.spec_only: # stop if requested
return
# Make a source distribution and copy to SOURCES directory with
# optional icon.
saved_dist_files = self.distribution.dist_files[:]
sdist = self.reinitialize_command('sdist')
if self.use_bzip2:
sdist.formats = ['bztar']
else:
sdist.formats = ['gztar']
self.run_command('sdist')
self.distribution.dist_files = saved_dist_files
source = sdist.get_archive_files()[0]
source_dir = rpm_dir['SOURCES']
self.copy_file(source, source_dir)
if self.icon:
if os.path.exists(self.icon):
self.copy_file(self.icon, source_dir)
else:
raise DistutilsFileError(
"icon file '%s' does not exist" % self.icon)
# build package
log.info("building RPMs")
rpm_cmd = ['rpmbuild']
if self.source_only: # what kind of RPMs?
rpm_cmd.append('-bs')
elif self.binary_only:
rpm_cmd.append('-bb')
else:
rpm_cmd.append('-ba')
rpm_cmd.extend(['--define', '__python %s' % self.python])
if self.rpm3_mode:
rpm_cmd.extend(['--define',
'_topdir %s' % os.path.abspath(self.rpm_base)])
if not self.keep_temp:
rpm_cmd.append('--clean')
if self.quiet:
rpm_cmd.append('--quiet')
rpm_cmd.append(spec_path)
# Determine the binary rpm names that should be built out of this spec
# file
# Note that some of these may not be really built (if the file
# list is empty)
nvr_string = "%{name}-%{version}-%{release}"
src_rpm = nvr_string + ".src.rpm"
non_src_rpm = "%{arch}/" + nvr_string + ".%{arch}.rpm"
q_cmd = r"rpm -q --qf '%s %s\n' --specfile '%s'" % (
src_rpm, non_src_rpm, spec_path)
out = os.popen(q_cmd)
try:
binary_rpms = []
source_rpm = None
while True:
line = out.readline()
if not line:
break
                fields = line.strip().split()
                assert len(fields) == 2
                binary_rpms.append(fields[1])
                # The source rpm is named after the first entry in the spec file
                if source_rpm is None:
                    source_rpm = fields[0]
status = out.close()
if status:
raise DistutilsExecError("Failed to execute: %s" % repr(q_cmd))
finally:
out.close()
self.spawn(rpm_cmd)
if not self.dry_run:
if self.distribution.has_ext_modules():
pyversion = get_python_version()
else:
pyversion = 'any'
if not self.binary_only:
srpm = os.path.join(rpm_dir['SRPMS'], source_rpm)
                assert os.path.exists(srpm)
self.move_file(srpm, self.dist_dir)
filename = os.path.join(self.dist_dir, source_rpm)
self.distribution.dist_files.append(
('bdist_rpm', pyversion, filename))
if not self.source_only:
for rpm in binary_rpms:
rpm = os.path.join(rpm_dir['RPMS'], rpm)
if os.path.exists(rpm):
self.move_file(rpm, self.dist_dir)
filename = os.path.join(self.dist_dir,
os.path.basename(rpm))
self.distribution.dist_files.append(
('bdist_rpm', pyversion, filename))
def _dist_path(self, path):
return os.path.join(self.dist_dir, os.path.basename(path))
def _make_spec_file(self):
"""Generate the text of an RPM spec file and return it as a
list of strings (one per line).
"""
# definitions and headers
spec_file = [
'%define name ' + self.distribution.get_name(),
'%define version ' + self.distribution.get_version().replace('-','_'),
'%define unmangled_version ' + self.distribution.get_version(),
'%define release ' + self.release.replace('-','_'),
'',
'Summary: ' + self.distribution.get_description(),
]
# Workaround for #14443 which affects some RPM based systems such as
# RHEL6 (and probably derivatives)
vendor_hook = subprocess.getoutput('rpm --eval %{__os_install_post}')
# Generate a potential replacement value for __os_install_post (whilst
# normalizing the whitespace to simplify the test for whether the
# invocation of brp-python-bytecompile passes in __python):
vendor_hook = '\n'.join([' %s \\' % line.strip()
for line in vendor_hook.splitlines()])
problem = "brp-python-bytecompile \\\n"
fixed = "brp-python-bytecompile %{__python} \\\n"
fixed_hook = vendor_hook.replace(problem, fixed)
if fixed_hook != vendor_hook:
spec_file.append('# Workaround for http://bugs.python.org/issue14443')
spec_file.append('%define __os_install_post ' + fixed_hook + '\n')
# put locale summaries into spec file
# XXX not supported for now (hard to put a dictionary
# in a config file -- arg!)
#for locale in self.summaries.keys():
# spec_file.append('Summary(%s): %s' % (locale,
# self.summaries[locale]))
spec_file.extend([
'Name: %{name}',
'Version: %{version}',
'Release: %{release}',])
# XXX yuck! this filename is available from the "sdist" command,
# but only after it has run: and we create the spec file before
# running "sdist", in case of --spec-only.
if self.use_bzip2:
spec_file.append('Source0: %{name}-%{unmangled_version}.tar.bz2')
else:
spec_file.append('Source0: %{name}-%{unmangled_version}.tar.gz')
spec_file.extend([
'License: ' + self.distribution.get_license(),
'Group: ' + self.group,
'BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-buildroot',
'Prefix: %{_prefix}', ])
if not self.force_arch:
# noarch if no extension modules
if not self.distribution.has_ext_modules():
spec_file.append('BuildArch: noarch')
else:
            spec_file.append('BuildArch: %s' % self.force_arch)
for field in ('Vendor',
'Packager',
'Provides',
'Requires',
'Conflicts',
'Obsoletes',
):
val = getattr(self, field.lower())
if isinstance(val, list):
spec_file.append('%s: %s' % (field, ' '.join(val)))
elif val is not None:
spec_file.append('%s: %s' % (field, val))
if self.distribution.get_url() != 'UNKNOWN':
spec_file.append('Url: ' + self.distribution.get_url())
if self.distribution_name:
spec_file.append('Distribution: ' + self.distribution_name)
if self.build_requires:
spec_file.append('BuildRequires: ' +
' '.join(self.build_requires))
if self.icon:
spec_file.append('Icon: ' + os.path.basename(self.icon))
if self.no_autoreq:
spec_file.append('AutoReq: 0')
spec_file.extend([
'',
'%description',
self.distribution.get_long_description()
])
# put locale descriptions into spec file
# XXX again, suppressed because config file syntax doesn't
# easily support this ;-(
#for locale in self.descriptions.keys():
# spec_file.extend([
# '',
# '%description -l ' + locale,
# self.descriptions[locale],
# ])
# rpm scripts
# figure out default build script
def_setup_call = "%s %s" % (self.python,os.path.basename(sys.argv[0]))
def_build = "%s build" % def_setup_call
if self.use_rpm_opt_flags:
def_build = 'env CFLAGS="$RPM_OPT_FLAGS" ' + def_build
# insert contents of files
# XXX this is kind of misleading: user-supplied options are files
# that we open and interpolate into the spec file, but the defaults
# are just text that we drop in as-is. Hmmm.
install_cmd = ('%s install -O1 --root=$RPM_BUILD_ROOT '
'--record=INSTALLED_FILES') % def_setup_call
script_options = [
('prep', 'prep_script', "%setup -n %{name}-%{unmangled_version}"),
('build', 'build_script', def_build),
('install', 'install_script', install_cmd),
('clean', 'clean_script', "rm -rf $RPM_BUILD_ROOT"),
('verifyscript', 'verify_script', None),
('pre', 'pre_install', None),
('post', 'post_install', None),
('preun', 'pre_uninstall', None),
('postun', 'post_uninstall', None),
]
for (rpm_opt, attr, default) in script_options:
# Insert contents of file referred to, if no file is referred to
# use 'default' as contents of script
val = getattr(self, attr)
if val or default:
spec_file.extend([
'',
'%' + rpm_opt,])
if val:
with open(val) as f:
spec_file.extend(f.read().split('\n'))
else:
spec_file.append(default)
# files section
spec_file.extend([
'',
'%files -f INSTALLED_FILES',
'%defattr(-,root,root)',
])
if self.doc_files:
spec_file.append('%doc ' + ' '.join(self.doc_files))
if self.changelog:
spec_file.extend([
'',
'%changelog',])
spec_file.extend(self.changelog)
return spec_file
def _format_changelog(self, changelog):
"""Format the changelog correctly and convert it to a list of strings
"""
if not changelog:
return changelog
new_changelog = []
        for line in changelog.strip().split('\n'):
            line = line.strip()
            if not line:
                # skip blank interior lines, which would otherwise raise
                # an IndexError on line[0] below
                continue
            if line[0] == '*':
new_changelog.extend(['', line])
elif line[0] == '-':
new_changelog.append(line)
else:
new_changelog.append(' ' + line)
# strip trailing newline inserted by first changelog entry
if not new_changelog[0]:
del new_changelog[0]
return new_changelog
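# Illustrative sketch (assumption): how _format_changelog normalizes a
# changelog string from setup.cfg into spec-file lines.
#
#     raw = ("* Tue Feb 1 2022 Jane Doe <[email protected]>\n"
#            "- first release\n"
#            "extra detail")
#     # -> ['* Tue Feb 1 2022 Jane Doe <[email protected]>',
#     #     '- first release',
#     #     '  extra detail']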
| Django-locallibrary/env/Lib/site-packages/setuptools/_distutils/command/bdist_rpm.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/setuptools/_distutils/command/bdist_rpm.py",
"repo_id": "Django-locallibrary",
"token_count": 10372
} | 35 |
"""distutils.command.register
Implements the Distutils 'register' command (register with the repository).
"""
# created 2002/10/21, Richard Jones
import getpass
import io
import urllib.parse, urllib.request
from warnings import warn
from distutils.core import PyPIRCCommand
from distutils.errors import *
from distutils import log
class register(PyPIRCCommand):
description = ("register the distribution with the Python package index")
user_options = PyPIRCCommand.user_options + [
('list-classifiers', None,
'list the valid Trove classifiers'),
        ('strict', None,
         'Stop the registration if the meta-data is not fully compliant')
]
boolean_options = PyPIRCCommand.boolean_options + [
'verify', 'list-classifiers', 'strict']
sub_commands = [('check', lambda self: True)]
def initialize_options(self):
PyPIRCCommand.initialize_options(self)
self.list_classifiers = 0
self.strict = 0
def finalize_options(self):
PyPIRCCommand.finalize_options(self)
# setting options for the `check` subcommand
check_options = {'strict': ('register', self.strict),
'restructuredtext': ('register', 1)}
self.distribution.command_options['check'] = check_options
def run(self):
self.finalize_options()
self._set_config()
# Run sub commands
for cmd_name in self.get_sub_commands():
self.run_command(cmd_name)
if self.dry_run:
self.verify_metadata()
elif self.list_classifiers:
self.classifiers()
else:
self.send_metadata()
def check_metadata(self):
"""Deprecated API."""
warn("distutils.command.register.check_metadata is deprecated, \
use the check command instead", PendingDeprecationWarning)
check = self.distribution.get_command_obj('check')
check.ensure_finalized()
check.strict = self.strict
check.restructuredtext = 1
check.run()
def _set_config(self):
''' Reads the configuration file and set attributes.
'''
config = self._read_pypirc()
if config != {}:
self.username = config['username']
self.password = config['password']
self.repository = config['repository']
self.realm = config['realm']
self.has_config = True
else:
if self.repository not in ('pypi', self.DEFAULT_REPOSITORY):
raise ValueError('%s not found in .pypirc' % self.repository)
if self.repository == 'pypi':
self.repository = self.DEFAULT_REPOSITORY
self.has_config = False
def classifiers(self):
''' Fetch the list of classifiers from the server.
'''
        url = self.repository + '?:action=list_classifiers'
response = urllib.request.urlopen(url)
log.info(self._read_pypi_response(response))
def verify_metadata(self):
''' Send the metadata to the package index server to be checked.
'''
# send the info to the server and report the result
(code, result) = self.post_to_server(self.build_post_data('verify'))
log.info('Server response (%s): %s', code, result)
def send_metadata(self):
''' Send the metadata to the package index server.
Well, do the following:
1. figure who the user is, and then
2. send the data as a Basic auth'ed POST.
First we try to read the username/password from $HOME/.pypirc,
which is a ConfigParser-formatted file with a section
[distutils] containing username and password entries (both
in clear text). Eg:
[distutils]
index-servers =
pypi
[pypi]
username: fred
password: sekrit
Otherwise, to figure who the user is, we offer the user three
choices:
1. use existing login,
2. register as a new user, or
3. set the password to a random string and email the user.
'''
# see if we can short-cut and get the username/password from the
# config
if self.has_config:
choice = '1'
username = self.username
password = self.password
else:
choice = 'x'
username = password = ''
# get the user's login info
choices = '1 2 3 4'.split()
while choice not in choices:
self.announce('''\
We need to know who you are, so please choose either:
1. use your existing login,
2. register as a new user,
3. have the server generate a new password for you (and email it to you), or
4. quit
Your selection [default 1]: ''', log.INFO)
choice = input()
if not choice:
choice = '1'
elif choice not in choices:
print('Please choose one of the four options!')
if choice == '1':
# get the username and password
while not username:
username = input('Username: ')
while not password:
password = getpass.getpass('Password: ')
# set up the authentication
auth = urllib.request.HTTPPasswordMgr()
host = urllib.parse.urlparse(self.repository)[1]
auth.add_password(self.realm, host, username, password)
# send the info to the server and report the result
code, result = self.post_to_server(self.build_post_data('submit'),
auth)
self.announce('Server response (%s): %s' % (code, result),
log.INFO)
# possibly save the login
if code == 200:
if self.has_config:
# sharing the password in the distribution instance
# so the upload command can reuse it
self.distribution.password = password
else:
self.announce(('I can store your PyPI login so future '
'submissions will be faster.'), log.INFO)
self.announce('(the login will be stored in %s)' % \
self._get_rc_file(), log.INFO)
choice = 'X'
while choice.lower() not in 'yn':
choice = input('Save your login (y/N)?')
if not choice:
choice = 'n'
if choice.lower() == 'y':
self._store_pypirc(username, password)
elif choice == '2':
data = {':action': 'user'}
data['name'] = data['password'] = data['email'] = ''
data['confirm'] = None
while not data['name']:
data['name'] = input('Username: ')
while data['password'] != data['confirm']:
while not data['password']:
data['password'] = getpass.getpass('Password: ')
while not data['confirm']:
data['confirm'] = getpass.getpass(' Confirm: ')
if data['password'] != data['confirm']:
data['password'] = ''
data['confirm'] = None
print("Password and confirm don't match!")
while not data['email']:
data['email'] = input(' EMail: ')
code, result = self.post_to_server(data)
if code != 200:
log.info('Server response (%s): %s', code, result)
else:
log.info('You will receive an email shortly.')
log.info(('Follow the instructions in it to '
'complete registration.'))
elif choice == '3':
data = {':action': 'password_reset'}
data['email'] = ''
while not data['email']:
data['email'] = input('Your email address: ')
code, result = self.post_to_server(data)
log.info('Server response (%s): %s', code, result)
def build_post_data(self, action):
# figure the data to send - the metadata plus some additional
# information used by the package server
meta = self.distribution.metadata
data = {
':action': action,
'metadata_version' : '1.0',
'name': meta.get_name(),
'version': meta.get_version(),
'summary': meta.get_description(),
'home_page': meta.get_url(),
'author': meta.get_contact(),
'author_email': meta.get_contact_email(),
'license': meta.get_licence(),
'description': meta.get_long_description(),
'keywords': meta.get_keywords(),
'platform': meta.get_platforms(),
'classifiers': meta.get_classifiers(),
'download_url': meta.get_download_url(),
# PEP 314
'provides': meta.get_provides(),
'requires': meta.get_requires(),
'obsoletes': meta.get_obsoletes(),
}
if data['provides'] or data['requires'] or data['obsoletes']:
data['metadata_version'] = '1.1'
return data
def post_to_server(self, data, auth=None):
''' Post a query to the server, and return a string response.
'''
if 'name' in data:
self.announce('Registering %s to %s' % (data['name'],
self.repository),
log.INFO)
# Build up the MIME payload for the urllib2 POST data
boundary = '--------------GHSKFJDLGDS7543FJKLFHRE75642756743254'
sep_boundary = '\n--' + boundary
end_boundary = sep_boundary + '--'
body = io.StringIO()
for key, value in data.items():
# handle multiple entries for the same name
if type(value) not in (type([]), type( () )):
value = [value]
for value in value:
value = str(value)
body.write(sep_boundary)
body.write('\nContent-Disposition: form-data; name="%s"'%key)
body.write("\n\n")
body.write(value)
if value and value[-1] == '\r':
body.write('\n') # write an extra newline (lurve Macs)
body.write(end_boundary)
body.write("\n")
body = body.getvalue().encode("utf-8")
# build the Request
headers = {
'Content-type': 'multipart/form-data; boundary=%s; charset=utf-8'%boundary,
'Content-length': str(len(body))
}
req = urllib.request.Request(self.repository, body, headers)
# handle HTTP and include the Basic Auth handler
opener = urllib.request.build_opener(
urllib.request.HTTPBasicAuthHandler(password_mgr=auth)
)
data = ''
try:
result = opener.open(req)
except urllib.error.HTTPError as e:
if self.show_response:
data = e.fp.read()
result = e.code, e.msg
except urllib.error.URLError as e:
result = 500, str(e)
else:
if self.show_response:
data = self._read_pypi_response(result)
result = 200, 'OK'
if self.show_response:
msg = '\n'.join(('-' * 75, data, '-' * 75))
self.announce(msg, log.INFO)
return result
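# --- Illustrative sketch (editor's addition; not part of distutils) ---
# post_to_server() above hand-rolls a multipart/form-data body. This is a
# minimal, hedged reproduction of that encoding for a small payload; the
# boundary value is arbitrary, just as it is above, and the helper name
# is hypothetical.
def _demo_multipart(data, boundary="--------------DEMO-BOUNDARY"):
    import io
    sep, end = "\n--" + boundary, "\n--" + boundary + "--"
    body = io.StringIO()
    for key, value in data.items():
        body.write(sep)
        body.write('\nContent-Disposition: form-data; name="%s"' % key)
        body.write("\n\n")
        body.write(str(value))
    body.write(end + "\n")
    return body.getvalue().encode("utf-8")
# _demo_multipart({':action': 'submit', 'name': 'example'}) approximates the
# wire format the register command sends, minus the real metadata fields.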
| Django-locallibrary/env/Lib/site-packages/setuptools/_distutils/command/register.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/setuptools/_distutils/command/register.py",
"repo_id": "Django-locallibrary",
"token_count": 5634
} | 36 |
"""distutils.msvc9compiler
Contains MSVCCompiler, an implementation of the abstract CCompiler class
for the Microsoft Visual Studio 2008.
The module is compatible with VS 2005 and VS 2008. You can find legacy support
for older versions of VS in distutils.msvccompiler.
"""
# Written by Perry Stoll
# hacked by Robin Becker and Thomas Heller to do a better job of
# finding DevStudio (through the registry)
# ported to VS2005 and VS 2008 by Christian Heimes
import os
import subprocess
import sys
import re
from distutils.errors import DistutilsExecError, DistutilsPlatformError, \
CompileError, LibError, LinkError
from distutils.ccompiler import CCompiler, gen_lib_options
from distutils import log
from distutils.util import get_platform
import winreg
RegOpenKeyEx = winreg.OpenKeyEx
RegEnumKey = winreg.EnumKey
RegEnumValue = winreg.EnumValue
RegError = winreg.error
HKEYS = (winreg.HKEY_USERS,
winreg.HKEY_CURRENT_USER,
winreg.HKEY_LOCAL_MACHINE,
winreg.HKEY_CLASSES_ROOT)
NATIVE_WIN64 = (sys.platform == 'win32' and sys.maxsize > 2**32)
if NATIVE_WIN64:
# Visual C++ is a 32-bit application, so we need to look in
# the corresponding registry branch, if we're running a
# 64-bit Python on Win64
VS_BASE = r"Software\Wow6432Node\Microsoft\VisualStudio\%0.1f"
WINSDK_BASE = r"Software\Wow6432Node\Microsoft\Microsoft SDKs\Windows"
NET_BASE = r"Software\Wow6432Node\Microsoft\.NETFramework"
else:
VS_BASE = r"Software\Microsoft\VisualStudio\%0.1f"
WINSDK_BASE = r"Software\Microsoft\Microsoft SDKs\Windows"
NET_BASE = r"Software\Microsoft\.NETFramework"
# A map keyed by get_platform() return values to values accepted by
# 'vcvarsall.bat'. Note a cross-compile may combine these (eg, 'x86_amd64' is
# the param to cross-compile on x86 targeting amd64.)
PLAT_TO_VCVARS = {
'win32' : 'x86',
'win-amd64' : 'amd64',
}
class Reg:
"""Helper class to read values from the registry
"""
def get_value(cls, path, key):
for base in HKEYS:
d = cls.read_values(base, path)
if d and key in d:
return d[key]
raise KeyError(key)
get_value = classmethod(get_value)
def read_keys(cls, base, key):
"""Return list of registry keys."""
try:
handle = RegOpenKeyEx(base, key)
except RegError:
return None
L = []
i = 0
while True:
try:
k = RegEnumKey(handle, i)
except RegError:
break
L.append(k)
i += 1
return L
read_keys = classmethod(read_keys)
def read_values(cls, base, key):
"""Return dict of registry keys and values.
All names are converted to lowercase.
"""
try:
handle = RegOpenKeyEx(base, key)
except RegError:
return None
d = {}
i = 0
while True:
try:
name, value, type = RegEnumValue(handle, i)
except RegError:
break
name = name.lower()
d[cls.convert_mbcs(name)] = cls.convert_mbcs(value)
i += 1
return d
read_values = classmethod(read_values)
def convert_mbcs(s):
dec = getattr(s, "decode", None)
if dec is not None:
try:
s = dec("mbcs")
except UnicodeError:
pass
return s
convert_mbcs = staticmethod(convert_mbcs)
class MacroExpander:
def __init__(self, version):
self.macros = {}
self.vsbase = VS_BASE % version
self.load_macros(version)
def set_macro(self, macro, path, key):
self.macros["$(%s)" % macro] = Reg.get_value(path, key)
def load_macros(self, version):
self.set_macro("VCInstallDir", self.vsbase + r"\Setup\VC", "productdir")
self.set_macro("VSInstallDir", self.vsbase + r"\Setup\VS", "productdir")
self.set_macro("FrameworkDir", NET_BASE, "installroot")
try:
if version >= 8.0:
self.set_macro("FrameworkSDKDir", NET_BASE,
"sdkinstallrootv2.0")
else:
raise KeyError("sdkinstallrootv2.0")
except KeyError:
raise DistutilsPlatformError(
"""Python was built with Visual Studio 2008;
extensions must be built with a compiler that can generate compatible binaries.
Visual Studio 2008 was not found on this system. If you have Cygwin installed,
you can try compiling with MingW32, by passing "-c mingw32" to setup.py.""")
if version >= 9.0:
self.set_macro("FrameworkVersion", self.vsbase, "clr version")
self.set_macro("WindowsSdkDir", WINSDK_BASE, "currentinstallfolder")
else:
p = r"Software\Microsoft\NET Framework Setup\Product"
for base in HKEYS:
try:
h = RegOpenKeyEx(base, p)
except RegError:
continue
key = RegEnumKey(h, 0)
d = Reg.get_value(base, r"%s\%s" % (p, key))
self.macros["$(FrameworkVersion)"] = d["version"]
def sub(self, s):
for k, v in self.macros.items():
s = s.replace(k, v)
return s
def get_build_version():
"""Return the version of MSVC that was used to build Python.
For Python 2.3 and up, the version number is included in
sys.version. For earlier versions, assume the compiler is MSVC 6.
"""
prefix = "MSC v."
i = sys.version.find(prefix)
if i == -1:
return 6
i = i + len(prefix)
s, rest = sys.version[i:].split(" ", 1)
majorVersion = int(s[:-2]) - 6
if majorVersion >= 13:
# v13 was skipped and should be v14
majorVersion += 1
minorVersion = int(s[2:3]) / 10.0
# I don't think paths are affected by minor version in version 6
if majorVersion == 6:
minorVersion = 0
if majorVersion >= 6:
return majorVersion + minorVersion
# else we don't know what version of the compiler this is
return None
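# --- Illustrative sketch (editor's addition; not part of distutils) ---
# How the parsing above maps a sys.version fragment to a VC version:
# "MSC v.1500" -> major 15 - 6 = 9, minor int('0') / 10 = 0.0 -> 9.0 (VS 2008).
# The banner string and helper name here are illustrative only.
def _demo_msc_to_vc(banner="MSC v.1500 64 bit (AMD64)"):
    prefix = "MSC v."
    s = banner[banner.find(prefix) + len(prefix):].split(" ", 1)[0]
    major = int(s[:-2]) - 6
    if major >= 13:
        # v13 was skipped, same as in get_build_version() above
        major += 1
    return major + int(s[2:3]) / 10.0  # 9.0 for the default banner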
def normalize_and_reduce_paths(paths):
"""Return a list of normalized paths with duplicates removed.
The current order of paths is maintained.
"""
# Paths are normalized so things like: /a and /a/ aren't both preserved.
reduced_paths = []
for p in paths:
np = os.path.normpath(p)
# XXX(nnorwitz): O(n**2), if reduced_paths gets long perhaps use a set.
if np not in reduced_paths:
reduced_paths.append(np)
return reduced_paths
def removeDuplicates(variable):
"""Remove duplicate values of an environment variable.
"""
oldList = variable.split(os.pathsep)
newList = []
for i in oldList:
if i not in newList:
newList.append(i)
newVariable = os.pathsep.join(newList)
return newVariable
def find_vcvarsall(version):
"""Find the vcvarsall.bat file
At first it tries to find the productdir of VS 2008 in the registry. If
that fails it falls back to the VS90COMNTOOLS env var.
"""
vsbase = VS_BASE % version
try:
productdir = Reg.get_value(r"%s\Setup\VC" % vsbase,
"productdir")
except KeyError:
log.debug("Unable to find productdir in registry")
productdir = None
if not productdir or not os.path.isdir(productdir):
toolskey = "VS%0.f0COMNTOOLS" % version
toolsdir = os.environ.get(toolskey, None)
if toolsdir and os.path.isdir(toolsdir):
productdir = os.path.join(toolsdir, os.pardir, os.pardir, "VC")
productdir = os.path.abspath(productdir)
if not os.path.isdir(productdir):
log.debug("%s is not a valid directory" % productdir)
return None
else:
log.debug("Env var %s is not set or invalid" % toolskey)
if not productdir:
log.debug("No productdir found")
return None
vcvarsall = os.path.join(productdir, "vcvarsall.bat")
if os.path.isfile(vcvarsall):
return vcvarsall
log.debug("Unable to find vcvarsall.bat")
return None
def query_vcvarsall(version, arch="x86"):
"""Launch vcvarsall.bat and read the settings from its environment
"""
vcvarsall = find_vcvarsall(version)
interesting = {"include", "lib", "libpath", "path"}
result = {}
if vcvarsall is None:
raise DistutilsPlatformError("Unable to find vcvarsall.bat")
log.debug("Calling 'vcvarsall.bat %s' (version=%s)", arch, version)
popen = subprocess.Popen('"%s" %s & set' % (vcvarsall, arch),
stdout=subprocess.PIPE,
stderr=subprocess.PIPE)
try:
stdout, stderr = popen.communicate()
if popen.wait() != 0:
raise DistutilsPlatformError(stderr.decode("mbcs"))
stdout = stdout.decode("mbcs")
for line in stdout.split("\n"):
line = Reg.convert_mbcs(line)
if '=' not in line:
continue
line = line.strip()
key, value = line.split('=', 1)
key = key.lower()
if key in interesting:
if value.endswith(os.pathsep):
value = value[:-1]
result[key] = removeDuplicates(value)
finally:
popen.stdout.close()
popen.stderr.close()
if len(result) != len(interesting):
raise ValueError(str(list(result.keys())))
return result
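# --- Illustrative sketch (editor's addition; not part of distutils) ---
# query_vcvarsall() scrapes the output of `set` printed after vcvarsall.bat
# runs. A hedged demo of the same KEY=VALUE filtering on canned text; it
# reuses removeDuplicates() from above and assumes Windows semantics, where
# os.pathsep is ';'.
def _demo_parse_set_output(text, interesting=("include", "lib", "path")):
    result = {}
    for line in text.split("\n"):
        if '=' not in line:
            continue
        key, value = line.strip().split('=', 1)
        if key.lower() in interesting:
            result[key.lower()] = removeDuplicates(value.rstrip(os.pathsep))
    return result
# _demo_parse_set_output("PATH=C:\\VC\\bin;C:\\VC\\bin\nINCLUDE=C:\\VC\\include")
# -> {'path': 'C:\\VC\\bin', 'include': 'C:\\VC\\include'}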
# More globals
VERSION = get_build_version()
if VERSION < 8.0:
raise DistutilsPlatformError("VC %0.1f is not supported by this module" % VERSION)
# MACROS = MacroExpander(VERSION)
class MSVCCompiler(CCompiler) :
"""Concrete class that implements an interface to Microsoft Visual C++,
as defined by the CCompiler abstract class."""
compiler_type = 'msvc'
# Just set this so CCompiler's constructor doesn't barf. We currently
# don't use the 'set_executables()' bureaucracy provided by CCompiler,
# as it really isn't necessary for this sort of single-compiler class.
# Would be nice to have a consistent interface with UnixCCompiler,
# though, so it's worth thinking about.
executables = {}
# Private class data (need to distinguish C from C++ source for compiler)
_c_extensions = ['.c']
_cpp_extensions = ['.cc', '.cpp', '.cxx']
_rc_extensions = ['.rc']
_mc_extensions = ['.mc']
# Needed for the filename generation methods provided by the
# base class, CCompiler.
src_extensions = (_c_extensions + _cpp_extensions +
_rc_extensions + _mc_extensions)
res_extension = '.res'
obj_extension = '.obj'
static_lib_extension = '.lib'
shared_lib_extension = '.dll'
static_lib_format = shared_lib_format = '%s%s'
exe_extension = '.exe'
def __init__(self, verbose=0, dry_run=0, force=0):
CCompiler.__init__ (self, verbose, dry_run, force)
self.__version = VERSION
self.__root = r"Software\Microsoft\VisualStudio"
# self.__macros = MACROS
self.__paths = []
# target platform (.plat_name is consistent with 'bdist')
self.plat_name = None
self.__arch = None # deprecated name
self.initialized = False
def initialize(self, plat_name=None):
# multi-init means we would need to check platform same each time...
assert not self.initialized, "don't init multiple times"
if plat_name is None:
plat_name = get_platform()
# sanity check for platforms to prevent obscure errors later.
ok_plats = 'win32', 'win-amd64'
if plat_name not in ok_plats:
raise DistutilsPlatformError("--plat-name must be one of %s" %
(ok_plats,))
if "DISTUTILS_USE_SDK" in os.environ and "MSSdk" in os.environ and self.find_exe("cl.exe"):
# Assume that the SDK set up everything alright; don't try to be
# smarter
self.cc = "cl.exe"
self.linker = "link.exe"
self.lib = "lib.exe"
self.rc = "rc.exe"
self.mc = "mc.exe"
else:
# On x86, 'vcvars32.bat amd64' creates an env that doesn't work;
# to cross compile, you use 'x86_amd64'.
# On AMD64, 'vcvars32.bat amd64' is a native build env; to cross
# compile use 'x86' (ie, it runs the x86 compiler directly)
if plat_name == get_platform() or plat_name == 'win32':
# native build or cross-compile to win32
plat_spec = PLAT_TO_VCVARS[plat_name]
else:
# cross compile from win32 -> some 64bit
plat_spec = PLAT_TO_VCVARS[get_platform()] + '_' + \
PLAT_TO_VCVARS[plat_name]
vc_env = query_vcvarsall(VERSION, plat_spec)
self.__paths = vc_env['path'].split(os.pathsep)
os.environ['lib'] = vc_env['lib']
os.environ['include'] = vc_env['include']
if len(self.__paths) == 0:
raise DistutilsPlatformError("Python was built with %s, "
"and extensions need to be built with the same "
"version of the compiler, but it isn't installed."
% self.__product)
self.cc = self.find_exe("cl.exe")
self.linker = self.find_exe("link.exe")
self.lib = self.find_exe("lib.exe")
self.rc = self.find_exe("rc.exe") # resource compiler
self.mc = self.find_exe("mc.exe") # message compiler
#self.set_path_env_var('lib')
#self.set_path_env_var('include')
# extend the MSVC path with the current path
try:
for p in os.environ['path'].split(';'):
self.__paths.append(p)
except KeyError:
pass
self.__paths = normalize_and_reduce_paths(self.__paths)
os.environ['path'] = ";".join(self.__paths)
self.preprocess_options = None
if self.__arch == "x86":
self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3',
'/DNDEBUG']
self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3',
'/Z7', '/D_DEBUG']
else:
# Win64
self.compile_options = [ '/nologo', '/Ox', '/MD', '/W3', '/GS-' ,
'/DNDEBUG']
self.compile_options_debug = ['/nologo', '/Od', '/MDd', '/W3', '/GS-',
'/Z7', '/D_DEBUG']
self.ldflags_shared = ['/DLL', '/nologo', '/INCREMENTAL:NO']
if self.__version >= 7:
self.ldflags_shared_debug = [
'/DLL', '/nologo', '/INCREMENTAL:no', '/DEBUG'
]
self.ldflags_static = [ '/nologo']
self.initialized = True
# -- Worker methods ------------------------------------------------
def object_filenames(self,
source_filenames,
strip_dir=0,
output_dir=''):
# Copied from ccompiler.py, extended to return .res as 'object'-file
# for .rc input file
if output_dir is None: output_dir = ''
obj_names = []
for src_name in source_filenames:
(base, ext) = os.path.splitext (src_name)
base = os.path.splitdrive(base)[1] # Chop off the drive
base = base[os.path.isabs(base):] # If abs, chop off leading /
if ext not in self.src_extensions:
# Better to raise an exception instead of silently continuing
# and later complain about sources and targets having
# different lengths
raise CompileError ("Don't know how to compile %s" % src_name)
if strip_dir:
base = os.path.basename (base)
if ext in self._rc_extensions:
obj_names.append (os.path.join (output_dir,
base + self.res_extension))
elif ext in self._mc_extensions:
obj_names.append (os.path.join (output_dir,
base + self.res_extension))
else:
obj_names.append (os.path.join (output_dir,
base + self.obj_extension))
return obj_names
def compile(self, sources,
output_dir=None, macros=None, include_dirs=None, debug=0,
extra_preargs=None, extra_postargs=None, depends=None):
if not self.initialized:
self.initialize()
compile_info = self._setup_compile(output_dir, macros, include_dirs,
sources, depends, extra_postargs)
macros, objects, extra_postargs, pp_opts, build = compile_info
compile_opts = extra_preargs or []
compile_opts.append ('/c')
if debug:
compile_opts.extend(self.compile_options_debug)
else:
compile_opts.extend(self.compile_options)
for obj in objects:
try:
src, ext = build[obj]
except KeyError:
continue
if debug:
# pass the full pathname to MSVC in debug mode,
# this allows the debugger to find the source file
# without asking the user to browse for it
src = os.path.abspath(src)
if ext in self._c_extensions:
input_opt = "/Tc" + src
elif ext in self._cpp_extensions:
input_opt = "/Tp" + src
elif ext in self._rc_extensions:
# compile .RC to .RES file
input_opt = src
output_opt = "/fo" + obj
try:
self.spawn([self.rc] + pp_opts +
[output_opt] + [input_opt])
except DistutilsExecError as msg:
raise CompileError(msg)
continue
elif ext in self._mc_extensions:
# Compile .MC to .RC file to .RES file.
# * '-h dir' specifies the directory for the
# generated include file
# * '-r dir' specifies the target directory of the
# generated RC file and the binary message resource
# it includes
#
# For now (since there are no options to change this),
# we use the source-directory for the include file and
# the build directory for the RC file and message
# resources. This works at least for win32all.
h_dir = os.path.dirname(src)
rc_dir = os.path.dirname(obj)
try:
# first compile .MC to .RC and .H file
self.spawn([self.mc] +
['-h', h_dir, '-r', rc_dir] + [src])
base, _ = os.path.splitext (os.path.basename (src))
rc_file = os.path.join (rc_dir, base + '.rc')
# then compile .RC to .RES file
self.spawn([self.rc] +
["/fo" + obj] + [rc_file])
except DistutilsExecError as msg:
raise CompileError(msg)
continue
else:
# how to handle this file?
raise CompileError("Don't know how to compile %s to %s"
% (src, obj))
output_opt = "/Fo" + obj
try:
self.spawn([self.cc] + compile_opts + pp_opts +
[input_opt, output_opt] +
extra_postargs)
except DistutilsExecError as msg:
raise CompileError(msg)
return objects
def create_static_lib(self,
objects,
output_libname,
output_dir=None,
debug=0,
target_lang=None):
if not self.initialized:
self.initialize()
(objects, output_dir) = self._fix_object_args(objects, output_dir)
output_filename = self.library_filename(output_libname,
output_dir=output_dir)
if self._need_link(objects, output_filename):
lib_args = objects + ['/OUT:' + output_filename]
if debug:
pass # XXX what goes here?
try:
self.spawn([self.lib] + lib_args)
except DistutilsExecError as msg:
raise LibError(msg)
else:
log.debug("skipping %s (up-to-date)", output_filename)
def link(self,
target_desc,
objects,
output_filename,
output_dir=None,
libraries=None,
library_dirs=None,
runtime_library_dirs=None,
export_symbols=None,
debug=0,
extra_preargs=None,
extra_postargs=None,
build_temp=None,
target_lang=None):
if not self.initialized:
self.initialize()
(objects, output_dir) = self._fix_object_args(objects, output_dir)
fixed_args = self._fix_lib_args(libraries, library_dirs,
runtime_library_dirs)
(libraries, library_dirs, runtime_library_dirs) = fixed_args
if runtime_library_dirs:
self.warn ("I don't know what to do with 'runtime_library_dirs': "
+ str (runtime_library_dirs))
lib_opts = gen_lib_options(self,
library_dirs, runtime_library_dirs,
libraries)
if output_dir is not None:
output_filename = os.path.join(output_dir, output_filename)
if self._need_link(objects, output_filename):
if target_desc == CCompiler.EXECUTABLE:
if debug:
ldflags = self.ldflags_shared_debug[1:]
else:
ldflags = self.ldflags_shared[1:]
else:
if debug:
ldflags = self.ldflags_shared_debug
else:
ldflags = self.ldflags_shared
export_opts = []
for sym in (export_symbols or []):
export_opts.append("/EXPORT:" + sym)
ld_args = (ldflags + lib_opts + export_opts +
objects + ['/OUT:' + output_filename])
# The MSVC linker generates .lib and .exp files, which cannot be
# suppressed by any linker switches. The .lib files may even be
# needed! Make sure they are generated in the temporary build
# directory. Since they have different names for debug and release
# builds, they can go into the same directory.
build_temp = os.path.dirname(objects[0])
if export_symbols is not None:
(dll_name, dll_ext) = os.path.splitext(
os.path.basename(output_filename))
implib_file = os.path.join(
build_temp,
self.library_filename(dll_name))
ld_args.append ('/IMPLIB:' + implib_file)
self.manifest_setup_ldargs(output_filename, build_temp, ld_args)
if extra_preargs:
ld_args[:0] = extra_preargs
if extra_postargs:
ld_args.extend(extra_postargs)
self.mkpath(os.path.dirname(output_filename))
try:
self.spawn([self.linker] + ld_args)
except DistutilsExecError as msg:
raise LinkError(msg)
# embed the manifest
# XXX - this is somewhat fragile - if mt.exe fails, distutils
# will still consider the DLL up-to-date, but it will not have a
# manifest. Maybe we should link to a temp file? OTOH, that
# implies a build environment error that shouldn't go undetected.
mfinfo = self.manifest_get_embed_info(target_desc, ld_args)
if mfinfo is not None:
mffilename, mfid = mfinfo
out_arg = '-outputresource:%s;%s' % (output_filename, mfid)
try:
self.spawn(['mt.exe', '-nologo', '-manifest',
mffilename, out_arg])
except DistutilsExecError as msg:
raise LinkError(msg)
else:
log.debug("skipping %s (up-to-date)", output_filename)
def manifest_setup_ldargs(self, output_filename, build_temp, ld_args):
# If we need a manifest at all, an embedded manifest is recommended.
# See MSDN article titled
# "How to: Embed a Manifest Inside a C/C++ Application"
# (currently at http://msdn2.microsoft.com/en-us/library/ms235591(VS.80).aspx)
# Ask the linker to generate the manifest in the temp dir, so
# we can check it, and possibly embed it, later.
temp_manifest = os.path.join(
build_temp,
os.path.basename(output_filename) + ".manifest")
ld_args.append('/MANIFESTFILE:' + temp_manifest)
def manifest_get_embed_info(self, target_desc, ld_args):
# If a manifest should be embedded, return a tuple of
# (manifest_filename, resource_id). Returns None if no manifest
# should be embedded. See http://bugs.python.org/issue7833 for why
# we want to avoid any manifest for extension modules if we can)
for arg in ld_args:
if arg.startswith("/MANIFESTFILE:"):
temp_manifest = arg.split(":", 1)[1]
break
else:
# no /MANIFESTFILE so nothing to do.
return None
if target_desc == CCompiler.EXECUTABLE:
# by default, executables always get the manifest with the
# CRT referenced.
mfid = 1
else:
# Extension modules try and avoid any manifest if possible.
mfid = 2
temp_manifest = self._remove_visual_c_ref(temp_manifest)
if temp_manifest is None:
return None
return temp_manifest, mfid
def _remove_visual_c_ref(self, manifest_file):
try:
# Remove references to the Visual C runtime, so they will
# fall through to the Visual C dependency of Python.exe.
# This way, when installed for a restricted user (e.g.
# runtimes are not in WinSxS folder, but in Python's own
# folder), the runtimes do not need to be in every folder
# with .pyd's.
# Returns either the filename of the modified manifest or
# None if no manifest should be embedded.
manifest_f = open(manifest_file)
try:
manifest_buf = manifest_f.read()
finally:
manifest_f.close()
pattern = re.compile(
r"""<assemblyIdentity.*?name=("|')Microsoft\."""\
r"""VC\d{2}\.CRT("|').*?(/>|</assemblyIdentity>)""",
re.DOTALL)
manifest_buf = re.sub(pattern, "", manifest_buf)
pattern = r"<dependentAssembly>\s*</dependentAssembly>"
manifest_buf = re.sub(pattern, "", manifest_buf)
# Now see if any other assemblies are referenced - if not, we
# don't want a manifest embedded.
pattern = re.compile(
r"""<assemblyIdentity.*?name=(?:"|')(.+?)(?:"|')"""
r""".*?(?:/>|</assemblyIdentity>)""", re.DOTALL)
if re.search(pattern, manifest_buf) is None:
return None
manifest_f = open(manifest_file, 'w')
try:
manifest_f.write(manifest_buf)
return manifest_file
finally:
manifest_f.close()
except OSError:
pass
# -- Miscellaneous methods -----------------------------------------
# These are all used by the 'gen_lib_options() function, in
# ccompiler.py.
def library_dir_option(self, dir):
return "/LIBPATH:" + dir
def runtime_library_dir_option(self, dir):
raise DistutilsPlatformError(
"don't know how to set runtime library search path for MSVC++")
def library_option(self, lib):
return self.library_filename(lib)
def find_library_file(self, dirs, lib, debug=0):
# Prefer a debugging library if found (and requested), but deal
# with it if we don't have one.
if debug:
try_names = [lib + "_d", lib]
else:
try_names = [lib]
for dir in dirs:
for name in try_names:
libfile = os.path.join(dir, self.library_filename (name))
if os.path.exists(libfile):
return libfile
else:
# Oops, didn't find it in *any* of 'dirs'
return None
# Helper methods for using the MSVC registry settings
def find_exe(self, exe):
"""Return path to an MSVC executable program.
Tries to find the program in several places: first, one of the
MSVC program search paths from the registry; next, the directories
in the PATH environment variable. If any of those work, return an
absolute path that is known to exist. If none of them work, just
return the original program name, 'exe'.
"""
for p in self.__paths:
fn = os.path.join(os.path.abspath(p), exe)
if os.path.isfile(fn):
return fn
# didn't find it; try existing path
for p in os.environ['Path'].split(';'):
fn = os.path.join(os.path.abspath(p),exe)
if os.path.isfile(fn):
return fn
return exe
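# --- Illustrative sketch (editor's addition; not part of distutils) ---
# How initialize() above picks the vcvarsall.bat argument: native builds
# use the target platform's PLAT_TO_VCVARS entry, cross builds join host
# and target with '_'. The helper name is hypothetical.
def _demo_plat_spec(host, target):
    if target == host or target == 'win32':
        return PLAT_TO_VCVARS[target]
    return PLAT_TO_VCVARS[host] + '_' + PLAT_TO_VCVARS[target]
# _demo_plat_spec('win32', 'win-amd64') -> 'x86_amd64'
# _demo_plat_spec('win-amd64', 'win-amd64') -> 'amd64'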
| Django-locallibrary/env/Lib/site-packages/setuptools/_distutils/msvc9compiler.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/setuptools/_distutils/msvc9compiler.py",
"repo_id": "Django-locallibrary",
"token_count": 14796
} | 37 |
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
__all__ = [
"__title__",
"__summary__",
"__uri__",
"__version__",
"__author__",
"__email__",
"__license__",
"__copyright__",
]
__title__ = "packaging"
__summary__ = "Core utilities for Python packages"
__uri__ = "https://github.com/pypa/packaging"
__version__ = "19.2"
__author__ = "Donald Stufft and individual contributors"
__email__ = "[email protected]"
__license__ = "BSD or Apache License, Version 2.0"
__copyright__ = "Copyright 2014-2019 %s" % __author__
| Django-locallibrary/env/Lib/site-packages/setuptools/_vendor/packaging/__about__.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/setuptools/_vendor/packaging/__about__.py",
"repo_id": "Django-locallibrary",
"token_count": 256
} | 38 |
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import abc
import functools
import itertools
import re
from ._compat import string_types, with_metaclass
from .version import Version, LegacyVersion, parse
class InvalidSpecifier(ValueError):
"""
An invalid specifier was found, users should refer to PEP 440.
"""
class BaseSpecifier(with_metaclass(abc.ABCMeta, object)):
@abc.abstractmethod
def __str__(self):
"""
Returns the str representation of this Specifier like object. This
should be representative of the Specifier itself.
"""
@abc.abstractmethod
def __hash__(self):
"""
Returns a hash value for this Specifier like object.
"""
@abc.abstractmethod
def __eq__(self, other):
"""
Returns a boolean representing whether or not the two Specifier like
objects are equal.
"""
@abc.abstractmethod
def __ne__(self, other):
"""
Returns a boolean representing whether or not the two Specifier like
objects are not equal.
"""
@abc.abstractproperty
def prereleases(self):
"""
Returns whether or not pre-releases as a whole are allowed by this
specifier.
"""
@prereleases.setter
def prereleases(self, value):
"""
Sets whether or not pre-releases as a whole are allowed by this
specifier.
"""
@abc.abstractmethod
def contains(self, item, prereleases=None):
"""
Determines if the given item is contained within this specifier.
"""
@abc.abstractmethod
def filter(self, iterable, prereleases=None):
"""
Takes an iterable of items and filters them so that only items which
are contained within this specifier are allowed in it.
"""
class _IndividualSpecifier(BaseSpecifier):
_operators = {}
def __init__(self, spec="", prereleases=None):
match = self._regex.search(spec)
if not match:
raise InvalidSpecifier("Invalid specifier: '{0}'".format(spec))
self._spec = (match.group("operator").strip(), match.group("version").strip())
# Store whether or not this Specifier should accept prereleases
self._prereleases = prereleases
def __repr__(self):
pre = (
", prereleases={0!r}".format(self.prereleases)
if self._prereleases is not None
else ""
)
return "<{0}({1!r}{2})>".format(self.__class__.__name__, str(self), pre)
def __str__(self):
return "{0}{1}".format(*self._spec)
def __hash__(self):
return hash(self._spec)
def __eq__(self, other):
if isinstance(other, string_types):
try:
other = self.__class__(other)
except InvalidSpecifier:
return NotImplemented
elif not isinstance(other, self.__class__):
return NotImplemented
return self._spec == other._spec
def __ne__(self, other):
if isinstance(other, string_types):
try:
other = self.__class__(other)
except InvalidSpecifier:
return NotImplemented
elif not isinstance(other, self.__class__):
return NotImplemented
return self._spec != other._spec
def _get_operator(self, op):
return getattr(self, "_compare_{0}".format(self._operators[op]))
def _coerce_version(self, version):
if not isinstance(version, (LegacyVersion, Version)):
version = parse(version)
return version
@property
def operator(self):
return self._spec[0]
@property
def version(self):
return self._spec[1]
@property
def prereleases(self):
return self._prereleases
@prereleases.setter
def prereleases(self, value):
self._prereleases = value
def __contains__(self, item):
return self.contains(item)
def contains(self, item, prereleases=None):
# Determine if prereleases are to be allowed or not.
if prereleases is None:
prereleases = self.prereleases
# Normalize item to a Version or LegacyVersion, this allows us to have
# a shortcut for ``"2.0" in Specifier(">=2")
item = self._coerce_version(item)
# Determine if we should be supporting prereleases in this specifier
        # or not; if we do not support prereleases then we can short-circuit
        # the logic if this version is a prerelease.
if item.is_prerelease and not prereleases:
return False
# Actually do the comparison to determine if this item is contained
# within this Specifier or not.
return self._get_operator(self.operator)(item, self.version)
def filter(self, iterable, prereleases=None):
yielded = False
found_prereleases = []
kw = {"prereleases": prereleases if prereleases is not None else True}
# Attempt to iterate over all the values in the iterable and if any of
# them match, yield them.
for version in iterable:
parsed_version = self._coerce_version(version)
if self.contains(parsed_version, **kw):
# If our version is a prerelease, and we were not set to allow
                # prereleases, then we'll store it for later in case nothing
# else matches this specifier.
if parsed_version.is_prerelease and not (
prereleases or self.prereleases
):
found_prereleases.append(version)
# Either this is not a prerelease, or we should have been
# accepting prereleases from the beginning.
else:
yielded = True
yield version
# Now that we've iterated over everything, determine if we've yielded
# any values, and if we have not and we have any prereleases stored up
# then we will go ahead and yield the prereleases.
if not yielded and found_prereleases:
for version in found_prereleases:
yield version
class LegacySpecifier(_IndividualSpecifier):
_regex_str = r"""
(?P<operator>(==|!=|<=|>=|<|>))
\s*
(?P<version>
[^,;\s)]* # Since this is a "legacy" specifier, and the version
# string can be just about anything, we match everything
# except for whitespace, a semi-colon for marker support,
# a closing paren since versions can be enclosed in
# them, and a comma since it's a version separator.
)
"""
_regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
_operators = {
"==": "equal",
"!=": "not_equal",
"<=": "less_than_equal",
">=": "greater_than_equal",
"<": "less_than",
">": "greater_than",
}
def _coerce_version(self, version):
if not isinstance(version, LegacyVersion):
version = LegacyVersion(str(version))
return version
def _compare_equal(self, prospective, spec):
return prospective == self._coerce_version(spec)
def _compare_not_equal(self, prospective, spec):
return prospective != self._coerce_version(spec)
def _compare_less_than_equal(self, prospective, spec):
return prospective <= self._coerce_version(spec)
def _compare_greater_than_equal(self, prospective, spec):
return prospective >= self._coerce_version(spec)
def _compare_less_than(self, prospective, spec):
return prospective < self._coerce_version(spec)
def _compare_greater_than(self, prospective, spec):
return prospective > self._coerce_version(spec)
def _require_version_compare(fn):
@functools.wraps(fn)
def wrapped(self, prospective, spec):
if not isinstance(prospective, Version):
return False
return fn(self, prospective, spec)
return wrapped
class Specifier(_IndividualSpecifier):
_regex_str = r"""
(?P<operator>(~=|==|!=|<=|>=|<|>|===))
(?P<version>
(?:
# The identity operators allow for an escape hatch that will
# do an exact string match of the version you wish to install.
# This will not be parsed by PEP 440 and we cannot determine
# any semantic meaning from it. This operator is discouraged
# but included entirely as an escape hatch.
(?<====) # Only match for the identity operator
\s*
[^\s]* # We just match everything, except for whitespace
# since we are only testing for strict identity.
)
|
(?:
# The (non)equality operators allow for wild card and local
# versions to be specified so we have to define these two
# operators separately to enable that.
(?<===|!=) # Only match for equals and not equals
\s*
v?
(?:[0-9]+!)? # epoch
[0-9]+(?:\.[0-9]+)* # release
(?: # pre release
[-_\.]?
(a|b|c|rc|alpha|beta|pre|preview)
[-_\.]?
[0-9]*
)?
(?: # post release
(?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
)?
# You cannot use a wild card and a dev or local version
# together so group them with a | and make them optional.
(?:
(?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
(?:\+[a-z0-9]+(?:[-_\.][a-z0-9]+)*)? # local
|
\.\* # Wild card syntax of .*
)?
)
|
(?:
# The compatible operator requires at least two digits in the
# release segment.
(?<=~=) # Only match for the compatible operator
\s*
v?
(?:[0-9]+!)? # epoch
[0-9]+(?:\.[0-9]+)+ # release (We have a + instead of a *)
(?: # pre release
[-_\.]?
(a|b|c|rc|alpha|beta|pre|preview)
[-_\.]?
[0-9]*
)?
(?: # post release
(?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
)?
(?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
)
|
(?:
# All other operators only allow a sub set of what the
# (non)equality operators do. Specifically they do not allow
# local versions to be specified nor do they allow the prefix
# matching wild cards.
(?<!==|!=|~=) # We have special cases for these
# operators so we want to make sure they
# don't match here.
\s*
v?
(?:[0-9]+!)? # epoch
[0-9]+(?:\.[0-9]+)* # release
(?: # pre release
[-_\.]?
(a|b|c|rc|alpha|beta|pre|preview)
[-_\.]?
[0-9]*
)?
(?: # post release
(?:-[0-9]+)|(?:[-_\.]?(post|rev|r)[-_\.]?[0-9]*)
)?
(?:[-_\.]?dev[-_\.]?[0-9]*)? # dev release
)
)
"""
_regex = re.compile(r"^\s*" + _regex_str + r"\s*$", re.VERBOSE | re.IGNORECASE)
_operators = {
"~=": "compatible",
"==": "equal",
"!=": "not_equal",
"<=": "less_than_equal",
">=": "greater_than_equal",
"<": "less_than",
">": "greater_than",
"===": "arbitrary",
}
@_require_version_compare
def _compare_compatible(self, prospective, spec):
# Compatible releases have an equivalent combination of >= and ==. That
# is that ~=2.2 is equivalent to >=2.2,==2.*. This allows us to
# implement this in terms of the other specifiers instead of
# implementing it ourselves. The only thing we need to do is construct
# the other specifiers.
# We want everything but the last item in the version, but we want to
# ignore post and dev releases and we want to treat the pre-release as
        # its own separate segment.
prefix = ".".join(
list(
itertools.takewhile(
lambda x: (not x.startswith("post") and not x.startswith("dev")),
_version_split(spec),
)
)[:-1]
)
# Add the prefix notation to the end of our string
prefix += ".*"
return self._get_operator(">=")(prospective, spec) and self._get_operator("==")(
prospective, prefix
)
@_require_version_compare
def _compare_equal(self, prospective, spec):
# We need special logic to handle prefix matching
if spec.endswith(".*"):
# In the case of prefix matching we want to ignore local segment.
prospective = Version(prospective.public)
# Split the spec out by dots, and pretend that there is an implicit
# dot in between a release segment and a pre-release segment.
spec = _version_split(spec[:-2]) # Remove the trailing .*
# Split the prospective version out by dots, and pretend that there
# is an implicit dot in between a release segment and a pre-release
# segment.
prospective = _version_split(str(prospective))
# Shorten the prospective version to be the same length as the spec
# so that we can determine if the specifier is a prefix of the
# prospective version or not.
prospective = prospective[: len(spec)]
# Pad out our two sides with zeros so that they both equal the same
# length.
spec, prospective = _pad_version(spec, prospective)
else:
# Convert our spec string into a Version
spec = Version(spec)
# If the specifier does not have a local segment, then we want to
# act as if the prospective version also does not have a local
# segment.
if not spec.local:
prospective = Version(prospective.public)
return prospective == spec
@_require_version_compare
def _compare_not_equal(self, prospective, spec):
return not self._compare_equal(prospective, spec)
@_require_version_compare
def _compare_less_than_equal(self, prospective, spec):
return prospective <= Version(spec)
@_require_version_compare
def _compare_greater_than_equal(self, prospective, spec):
return prospective >= Version(spec)
@_require_version_compare
def _compare_less_than(self, prospective, spec):
# Convert our spec to a Version instance, since we'll want to work with
# it as a version.
spec = Version(spec)
# Check to see if the prospective version is less than the spec
# version. If it's not we can short circuit and just return False now
# instead of doing extra unneeded work.
if not prospective < spec:
return False
        # This special case is here so that, unless the specifier itself
        # includes a pre-release version, we do not accept pre-release
# versions for the version mentioned in the specifier (e.g. <3.1 should
# not match 3.1.dev0, but should match 3.0.dev0).
if not spec.is_prerelease and prospective.is_prerelease:
if Version(prospective.base_version) == Version(spec.base_version):
return False
# If we've gotten to here, it means that prospective version is both
# less than the spec version *and* it's not a pre-release of the same
# version in the spec.
return True
@_require_version_compare
def _compare_greater_than(self, prospective, spec):
# Convert our spec to a Version instance, since we'll want to work with
# it as a version.
spec = Version(spec)
# Check to see if the prospective version is greater than the spec
# version. If it's not we can short circuit and just return False now
# instead of doing extra unneeded work.
if not prospective > spec:
return False
# This special case is here so that, unless the specifier itself
        # includes a post-release version, we do not accept
# post-release versions for the version mentioned in the specifier
# (e.g. >3.1 should not match 3.0.post0, but should match 3.2.post0).
if not spec.is_postrelease and prospective.is_postrelease:
if Version(prospective.base_version) == Version(spec.base_version):
return False
# Ensure that we do not allow a local version of the version mentioned
# in the specifier, which is technically greater than, to match.
if prospective.local is not None:
if Version(prospective.base_version) == Version(spec.base_version):
return False
# If we've gotten to here, it means that prospective version is both
# greater than the spec version *and* it's not a pre-release of the
# same version in the spec.
return True
def _compare_arbitrary(self, prospective, spec):
return str(prospective).lower() == str(spec).lower()
@property
def prereleases(self):
# If there is an explicit prereleases set for this, then we'll just
# blindly use that.
if self._prereleases is not None:
return self._prereleases
# Look at all of our specifiers and determine if they are inclusive
# operators, and if they are if they are including an explicit
# prerelease.
operator, version = self._spec
if operator in ["==", ">=", "<=", "~=", "==="]:
# The == specifier can include a trailing .*, if it does we
# want to remove before parsing.
if operator == "==" and version.endswith(".*"):
version = version[:-2]
# Parse the version, and if it is a pre-release than this
# specifier allows pre-releases.
if parse(version).is_prerelease:
return True
return False
@prereleases.setter
def prereleases(self, value):
self._prereleases = value
_prefix_regex = re.compile(r"^([0-9]+)((?:a|b|c|rc)[0-9]+)$")
def _version_split(version):
result = []
for item in version.split("."):
match = _prefix_regex.search(item)
if match:
result.extend(match.groups())
else:
result.append(item)
return result
def _pad_version(left, right):
left_split, right_split = [], []
# Get the release segment of our versions
left_split.append(list(itertools.takewhile(lambda x: x.isdigit(), left)))
right_split.append(list(itertools.takewhile(lambda x: x.isdigit(), right)))
# Get the rest of our versions
left_split.append(left[len(left_split[0]) :])
right_split.append(right[len(right_split[0]) :])
# Insert our padding
left_split.insert(1, ["0"] * max(0, len(right_split[0]) - len(left_split[0])))
right_split.insert(1, ["0"] * max(0, len(left_split[0]) - len(right_split[0])))
return (list(itertools.chain(*left_split)), list(itertools.chain(*right_split)))
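# --- Illustrative sketch (editor's addition; not part of packaging) ---
# Behaviour of the two helpers above on concrete inputs, used by the
# prefix-matching path of ``==X.Y.*``. The function is a hypothetical
# demo wrapper around calls that exist in this module.
def _demo_split_and_pad():
    # An implicit dot is inserted before the pre-release segment:
    assert _version_split("1.0rc1") == ['1', '0', 'rc1']
    # Release segments are zero-padded to equal length:
    assert _pad_version(['1', '0'], ['1', '0', '0']) == (
        ['1', '0', '0'], ['1', '0', '0'])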
class SpecifierSet(BaseSpecifier):
def __init__(self, specifiers="", prereleases=None):
        # Split on , to break each individual specifier into its own item, and
# strip each item to remove leading/trailing whitespace.
specifiers = [s.strip() for s in specifiers.split(",") if s.strip()]
        # Parse each individual specifier, attempting first to make it a
# Specifier and falling back to a LegacySpecifier.
parsed = set()
for specifier in specifiers:
try:
parsed.add(Specifier(specifier))
except InvalidSpecifier:
parsed.add(LegacySpecifier(specifier))
# Turn our parsed specifiers into a frozen set and save them for later.
self._specs = frozenset(parsed)
# Store our prereleases value so we can use it later to determine if
# we accept prereleases or not.
self._prereleases = prereleases
def __repr__(self):
pre = (
", prereleases={0!r}".format(self.prereleases)
if self._prereleases is not None
else ""
)
return "<SpecifierSet({0!r}{1})>".format(str(self), pre)
def __str__(self):
return ",".join(sorted(str(s) for s in self._specs))
def __hash__(self):
return hash(self._specs)
def __and__(self, other):
if isinstance(other, string_types):
other = SpecifierSet(other)
elif not isinstance(other, SpecifierSet):
return NotImplemented
specifier = SpecifierSet()
specifier._specs = frozenset(self._specs | other._specs)
if self._prereleases is None and other._prereleases is not None:
specifier._prereleases = other._prereleases
elif self._prereleases is not None and other._prereleases is None:
specifier._prereleases = self._prereleases
elif self._prereleases == other._prereleases:
specifier._prereleases = self._prereleases
else:
raise ValueError(
"Cannot combine SpecifierSets with True and False prerelease "
"overrides."
)
return specifier
def __eq__(self, other):
if isinstance(other, string_types):
other = SpecifierSet(other)
elif isinstance(other, _IndividualSpecifier):
other = SpecifierSet(str(other))
elif not isinstance(other, SpecifierSet):
return NotImplemented
return self._specs == other._specs
def __ne__(self, other):
if isinstance(other, string_types):
other = SpecifierSet(other)
elif isinstance(other, _IndividualSpecifier):
other = SpecifierSet(str(other))
elif not isinstance(other, SpecifierSet):
return NotImplemented
return self._specs != other._specs
def __len__(self):
return len(self._specs)
def __iter__(self):
return iter(self._specs)
@property
def prereleases(self):
# If we have been given an explicit prerelease modifier, then we'll
# pass that through here.
if self._prereleases is not None:
return self._prereleases
# If we don't have any specifiers, and we don't have a forced value,
# then we'll just return None since we don't know if this should have
# pre-releases or not.
if not self._specs:
return None
# Otherwise we'll see if any of the given specifiers accept
# prereleases, if any of them do we'll return True, otherwise False.
return any(s.prereleases for s in self._specs)
@prereleases.setter
def prereleases(self, value):
self._prereleases = value
def __contains__(self, item):
return self.contains(item)
def contains(self, item, prereleases=None):
# Ensure that our item is a Version or LegacyVersion instance.
if not isinstance(item, (LegacyVersion, Version)):
item = parse(item)
# Determine if we're forcing a prerelease or not, if we're not forcing
# one for this particular filter call, then we'll use whatever the
# SpecifierSet thinks for whether or not we should support prereleases.
if prereleases is None:
prereleases = self.prereleases
# We can determine if we're going to allow pre-releases by looking to
# see if any of the underlying items supports them. If none of them do
# and this item is a pre-release then we do not allow it and we can
# short circuit that here.
# Note: This means that 1.0.dev1 would not be contained in something
        # like >=1.0.devabc, however it would be in >=1.0.devabc,>0.0.dev0
if not prereleases and item.is_prerelease:
return False
# We simply dispatch to the underlying specs here to make sure that the
# given version is contained within all of them.
# Note: This use of all() here means that an empty set of specifiers
# will always return True, this is an explicit design decision.
return all(s.contains(item, prereleases=prereleases) for s in self._specs)
def filter(self, iterable, prereleases=None):
# Determine if we're forcing a prerelease or not, if we're not forcing
# one for this particular filter call, then we'll use whatever the
# SpecifierSet thinks for whether or not we should support prereleases.
if prereleases is None:
prereleases = self.prereleases
# If we have any specifiers, then we want to wrap our iterable in the
# filter method for each one, this will act as a logical AND amongst
# each specifier.
if self._specs:
for spec in self._specs:
iterable = spec.filter(iterable, prereleases=bool(prereleases))
return iterable
# If we do not have any specifiers, then we need to have a rough filter
# which will filter out any pre-releases, unless there are no final
# releases, and which will filter out LegacyVersion in general.
else:
filtered = []
found_prereleases = []
for item in iterable:
                # Ensure that we have some kind of Version class for this item.
if not isinstance(item, (LegacyVersion, Version)):
parsed_version = parse(item)
else:
parsed_version = item
# Filter out any item which is parsed as a LegacyVersion
if isinstance(parsed_version, LegacyVersion):
continue
# Store any item which is a pre-release for later unless we've
# already found a final version or we are accepting prereleases
if parsed_version.is_prerelease and not prereleases:
if not filtered:
found_prereleases.append(item)
else:
filtered.append(item)
# If we've found no items except for pre-releases, then we'll go
# ahead and use the pre-releases
if not filtered and found_prereleases and prereleases is None:
return found_prereleases
return filtered
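# --- Illustrative usage sketch (editor's addition; not part of packaging) ---
# Typical use of the public API defined above; the version numbers are
# examples and the function name is hypothetical.
def _demo_specifier_set():
    spec = SpecifierSet(">=1.0,<2.0")
    assert "1.5" in spec              # __contains__ delegates to contains()
    assert not spec.contains("2.0")   # upper bound is exclusive
    # filter() applies every specifier as a logical AND over the iterable:
    assert list(spec.filter(["0.9", "1.0", "1.5", "2.0"])) == ["1.0", "1.5"]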
| Django-locallibrary/env/Lib/site-packages/setuptools/_vendor/packaging/specifiers.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/setuptools/_vendor/packaging/specifiers.py",
"repo_id": "Django-locallibrary",
"token_count": 12296
} | 39 |
from distutils.errors import DistutilsArgError
import inspect
import glob
import warnings
import platform
import distutils.command.install as orig
import setuptools
# Prior to numpy 1.9, NumPy relied on the '_install' name, so provide it for
# now. See https://github.com/pypa/setuptools/issues/199/
_install = orig.install
class install(orig.install):
"""Use easy_install to install the package, w/dependencies"""
user_options = orig.install.user_options + [
('old-and-unmanageable', None, "Try not to use this!"),
('single-version-externally-managed', None,
"used by system package builders to create 'flat' eggs"),
]
boolean_options = orig.install.boolean_options + [
'old-and-unmanageable', 'single-version-externally-managed',
]
new_commands = [
('install_egg_info', lambda self: True),
('install_scripts', lambda self: True),
]
_nc = dict(new_commands)
def initialize_options(self):
orig.install.initialize_options(self)
self.old_and_unmanageable = None
self.single_version_externally_managed = None
def finalize_options(self):
orig.install.finalize_options(self)
if self.root:
self.single_version_externally_managed = True
elif self.single_version_externally_managed:
if not self.root and not self.record:
raise DistutilsArgError(
"You must specify --record or --root when building system"
" packages"
)
def handle_extra_path(self):
if self.root or self.single_version_externally_managed:
# explicit backward-compatibility mode, allow extra_path to work
return orig.install.handle_extra_path(self)
# Ignore extra_path when installing an egg (or being run by another
        # command without --root or --single-version-externally-managed)
self.path_file = None
self.extra_dirs = ''
def run(self):
# Explicit request for old-style install? Just do it
if self.old_and_unmanageable or self.single_version_externally_managed:
return orig.install.run(self)
if not self._called_from_setup(inspect.currentframe()):
# Run in backward-compatibility mode to support bdist_* commands.
orig.install.run(self)
else:
self.do_egg_install()
@staticmethod
def _called_from_setup(run_frame):
"""
Attempt to detect whether run() was called from setup() or by another
command. If called by setup(), the parent caller will be the
'run_command' method in 'distutils.dist', and *its* caller will be
the 'run_commands' method. If called any other way, the
immediate caller *might* be 'run_command', but it won't have been
called by 'run_commands'. Return True in that case or if a call stack
is unavailable. Return False otherwise.
"""
if run_frame is None:
msg = "Call stack not available. bdist_* commands may fail."
warnings.warn(msg)
if platform.python_implementation() == 'IronPython':
msg = "For best results, pass -X:Frames to enable call stack."
warnings.warn(msg)
return True
res = inspect.getouterframes(run_frame)[2]
caller, = res[:1]
info = inspect.getframeinfo(caller)
caller_module = caller.f_globals.get('__name__', '')
return (
caller_module == 'distutils.dist'
and info.function == 'run_commands'
)
def do_egg_install(self):
easy_install = self.distribution.get_command_class('easy_install')
cmd = easy_install(
self.distribution, args="x", root=self.root, record=self.record,
)
cmd.ensure_finalized() # finalize before bdist_egg munges install cmd
cmd.always_copy_from = '.' # make sure local-dir eggs get installed
# pick up setup-dir .egg files only: no .egg-info
cmd.package_index.scan(glob.glob('*.egg'))
self.run_command('bdist_egg')
args = [self.distribution.get_command_obj('bdist_egg').egg_output]
if setuptools.bootstrap_install_from:
# Bootstrap self-installation of setuptools
args.insert(0, setuptools.bootstrap_install_from)
cmd.args = args
cmd.run(show_deprecation=False)
setuptools.bootstrap_install_from = None
# XXX Python 3.1 doesn't see _nc if this is inside the class
install.sub_commands = (
[cmd for cmd in orig.install.sub_commands if cmd[0] not in install._nc] +
install.new_commands
)
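# --- Illustrative sketch (editor's addition; not part of setuptools) ---
# The heart of _called_from_setup() above is frame inspection. A minimal,
# hedged reproduction of the idea: report the module and function two
# frames above the caller. The name is hypothetical, and the call fails
# with IndexError if fewer than two outer frames exist.
def _demo_caller_info():
    frame = inspect.currentframe()
    caller = inspect.getouterframes(frame)[2].frame
    info = inspect.getframeinfo(caller)
    return caller.f_globals.get('__name__', ''), info.function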
| Django-locallibrary/env/Lib/site-packages/setuptools/command/install.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/setuptools/command/install.py",
"repo_id": "Django-locallibrary",
"token_count": 1916
} | 40 |
import sys
import marshal
import contextlib
from distutils.version import StrictVersion
from .py33compat import Bytecode
from .py27compat import find_module, PY_COMPILED, PY_FROZEN, PY_SOURCE
from . import py27compat
__all__ = [
'Require', 'find_module', 'get_module_constant', 'extract_constant'
]
class Require:
"""A prerequisite to building or installing a distribution"""
def __init__(
self, name, requested_version, module, homepage='',
attribute=None, format=None):
if format is None and requested_version is not None:
format = StrictVersion
if format is not None:
requested_version = format(requested_version)
if attribute is None:
attribute = '__version__'
self.__dict__.update(locals())
del self.self
def full_name(self):
"""Return full package/distribution name, w/version"""
if self.requested_version is not None:
return '%s-%s' % (self.name, self.requested_version)
return self.name
def version_ok(self, version):
"""Is 'version' sufficiently up-to-date?"""
return self.attribute is None or self.format is None or \
str(version) != "unknown" and version >= self.requested_version
def get_version(self, paths=None, default="unknown"):
"""Get version number of installed module, 'None', or 'default'
Search 'paths' for module. If not found, return 'None'. If found,
return the extracted version attribute, or 'default' if no version
attribute was specified, or the value cannot be determined without
importing the module. The version is formatted according to the
requirement's version format (if any), unless it is 'None' or the
supplied 'default'.
"""
if self.attribute is None:
try:
f, p, i = find_module(self.module, paths)
if f:
f.close()
return default
except ImportError:
return None
v = get_module_constant(self.module, self.attribute, default, paths)
if v is not None and v is not default and self.format is not None:
return self.format(v)
return v
def is_present(self, paths=None):
"""Return true if dependency is present on 'paths'"""
return self.get_version(paths) is not None
def is_current(self, paths=None):
"""Return true if dependency is present and up-to-date on 'paths'"""
version = self.get_version(paths)
if version is None:
return False
return self.version_ok(version)
def maybe_close(f):
@contextlib.contextmanager
def empty():
yield
return
if not f:
return empty()
return contextlib.closing(f)
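# --- Illustrative sketch (editor's addition; not part of setuptools) ---
# maybe_close() lets callers write a single ``with`` block whether or not
# find_module() returned an open file. A hypothetical demo:
def _demo_maybe_close(path=None):
    f = open(path) if path else None
    with maybe_close(f):  # no-op context manager when f is None
        return f.read() if f else None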
def get_module_constant(module, symbol, default=-1, paths=None):
"""Find 'module' by searching 'paths', and extract 'symbol'
Return 'None' if 'module' does not exist on 'paths', or it does not define
'symbol'. If the module defines 'symbol' as a constant, return the
constant. Otherwise, return 'default'."""
try:
f, path, (suffix, mode, kind) = info = find_module(module, paths)
except ImportError:
# Module doesn't exist
return None
with maybe_close(f):
if kind == PY_COMPILED:
f.read(8) # skip magic & date
code = marshal.load(f)
elif kind == PY_FROZEN:
code = py27compat.get_frozen_object(module, paths)
elif kind == PY_SOURCE:
code = compile(f.read(), path, 'exec')
else:
# Not something we can parse; we'll have to import it. :(
imported = py27compat.get_module(module, paths, info)
return getattr(imported, symbol, None)
return extract_constant(code, symbol, default)
def extract_constant(code, symbol, default=-1):
"""Extract the constant value of 'symbol' from 'code'
If the name 'symbol' is bound to a constant value by the Python code
object 'code', return that value. If 'symbol' is bound to an expression,
return 'default'. Otherwise, return 'None'.
Return value is based on the first assignment to 'symbol'. 'symbol' must
be a global, or at least a non-"fast" local in the code block. That is,
only 'STORE_NAME' and 'STORE_GLOBAL' opcodes are checked, and 'symbol'
must be present in 'code.co_names'.
"""
if symbol not in code.co_names:
# name's not there, can't possibly be an assignment
return None
name_idx = list(code.co_names).index(symbol)
STORE_NAME = 90
STORE_GLOBAL = 97
LOAD_CONST = 100
const = default
for byte_code in Bytecode(code):
op = byte_code.opcode
arg = byte_code.arg
if op == LOAD_CONST:
const = code.co_consts[arg]
elif arg == name_idx and (op == STORE_NAME or op == STORE_GLOBAL):
return const
else:
const = default
def _update_globals():
"""
Patch the globals to remove the objects not available on some platforms.
XXX it'd be better to test assertions about bytecode instead.
"""
if not sys.platform.startswith('java') and sys.platform != 'cli':
return
incompatible = 'extract_constant', 'get_module_constant'
for name in incompatible:
del globals()[name]
__all__.remove(name)
_update_globals()
| Django-locallibrary/env/Lib/site-packages/setuptools/depends.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/setuptools/depends.py",
"repo_id": "Django-locallibrary",
"token_count": 2227
} | 41 |
import os
from distutils import log
import itertools
from setuptools.extern.six.moves import map
flatten = itertools.chain.from_iterable
class Installer:
nspkg_ext = '-nspkg.pth'
def install_namespaces(self):
nsp = self._get_all_ns_packages()
if not nsp:
return
filename, ext = os.path.splitext(self._get_target())
filename += self.nspkg_ext
self.outputs.append(filename)
log.info("Installing %s", filename)
lines = map(self._gen_nspkg_line, nsp)
if self.dry_run:
# always generate the lines, even in dry run
list(lines)
return
with open(filename, 'wt') as f:
f.writelines(lines)
def uninstall_namespaces(self):
filename, ext = os.path.splitext(self._get_target())
filename += self.nspkg_ext
if not os.path.exists(filename):
return
log.info("Removing %s", filename)
os.remove(filename)
def _get_target(self):
return self.target
_nspkg_tmpl = (
"import sys, types, os",
"has_mfs = sys.version_info > (3, 5)",
"p = os.path.join(%(root)s, *%(pth)r)",
"importlib = has_mfs and __import__('importlib.util')",
"has_mfs and __import__('importlib.machinery')",
(
"m = has_mfs and "
"sys.modules.setdefault(%(pkg)r, "
"importlib.util.module_from_spec("
"importlib.machinery.PathFinder.find_spec(%(pkg)r, "
"[os.path.dirname(p)])))"
),
(
"m = m or "
"sys.modules.setdefault(%(pkg)r, types.ModuleType(%(pkg)r))"
),
"mp = (m or []) and m.__dict__.setdefault('__path__',[])",
"(p not in mp) and mp.append(p)",
)
"lines for the namespace installer"
_nspkg_tmpl_multi = (
'm and setattr(sys.modules[%(parent)r], %(child)r, m)',
)
"additional line(s) when a parent package is indicated"
def _get_root(self):
return "sys._getframe(1).f_locals['sitedir']"
def _gen_nspkg_line(self, pkg):
# ensure pkg is not a unicode string under Python 2.7
pkg = str(pkg)
pth = tuple(pkg.split('.'))
root = self._get_root()
tmpl_lines = self._nspkg_tmpl
parent, sep, child = pkg.rpartition('.')
if parent:
tmpl_lines += self._nspkg_tmpl_multi
return ';'.join(tmpl_lines) % locals() + '\n'
def _get_all_ns_packages(self):
"""Return sorted list of all package namespaces"""
pkgs = self.distribution.namespace_packages or []
return sorted(flatten(map(self._pkg_names, pkgs)))
@staticmethod
def _pkg_names(pkg):
"""
Given a namespace package, yield the components of that
package.
>>> names = Installer._pkg_names('a.b.c')
>>> set(names) == set(['a', 'a.b', 'a.b.c'])
True
"""
parts = pkg.split('.')
while parts:
yield '.'.join(parts)
parts.pop()
class DevelopInstaller(Installer):
def _get_root(self):
return repr(str(self.egg_path))
def _get_target(self):
return self.egg_link
| Django-locallibrary/env/Lib/site-packages/setuptools/namespaces.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/setuptools/namespaces.py",
"repo_id": "Django-locallibrary",
"token_count": 1569
} | 42 |
@echo off
rem This file is UTF-8 encoded, so we need to update the current code page while executing it
for /f "tokens=2 delims=:." %%a in ('"%SystemRoot%\System32\chcp.com"') do (
set _OLD_CODEPAGE=%%a
)
if defined _OLD_CODEPAGE (
"%SystemRoot%\System32\chcp.com" 65001 > nul
)
set VIRTUAL_ENV=D:\Django-Apps\Local Library2\env
if not defined PROMPT set PROMPT=$P$G
if defined _OLD_VIRTUAL_PROMPT set PROMPT=%_OLD_VIRTUAL_PROMPT%
if defined _OLD_VIRTUAL_PYTHONHOME set PYTHONHOME=%_OLD_VIRTUAL_PYTHONHOME%
set _OLD_VIRTUAL_PROMPT=%PROMPT%
set PROMPT=(env) %PROMPT%
if defined PYTHONHOME set _OLD_VIRTUAL_PYTHONHOME=%PYTHONHOME%
set PYTHONHOME=
if defined _OLD_VIRTUAL_PATH set PATH=%_OLD_VIRTUAL_PATH%
if not defined _OLD_VIRTUAL_PATH set _OLD_VIRTUAL_PATH=%PATH%
set PATH=%VIRTUAL_ENV%\Scripts;%PATH%
:END
if defined _OLD_CODEPAGE (
"%SystemRoot%\System32\chcp.com" %_OLD_CODEPAGE% > nul
set _OLD_CODEPAGE=
)
| Django-locallibrary/env/Scripts/activate.bat/0 | {
"file_path": "Django-locallibrary/env/Scripts/activate.bat",
"repo_id": "Django-locallibrary",
"token_count": 421
} | 43 |
//+------------------------------------------------------------------+
//| CrashClassifyScript.mq5 |
//| Copyright 2022, Omega Joctan. |
//| https://www.mql5.com/users/omegajoctan |
//+------------------------------------------------------------------+
#property copyright "Copyright 2022, Omega Joctan."
#property link "https://www.mql5.com/users/omegajoctan"
#property version "1.00"
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
#include "LogisticRegressionLib.mqh";
CLogisticRegression logreg; //logistic regression class
//+------------------------------------------------------------------+
//| Script program start function |
//+------------------------------------------------------------------+
void OnStart()
{
//---
double stock_prices[];
string dates[];
string file_name = "NFLX.csv";
StoreDataToArray(2,stock_prices,file_name);
StoreDataToArray(1,dates,file_name);
Print("stock prices size = ",ArraySize(stock_prices));
//---
int crash[];
if (ArraySize(stock_prices)>1)
DetectCrash(stock_prices,crash);
for (int i=1; i<ArraySize(stock_prices); i++)
{
Print(" DATE ",dates[i]," TREND ",crash[i-1]);
}
StoreArrayToCSV(crash,"Trend",dates,"DATE","Netflix Trends.csv",",");
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
void DetectCrash(double &prices[], int& out_binary[])
{
double prev_high = prices[0];
ArrayResize(out_binary,ArraySize(prices)-1); //we reduce the size by one since we ignore the current we predict the previous one
for (int i=1; i<ArraySize(prices); i++)
{
int prev = i-1;
if (prices[i] >= prev_high)
prev_high = prices[i]; //grab the highest price
double percent_crash = ((prev_high - prices[i]) / prev_high) * 100.0; //convert crash to percentage
//printf("crash percentage %.2f high price %.4f curr price %.4f ", percent_crash,prev_high,prices[i]);
//based on the definition of a crash; markets has to fall more than 10% percent
if (percent_crash > 10)
out_binary[prev] = 0; //downtrend (crash)
else
out_binary[prev] = 1; //uptrend (no crash )
}
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
void StoreArrayToCSV(int& Array[], string label, string &stArray[], string label2, string filename, string delimiter)
{
FileDelete(filename);
int handle = FileOpen(filename,FILE_CSV|FILE_READ|FILE_WRITE,delimiter);
if (handle == INVALID_HANDLE)
Print("Invalid csv handle Err=",GetLastError());
//---
if (handle>0)
{
FileWrite(handle,label,label2);
for (int i=0; i<ArraySize(Array); i++)
{
string str1 = DoubleToString(Array[i],Digits());
FileWrite(handle,str1,stArray[i+1]);
}
}
FileClose(handle);
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
void StoreDataToArray(int from_column_number,double &toArr[],string file_name,string delimiter=",")
{
int counter=0;
int column = 0, rows=0;
int handle = FileOpen(file_name,FILE_CSV|FILE_READ|FILE_WRITE|FILE_ANSI,delimiter,CP_UTF8);
if (handle == INVALID_HANDLE)
Print("Invalid csv handle Err =",GetLastError());
while (!FileIsEnding(handle))
{
string data = FileReadString(handle);
column++;
//---
if (column==from_column_number)
{
if (rows>=1) //Avoid the first column which contains the column's header
{
counter++;
ArrayResize(toArr,counter);
toArr[counter-1]=(double)data;
}
}
//---
if (FileIsLineEnding(handle))
{
rows++;
column=0;
}
}
FileClose(handle);
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
void StoreDataToArray(int from_column_number,string &toArr[],string file_name,string delimiter=",")
{
int counter=0;
int column = 0, rows=0;
int handle = FileOpen(file_name,FILE_CSV|FILE_READ|FILE_WRITE|FILE_ANSI,delimiter,CP_UTF8);
if (handle == INVALID_HANDLE)
Print("Invalid csv handle Err=",GetLastError());
while (!FileIsEnding(handle))
{
string data = FileReadString(handle);
column++;
//---
if (column==from_column_number)
{
if (rows>=1) //Avoid the first column which contains the column's header
{
counter++;
ArrayResize(toArr,counter);
toArr[counter-1]=data;
}
}
//---
if (FileIsLineEnding(handle))
{
rows++;
column=0;
}
}
FileClose(handle);
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
| LogisticRegression-MQL5-and-python/LogisticRegression mql5/CrashClassifyScript.mq5/0 | {
"file_path": "LogisticRegression-MQL5-and-python/LogisticRegression mql5/CrashClassifyScript.mq5",
"repo_id": "LogisticRegression-MQL5-and-python",
"token_count": 2924
} | 44 |
## Linear Discriminant Analysis (LDA)
This documentation explains the `CLDA` class in MQL5, which implements **Linear Discriminant Analysis (LDA)** for dimensionality reduction and classification tasks.
**I. LDA Theory:**
LDA is a supervised learning technique that aims to find **linear projections** of the data that **maximize the separation between different classes** while **minimizing variance within each class**. This makes it particularly useful for **classification** problems where the goal is to distinguish between distinct groups.
**II. CLDA Class:**
The `CLDA` class provides functionalities for performing LDA in MQL5:
**Public Functions:**
* `CLDA(uint k=NULL, lda_criterion CRITERION_=CRITERION_SCREE_PLOT, double reg_param =1e-6)` Constructor, allows setting hyperparameters:
* `k`: Number of components to extract (default: None, determined automatically).
* `CRITERION_`: Criterion for selecting the best components (default: CRITERION_SCREE_PLOT).
* `reg_param`: Regularization parameter to prevent overfitting (default: 1e-6).
* `~CLDA(void)`: Destructor.
* `matrix fit_transform(const matrix &x, const vector &y):` Trains the model on the provided data (`x` - independent variables, `y` - class labels) and returns the transformed data.
* `matrix transform(const matrix &x):` Transforms new data (`x`) using the trained model.
* `vector transform(const vector &x):` Transforms a single new data point (`x`) using the trained model.
**Internal Functions:**
* `calculate_variance(vector &eigen_values, double threshold=0.95)`: Calculates the number of components based on explained variance (optional).
* `calculate_kaiser(vector &eigen_values)`: Calculates the number of components based on Kaiser's criterion (optional).
* `extract_components(vector &eigen_values, double threshold=0.95)`: Extracts the selected number of components based on the chosen criterion.
**III. Additional Notes:**
* The `m_criterion` member variable allows choosing the criterion for selecting the number of components:
* **CRITERION_VARIANCE:** Retains components that explain a specific percentage of variance (set by `threshold`).
* **CRITERION_KAISER:** Retains components with eigenvalues greater than 1.
* **CRITERION_SCREE_PLOT:** Analyzes the scree plot to visually determine the number of components with significant eigenvalues.
* The `m_regparam` member variable allows for regularization to prevent overfitting.
* The class internally uses the `CPlots` class (not documented here) for potential visualization purposes (e.g., scree plot).
## Principal Component Analysis (PCA)
This documentation explains the `CPCA` class in MQL5, which implements **Principal Component Analysis (PCA)** for dimensionality reduction and data visualization tasks.
**I. PCA Theory:**
PCA is an unsupervised learning technique that aims to find a **linear transformation** of the data that captures the most **variance** in a **reduced number of dimensions**. This allows for:
* **Dimensionality reduction:** Reduce the number of features while retaining most of the information in the data.
* **Data visualization:** Project high-dimensional data onto a lower-dimensional space for easier visualization.
**II. CPCA Class:**
The `CPCA` class provides functionalities for performing PCA in MQL5:
**Public Functions:**
* `CPCA(int k=0, criterion CRITERION_=CRITERION_SCREE_PLOT)` Constructor, allows setting hyperparameters:
* `k`: Number of components to extract (default: 0, determined automatically using the chosen criterion).
* `CRITERION_`: Criterion for selecting the best components (default: CRITERION_SCREE_PLOT).
* `~CPCA(void)` Destructor.
* `matrix fit_transform(matrix &X)` Trains the model on the provided data (`X`) and returns the transformed data.
* `matrix transform(matrix &X)` Transforms new data (`X`) using the trained model.
* `vector transform(vector &X)` Transforms a single new data point (`X`) using the trained model.
* `bool save(string dir)` Saves the model parameters to a specified directory (`dir`).
* `bool load(string dir)` Loads the model parameters from a specified directory (`dir`).
**Internal Functions:**
* `extract_components(vector &eigen_values, double threshold=0.95)`: Extracts the selected number of components based on the chosen criterion (similar to the `CLDA` class).
**III. Additional Notes:**
* The `m_criterion` member variable allows choosing the criterion for selecting the number of components (same options as in `CLDA`).
* The class internally uses the `CPlots` class (not documented here) for potential visualization purposes.
* Saving and loading functionalities allow for model persistence and reusability.
## Non-Negative Matrix Factorization (NMF)
This documentation explains the `CNMF` class in MQL5, which implements **Non-Negative Matrix Factorization (NMF)** for data decomposition and feature extraction tasks.
**I. NMF Theory:**
NMF is a dimensionality reduction technique that decomposes a **non-negative matrix** `V` into two **non-negative matrices** `W` and `H`:
* `V` (shape: `m x n`): The input data matrix, where `m` is the number of data points and `n` is the number of features.
* `W` (shape: `m x k`): The **basis matrix**, where `k` is the number of chosen components and each row represents a **basis vector**.
* `H` (shape: `k x n`): The **coefficient matrix**, where each row corresponds to a basis vector and each element represents the **contribution** of that basis vector to a specific feature in the original data.
By finding a suitable factorization, NMF aims to represent the original data as a **linear combination** of basis vectors while preserving the non-negative nature of the input data. This allows for:
* **Dimensionality reduction:** Reduce the number of features while still capturing essential information.
* **Feature extraction:** Identify underlying factors or patterns in the data through the basis vectors.
* **Data interpretation:** Gain insights into the data by analyzing the non-negative contributions of basis vectors to each feature.
**II. CNMF Class:**
The `CNMF` class provides functionalities for performing NMF :
**Public Functions:**
* `CNMF(uint max_iter=100, double tol=1e-4, int random_state=-1)` Constructor, allows setting hyperparameters:
* `max_iter`: Maximum number of iterations for the NMF algorithm (default: 100).
* `tol`: Tolerance for convergence (default: 1e-4).
* `random_state`: Random seed for initialization (default: -1, uses random seed).
* `~CNMF(void)` Destructor.
* `matrix fit_transform(matrix &X, uint k=2)` Trains the model on the provided data (`X`) and returns the decomposed components (`W` and `H`).
* `k`: Number of components to extract (default: 2).
* `matrix transform(matrix &X)` Transforms new data (`X`) using the trained model.
* `vector transform(vector &X)` Transforms a single new data point (`X`) using the trained model.
* `uint select_best_components(matrix &X)` Analyzes the input data and suggests an appropriate number of components (implementation details might vary depending on the specific NMF algorithm used).
**III. Additional Notes:**
* The internal implementation details of the NMF algorithm might vary depending on the specific chosen library or technique.
* Choosing the appropriate number of components is crucial for optimal performance and avoiding overfitting. The `select_best_components` function is helpful as a starting point, but further evaluation and domain knowledge might be needed for optimal selection.
By understanding the theoretical foundation and functionalities of the `CNMF` class, MQL5 users can leverage NMF for various tasks, including:
* Dimensionality reduction for data visualization or machine learning algorithms that require lower-dimensional inputs.
* Feature extraction to identify underlying structure or patterns in non-negative data.
* Topic modeling for analyzing text data or other types of document collections.
## Truncated Singular Value Decomposition (Truncated SVD)
This documentation explains the `CTruncatedSVD` class in MQL5, which implements **Truncated Singular Value Decomposition (Truncated SVD)** for dimensionality reduction and data visualization tasks.
**I. Truncated SVD Theory:**
Truncated SVD is a dimensionality reduction technique based on **Singular Value Decomposition (SVD)**. SVD decomposes a matrix `X` into three matrices:
* `U`: A left singular vectors matrix.
* `Σ`: A diagonal matrix containing the singular values of `X`.
* `V^T`: A right singular vectors matrix (transposed).
Truncated SVD retains only **k** top singular values from `Σ` and their corresponding columns from `U` and `V^T`. This creates a lower-dimensional representation of the original data that captures most of the variance.
**II. CTruncatedSVD Class:**
The `CTruncatedSVD` class provides functionalities for performing Truncated SVD in MQL5:
**Public Functions:**
* `CTruncatedSVD(uint k=0):` Constructor, allows setting the number of components (`k`) to retain (default: 0, determined automatically).
* `~CTruncatedSVD(void):` Destructor.
* `matrix fit_transform(matrix& X):` Trains the model on the provided data (`X`) and returns the transformed data.
* `matrix transform(matrix &X):` Transforms new data (`X`) using the trained model.
* `vector transform(vector &X):` Transforms a single new data point (`X`) using the trained model.
* `ulong _select_n_components(vector &singular_values):` (Internal function, not directly exposed to users) Determines the number of components based on the explained variance ratio (implementation details may vary).
**III. Additional Notes:**
* The class internally uses the `CPlots` class (not documented here) for potential visualization purposes.
* Choosing the appropriate number of components (`k`) is crucial for balancing dimensionality reduction and information preservation. The `_select_n_components` function might use different criteria (e.g., explained variance ratio) for automatic selection, and user discretion might be needed depending on the specific task.
By understanding the theoretical foundation and functionalities of the `CTruncatedSVD` class, MQL5 users can leverage Truncated SVD for:
* **Dimensionality reduction:** Reduce the number of features while retaining most of the information.
* **Data visualization:** Project high-dimensional data onto a lower-dimensional space for easier visualization.
* **Feature extraction:** Identify underlying factors or patterns in the data through the singular vectors (although not explicitly mentioned in the class functionalities).
It's important to note that the specific implementation details of the `CTruncatedSVD` class and the `_select_n_components` function might vary depending on the chosen MQL5 library or framework. Refer to the specific documentation of your chosen library for the most accurate and up-to-date information.
**Reference:**
* [Data Science and Machine Learning (Part 18): The battle of Mastering Market Complexity, Truncated SVD Versus NMF](https://www.mql5.com/en/articles/13968)
* [Data Science and Machine Learning(Part 20) : Algorithmic Trading Insights, A Faceoff Between LDA and PCA in MQL5](https://www.mql5.com/en/articles/14128)
| MALE5/Dimensionality Reduction/README.md/0 | {
"file_path": "MALE5/Dimensionality Reduction/README.md",
"repo_id": "MALE5",
"token_count": 2987
} | 45 |
# -*- coding: utf-8 -*-
"""
Created on Tue Feb 14 10:37:36 2023
@author: Omega Joctan
"""
import pandas as pd
import matplotlib.pyplot as plt
import seaborn as sns
# Change this directory to be the one containing the Files
directory = r"C:\Users\Omega Joctan\AppData\Roaming\MetaQuotes\Terminal\F4F6C6D7A7155578A6DEA66D12B1D40D\MQL5\Files\NAIVE BAYES"
data = pd.read_csv(f"{directory}\\vars.csv")
for var in data:
sns.distplot(data[var])
plt.show() | MALE5/Naive Bayes/naive bayes visuals.py/0 | {
"file_path": "MALE5/Naive Bayes/naive bayes visuals.py",
"repo_id": "MALE5",
"token_count": 195
} | 46 |
//+------------------------------------------------------------------+
//| metrics.mqh |
//| Copyright 2022, Fxalgebra.com |
//| https://www.mql5.com/en/users/omegajoctan |
//+------------------------------------------------------------------+
#property copyright "Copyright 2022, Fxalgebra.com"
#property link "https://www.mql5.com/en/users/omegajoctan"
//+------------------------------------------------------------------+
//| defines |
//| |
//| |
//+------------------------------------------------------------------+
#include <MALE5\MatrixExtend.mqh>
#include <MALE5\MqPlotLib\plots.mqh>
struct roc_curve_struct
{
vector TPR,
FPR,
Thresholds;
};
struct confusion_matrix_struct
{
matrix MATRIX;
vector CLASSES;
vector TP,
TN,
FP,
FN;
};
enum regression_metrics
{
METRIC_R_SQUARED, // R-squared
METRIC_ADJUSTED_R, // Adjusted R-squared
METRIC_RSS, // Residual Sum of Squares
METRIC_MSE, // Mean Squared Error
METRIC_RMSE, // Root Mean Squared Error
METRIC_MAE // Mean Absolute Error
};
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
class Metrics
{
protected:
static int SearchPatterns(const vector &True, int value_A, const vector &B, int value_B);
static confusion_matrix_struct confusion_matrix(const vector &True, const vector &Preds);
public:
Metrics(void);
~Metrics(void);
//--- Regression metrics
static double r_squared(const vector &True, const vector &Pred);
static double adjusted_r(const vector &True, const vector &Pred, uint indep_vars = 1);
static double rss(const vector &True, const vector &Pred);
static double mse(const vector &True, const vector &Pred);
static double rmse(const vector &True, const vector &Pred);
static double mae(const vector &True, const vector &Pred);
static double RegressionMetric(const vector &True, const vector &Pred, regression_metrics METRIC_);
//--- Classification metrics
static double accuracy_score(const vector &True, const vector &Pred);
static vector accuracy(const vector &True, const vector &Preds);
static vector precision(const vector &True, const vector &Preds);
static vector recall(const vector &True, const vector &Preds);
static vector f1_score(const vector &True, const vector &Preds);
static vector specificity(const vector &True, const vector &Preds);
static roc_curve_struct roc_curve(const vector &True, const vector &Preds, bool show_roc_curve=false);
static void classification_report(const vector &True, const vector &Pred, bool show_roc_curve=false);
};
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
Metrics::Metrics(void)
{
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
Metrics::~Metrics(void)
{
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
double Metrics::r_squared(const vector &True, const vector &Pred)
{
return(Pred.RegressionMetric(True, REGRESSION_R2));
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
double Metrics::adjusted_r(const vector &True, const vector &Pred, uint indep_vars = 1)
{
if(True.Size() != Pred.Size())
{
Print(__FUNCTION__, " Vector True and P are not equal in size ");
return(0);
}
double r2 = r_squared(True, Pred);
ulong N = Pred.Size();
return(1 - ((1 - r2) * (N - 1)) / (N - indep_vars - 1));
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
confusion_matrix_struct Metrics::confusion_matrix(const vector &True, const vector &Preds)
{
confusion_matrix_struct confusion_matrix;
vector classes = MatrixExtend::Unique(True);
confusion_matrix.CLASSES = classes;
//--- Fill the confusion matrix
matrix MATRIX(classes.Size(), classes.Size());
MATRIX.Fill(0.0);
for(ulong i = 0; i < classes.Size(); i++)
for(ulong j = 0; j < classes.Size(); j++)
MATRIX[i][j] = SearchPatterns(True, (int)classes[i], Preds, (int)classes[j]);
confusion_matrix.MATRIX = MATRIX;
confusion_matrix.TP = MATRIX.Diag();
confusion_matrix.FP = MATRIX.Sum(0) - confusion_matrix.TP;
confusion_matrix.FN = MATRIX.Sum(1) - confusion_matrix.TP;
confusion_matrix.TN = MATRIX.Sum() - (confusion_matrix.TP + confusion_matrix.FP + confusion_matrix.FN);
return confusion_matrix;
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
vector Metrics::accuracy(const vector &True,const vector &Preds)
{
confusion_matrix_struct conf_m = confusion_matrix(True, Preds);
return (conf_m.TP + conf_m.TN) / conf_m.MATRIX.Sum();
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
vector Metrics::precision(const vector &True,const vector &Preds)
{
confusion_matrix_struct conf_m = confusion_matrix(True, Preds);
return conf_m.TP / (conf_m.TP + conf_m.FP + DBL_EPSILON);
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
vector Metrics::f1_score(const vector &True,const vector &Preds)
{
vector precision = precision(True, Preds);
vector recall = recall(True, Preds);
return 2 * precision * recall / (precision + recall + DBL_EPSILON);
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
vector Metrics::recall(const vector &True,const vector &Preds)
{
confusion_matrix_struct conf_m = confusion_matrix(True, Preds);
return conf_m.TP / (conf_m.TP + conf_m.FN + DBL_EPSILON);
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
vector Metrics::specificity(const vector &True,const vector &Preds)
{
confusion_matrix_struct conf_m = confusion_matrix(True, Preds);
return conf_m.TN / (conf_m.TN + conf_m.FP + DBL_EPSILON);
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
roc_curve_struct Metrics::roc_curve(const vector &True,const vector &Preds, bool show_roc_curve=false)
{
roc_curve_struct roc;
confusion_matrix_struct conf_m = confusion_matrix(True, Preds);
roc.TPR = recall(True, Preds);
roc.FPR = conf_m.FP / (conf_m.FP + conf_m.TN + DBL_EPSILON);
if (show_roc_curve)
{
CPlots plt;
plt.Plot("Roc Curve",roc.FPR,roc.TPR,"roc_curve","False Positive Rate(FPR)","True Positive Rate(TPR)");
while (MessageBox("Close or Cancel ROC CURVE to proceed","Roc Curve",MB_OK)<0)
Sleep(1);
}
return roc;
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
double Metrics::accuracy_score(const vector &True, const vector &Preds)
{
confusion_matrix_struct conf_m = confusion_matrix(True, Preds);
return conf_m.MATRIX.Diag().Sum() / (conf_m.MATRIX.Sum() + DBL_EPSILON);
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
void Metrics::classification_report(const vector &True, const vector &Pred, bool show_roc_curve=false)
{
vector accuracy = accuracy(True, Pred);
vector precision = precision(True, Pred);
vector specificity = specificity(True, Pred);
vector recall = recall(True, Pred);
vector f1_score = f1_score(True, Pred);
double acc = accuracy_score(True, Pred);
confusion_matrix_struct conf_m = confusion_matrix(True, Pred);
//--- support
ulong size = conf_m.MATRIX.Rows();
vector support(size);
for(ulong i = 0; i < size; i++)
support[i] = NormalizeDouble(MathIsValidNumber(conf_m.MATRIX.Row(i).Sum()) ? conf_m.MATRIX.Row(i).Sum() : 0, 8);
int total_size = (int)conf_m.MATRIX.Sum();
//--- Avg and w avg
vector avg, w_avg;
avg.Resize(5);
w_avg.Resize(5);
avg[0] = precision.Mean();
avg[1] = recall.Mean();
avg[2] = specificity.Mean();
avg[3] = f1_score.Mean();
avg[4] = total_size;
//--- w avg
vector support_prop = support / double(total_size + 1e-10);
vector c = precision * support_prop;
w_avg[0] = c.Sum();
c = recall * support_prop;
w_avg[1] = c.Sum();
c = specificity * support_prop;
w_avg[2] = c.Sum();
c = f1_score * support_prop;
w_avg[3] = c.Sum();
w_avg[4] = (int)total_size;
//--- Report
string report = "\n[CLS][ACC] \t\t\t\t\tprecision \trecall \tspecificity \tf1 score \tsupport";
for(ulong i = 0; i < size; i++)
{
report += "\n\t[" + string(conf_m.CLASSES[i])+"]["+DoubleToString(accuracy[i], 2)+"]";
//for (ulong j=0; j<3; j++)
report += StringFormat("\t\t\t\t\t %.2f \t\t\t\t\t %.2f \t\t\t\t\t %.2f \t\t\t\t\t %.2f \t\t\t\t %d", precision[i], recall[i], specificity[i], f1_score[i], (int)support[i]);
}
report += "\n";
report += StringFormat("\naccuracy\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t\t %.2f \t\t\t\t %d",acc,(int)conf_m.MATRIX.Sum());
report += StringFormat("\naverage\t\t\t\t\t\t\t\t\t %.2f \t\t\t\t\t %.2f \t\t\t\t\t %.2f \t\t\t\t\t %.2f \t\t\t\t %d", avg[0], avg[1], avg[2], avg[3], (int)avg[4]);
report += StringFormat("\nWeighed avg\t\t\t \t %.2f \t\t\t\t\t %.2f \t\t\t\t\t %.2f \t\t\t\t\t %.2f \t\t\t\t %d", w_avg[0], w_avg[1], w_avg[2], w_avg[3], (int)w_avg[4]);
Print("Confusion Matrix\n", conf_m.MATRIX);
Print("\nClassification Report\n", report);
roc_curve(True, Pred, show_roc_curve);
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
double Metrics::rss(const vector &True, const vector &Pred)
{
vector c = True - Pred;
c = MathPow(c, 2);
return (c.Sum());
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
double Metrics::mse(const vector &True, const vector &Pred)
{
vector c = True - Pred;
c = MathPow(c, 2);
return(c.Sum() / c.Size());
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
int Metrics::SearchPatterns(const vector &True, int value_A, const vector &B, int value_B)
{
int count=0;
for(ulong i = 0; i < True.Size(); i++)
if(True[i] == value_A && B[i] == value_B)
count++;
return count;
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
double Metrics::rmse(const vector &True, const vector &Pred)
{
return Pred.RegressionMetric(True, REGRESSION_RMSE);
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
double Metrics::mae(const vector &True, const vector &Pred)
{
return Pred.RegressionMetric(True, REGRESSION_MAE);
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
double Metrics::RegressionMetric(const vector &True,const vector &Pred,regression_metrics METRIC_)
{
double err = 0;
switch (METRIC_)
{
case METRIC_MSE:
err = mse(True, Pred);
break;
case METRIC_RMSE:
err = rmse(True, Pred);
break;
case METRIC_MAE:
err = mae(True, Pred);
break;
case METRIC_RSS:
err = rss(True, Pred);
break;
case METRIC_R_SQUARED:
err = r_squared(True, Pred);
break;
case METRIC_ADJUSTED_R:
err = adjusted_r(True, Pred);
break;
default:
break;
}
return err;
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+ | MALE5/metrics.mqh/0 | {
"file_path": "MALE5/metrics.mqh",
"repo_id": "MALE5",
"token_count": 6085
} | 47 |
"""
Django settings for LocalLibrary project.
Generated by 'django-admin startproject' using Django 3.2.3.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
from pathlib import Path
import os
import django_heroku
import dj_database_url
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = os.environ.get('DJANGO_SECRET_KEY', 'cg#p$g+j9tax!#a3cup@1$8obt2_+&k3q+pmu)5%asj6yjpkag')
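# The hard-coded value above is only a development fallback; in production the
# DJANGO_SECRET_KEY environment variable should be set so the real key never
# lives in source control.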
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
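# Note: django_heroku.settings() (called at the bottom of this file) fills in
# ALLOWED_HOSTS for Heroku deployments; the empty list here only matters for
# local runs, where DEBUG being True already permits localhost.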
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'catalog.apps.CatalogConfig',
]
MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    # WhiteNoise should sit directly after SecurityMiddleware so static files
    # are served before the rest of the stack runs (per the WhiteNoise docs).
    'whitenoise.middleware.WhiteNoiseMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'LocalLibrary.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': ['Templates', os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'LocalLibrary.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'LocalLibraryDB',
        'USER': 'postgres',
        'PASSWORD': 'Omg.Database@2021',  # local development credentials only
        'HOST': '127.0.0.1',
        'PORT': '5432',
    }
}
db_from_env = dj_database_url.config(conn_max_age=500)
DATABASES['default'].update(db_from_env)
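# dj_database_url reads the DATABASE_URL environment variable (which Heroku
# Postgres provides) and, when it is set, overrides the local credentials
# above; conn_max_age=500 keeps database connections open for reuse between
# requests instead of reconnecting on every request.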
STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage'
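# CompressedManifestStaticFilesStorage makes WhiteNoise compress static files
# and append a content hash to each filename at collectstatic time, so they
# can be served with far-future cache headers.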
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = BASE_DIR / 'catalog/Static'
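# STATIC_ROOT is the directory `manage.py collectstatic` copies all static
# assets into; WhiteNoise serves files from here in production.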
STATICFILES_DIRS = [
'staticfiles',
]
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
LOGIN_REDIRECT_URL = '/'
EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'
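# The console backend writes outgoing mail (e.g. password-reset messages) to
# stdout instead of actually sending it, which is handy for local development.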
django_heroku.settings(locals()) | Django-locallibrary/LocalLibrary/LocalLibrary/settings.py/0 | {
"file_path": "Django-locallibrary/LocalLibrary/LocalLibrary/settings.py",
"repo_id": "Django-locallibrary",
"token_count": 1662
} | 0 |
{% extends "base_template.html" %}
{% block content %}
<h2>Error 404</h2>
    <h3>The page you are looking for could not be found.</h3>
{% endblock %} | Django-locallibrary/LocalLibrary/catalog/Templates/404.html/0 | {
"file_path": "Django-locallibrary/LocalLibrary/catalog/Templates/404.html",
"repo_id": "Django-locallibrary",
"token_count": 77
} | 1 |
../../Scripts/pip.exe,sha256=RQmcK7BAjYaPI1p53DGO2nEWpy-oOT0skLF12VeoLx4,106365
../../Scripts/pip3.9.exe,sha256=RQmcK7BAjYaPI1p53DGO2nEWpy-oOT0skLF12VeoLx4,106365
../../Scripts/pip3.exe,sha256=RQmcK7BAjYaPI1p53DGO2nEWpy-oOT0skLF12VeoLx4,106365
pip-20.2.3.dist-info/INSTALLER,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
pip-20.2.3.dist-info/LICENSE.txt,sha256=W6Ifuwlk-TatfRU2LR7W1JMcyMj5_y1NkRkOEJvnRDE,1090
pip-20.2.3.dist-info/METADATA,sha256=9mmHP3BezeQwiPj12NdLFspqcxqrf7xqW6OX9PwZSr4,3708
pip-20.2.3.dist-info/RECORD,,
pip-20.2.3.dist-info/REQUESTED,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pip-20.2.3.dist-info/WHEEL,sha256=ADKeyaGyKF5DwBNE0sRE5pvW-bSkFMJfBuhzZ3rceP4,110
pip-20.2.3.dist-info/entry_points.txt,sha256=HtfDOwpUlr9s73jqLQ6wF9V0_0qvUXJwCBz7Vwx0Ue0,125
pip-20.2.3.dist-info/top_level.txt,sha256=zuuue4knoyJ-UwPPXg8fezS7VCrXJQrAP7zeNuwvFQg,4
pip/__init__.py,sha256=NkPibWV383InU5x7DgeQLdL2zhlXTKjJRBMQTSKNYjI,455
pip/__main__.py,sha256=bqCAM1cj1HwHCDx3WJa-LJxOBXimGxE8OjBqAvnhVg0,911
pip/__pycache__/__init__.cpython-39.pyc,,
pip/__pycache__/__main__.cpython-39.pyc,,
pip/_internal/__init__.py,sha256=2si23JBW1erg19xIJ8CD6tfGknz0ijtXmzuXjGfGMGE,495
pip/_internal/__pycache__/__init__.cpython-39.pyc,,
pip/_internal/__pycache__/build_env.cpython-39.pyc,,
pip/_internal/__pycache__/cache.cpython-39.pyc,,
pip/_internal/__pycache__/configuration.cpython-39.pyc,,
pip/_internal/__pycache__/exceptions.cpython-39.pyc,,
pip/_internal/__pycache__/locations.cpython-39.pyc,,
pip/_internal/__pycache__/main.cpython-39.pyc,,
pip/_internal/__pycache__/pyproject.cpython-39.pyc,,
pip/_internal/__pycache__/self_outdated_check.cpython-39.pyc,,
pip/_internal/__pycache__/wheel_builder.cpython-39.pyc,,
pip/_internal/build_env.py,sha256=9_UaQ2fpsBvpKAji27f7bPAi2v3mb0cBvDYcejwFKNM,8088
pip/_internal/cache.py,sha256=pT17VVxgzZK32aqY5FRS8GyAI73LKzNMF8ZelQ7Ojm0,12249
pip/_internal/cli/__init__.py,sha256=FkHBgpxxb-_gd6r1FjnNhfMOzAUYyXoXKJ6abijfcFU,132
pip/_internal/cli/__pycache__/__init__.cpython-39.pyc,,
pip/_internal/cli/__pycache__/autocompletion.cpython-39.pyc,,
pip/_internal/cli/__pycache__/base_command.cpython-39.pyc,,
pip/_internal/cli/__pycache__/cmdoptions.cpython-39.pyc,,
pip/_internal/cli/__pycache__/command_context.cpython-39.pyc,,
pip/_internal/cli/__pycache__/main.cpython-39.pyc,,
pip/_internal/cli/__pycache__/main_parser.cpython-39.pyc,,
pip/_internal/cli/__pycache__/parser.cpython-39.pyc,,
pip/_internal/cli/__pycache__/progress_bars.cpython-39.pyc,,
pip/_internal/cli/__pycache__/req_command.cpython-39.pyc,,
pip/_internal/cli/__pycache__/spinners.cpython-39.pyc,,
pip/_internal/cli/__pycache__/status_codes.cpython-39.pyc,,
pip/_internal/cli/autocompletion.py,sha256=ekGNtcDI0p7rFVc-7s4T9Tbss4Jgb7vsB649XJIblRg,6547
pip/_internal/cli/base_command.py,sha256=bf058xM1HE9QJCNEHExbuTjL0peKg9kSxCNxDtwAh88,9302
pip/_internal/cli/cmdoptions.py,sha256=M_BtuqeyRpZAUUYytts3pguBCF2RaGukVpDPE0niroI,28782
pip/_internal/cli/command_context.py,sha256=ygMVoTy2jpNilKT-6416gFSQpaBtrKRBbVbi2fy__EU,975
pip/_internal/cli/main.py,sha256=Hxc9dZyW3xiDsYZX-_J2cGXT5DWNLNn_Y7o9oUme-Ec,2616
pip/_internal/cli/main_parser.py,sha256=voAtjo4WVPIYeu7Fqabva9SXaB3BjG0gH93GBfe6jHQ,2843
pip/_internal/cli/parser.py,sha256=4FfwW8xB84CrkLs35ud90ZkhCcWyVkx17XD6j3XCW7c,9480
pip/_internal/cli/progress_bars.py,sha256=J1zykt2LI4gbBeXorfYRmYV5FgXhcW4x3r6xE_a7Z7c,9121
pip/_internal/cli/req_command.py,sha256=Eiz8TVzeqzG-40t7qLC1vO-vzjCRvX9C-qXMyfw9D1I,15132
pip/_internal/cli/spinners.py,sha256=PS9s53LB5aDPelIn8FhKerK3bOdgeefFH5wSWJ2PCzI,5509
pip/_internal/cli/status_codes.py,sha256=F6uDG6Gj7RNKQJUDnd87QKqI16Us-t-B0wPF_4QMpWc,156
pip/_internal/commands/__init__.py,sha256=yoLAnmEXjoQgYfDuwsuWG3RzzD19oeHobGEhmpIYsB4,4100
pip/_internal/commands/__pycache__/__init__.cpython-39.pyc,,
pip/_internal/commands/__pycache__/cache.cpython-39.pyc,,
pip/_internal/commands/__pycache__/check.cpython-39.pyc,,
pip/_internal/commands/__pycache__/completion.cpython-39.pyc,,
pip/_internal/commands/__pycache__/configuration.cpython-39.pyc,,
pip/_internal/commands/__pycache__/debug.cpython-39.pyc,,
pip/_internal/commands/__pycache__/download.cpython-39.pyc,,
pip/_internal/commands/__pycache__/freeze.cpython-39.pyc,,
pip/_internal/commands/__pycache__/hash.cpython-39.pyc,,
pip/_internal/commands/__pycache__/help.cpython-39.pyc,,
pip/_internal/commands/__pycache__/install.cpython-39.pyc,,
pip/_internal/commands/__pycache__/list.cpython-39.pyc,,
pip/_internal/commands/__pycache__/search.cpython-39.pyc,,
pip/_internal/commands/__pycache__/show.cpython-39.pyc,,
pip/_internal/commands/__pycache__/uninstall.cpython-39.pyc,,
pip/_internal/commands/__pycache__/wheel.cpython-39.pyc,,
pip/_internal/commands/cache.py,sha256=U3rLjls0AMMO8PxnhXVwIp7Biyvns8-gBThKTH3tX7Y,5676
pip/_internal/commands/check.py,sha256=fqRrz2uKPC8Qsx2rgLygAD2Rbr-qxp1Q55zUoyZzB9Q,1677
pip/_internal/commands/completion.py,sha256=ObssM77quf61qvbuSE6XLwUBdm_WcWIvXFI-Hy1RBsI,3081
pip/_internal/commands/configuration.py,sha256=IN2QBF653sRiRU7-pHTpnZ6_gyiXNKUQkLiLaNRLKNw,9344
pip/_internal/commands/debug.py,sha256=otBZnpnostX2kmYyOl6g6CeCLmk6H00Tsj2CDsCtFXw,7314
pip/_internal/commands/download.py,sha256=EKFlj_ceGUEJj6yCDw7P6w7yUoB16IcNHhT2qnCFDNQ,4918
pip/_internal/commands/freeze.py,sha256=vLBBP1d8wgEXrmlh06hbz_x_Q1mWHUdiWDa9NP2eKLE,3452
pip/_internal/commands/hash.py,sha256=v2nYCiEsEI9nEam1p6GwdG8xyj5gFv-4WrqvNexKmeY,1843
pip/_internal/commands/help.py,sha256=ryuMDt2tc7ic3NJYMjjoNRH5r6LrB2yQVZvehAm8bLs,1270
pip/_internal/commands/install.py,sha256=OXjZCNSioJRfP7YMkJyAWLl7X7-8f6DkhWlhPhG6fXk,27995
pip/_internal/commands/list.py,sha256=2o3rYw37ECrhe4-Bu5s_2C0bwhYgghh4833MxcWAEug,11312
pip/_internal/commands/search.py,sha256=1HPAFU-tmgKrHhr4xNuk3xMoPeSzD_oDvDDiUFZZ15E,5756
pip/_internal/commands/show.py,sha256=r69-G8HIepDKm4SeyeHj0Ez1P9xoihrpVUyXm6NmXYY,6996
pip/_internal/commands/uninstall.py,sha256=Ys8hwFsg0kvvGtLGYG3ibL5BKvURhlSlCX50ZQ-hsHk,3311
pip/_internal/commands/wheel.py,sha256=-HSISE5AV29I752Aqw4DdmulrGd8rB_ZTOdpbJ6T8iM,6419
pip/_internal/configuration.py,sha256=-Gxz2J-KuvxiqWIJ9F-XnYVZ5lKhNk7VO6ondEbH4EM,14115
pip/_internal/distributions/__init__.py,sha256=ECBUW5Gtu9TjJwyFLvim-i6kUMYVuikNh9I5asL6tbA,959
pip/_internal/distributions/__pycache__/__init__.cpython-39.pyc,,
pip/_internal/distributions/__pycache__/base.cpython-39.pyc,,
pip/_internal/distributions/__pycache__/installed.cpython-39.pyc,,
pip/_internal/distributions/__pycache__/sdist.cpython-39.pyc,,
pip/_internal/distributions/__pycache__/wheel.cpython-39.pyc,,
pip/_internal/distributions/base.py,sha256=ruprpM_L2T2HNi3KLUHlbHimZ1sWVw-3Q0Lb8O7TDAI,1425
pip/_internal/distributions/installed.py,sha256=YqlkBKr6TVP1MAYS6SG8ojud21wVOYLMZ8jMLJe9MSU,760
pip/_internal/distributions/sdist.py,sha256=D4XTMlCwgPlK69l62GLYkNSVTVe99fR5iAcVt2EbGok,4086
pip/_internal/distributions/wheel.py,sha256=95uD-TfaYoq3KiKBdzk9YMN4RRqJ28LNoSTS2K46gek,1294
pip/_internal/exceptions.py,sha256=ZVpArxQrSlm4qAMtHaY3nHvG_t5eSi3WCnMowdm_m8I,12637
pip/_internal/index/__init__.py,sha256=vpt-JeTZefh8a-FC22ZeBSXFVbuBcXSGiILhQZJaNpQ,30
pip/_internal/index/__pycache__/__init__.cpython-39.pyc,,
pip/_internal/index/__pycache__/collector.cpython-39.pyc,,
pip/_internal/index/__pycache__/package_finder.cpython-39.pyc,,
pip/_internal/index/collector.py,sha256=rMdGdAABOrvIl0DYlCMWXr7mIoqrU2VGeQpCuWiPu1Q,22838
pip/_internal/index/package_finder.py,sha256=ISieDd20dOSndMNybafCu3pO2JR3BKOfHv92Bes0j0Q,37364
pip/_internal/locations.py,sha256=7YjzJy2CroQD8GBMemnHWRl9448BSIt0lfH98B-Dkd8,6732
pip/_internal/main.py,sha256=IVBnUQ-FG7DK6617uEXRB5_QJqspAsBFmTmTesYkbdQ,437
pip/_internal/models/__init__.py,sha256=3DHUd_qxpPozfzouoqa9g9ts1Czr5qaHfFxbnxriepM,63
pip/_internal/models/__pycache__/__init__.cpython-39.pyc,,
pip/_internal/models/__pycache__/candidate.cpython-39.pyc,,
pip/_internal/models/__pycache__/direct_url.cpython-39.pyc,,
pip/_internal/models/__pycache__/format_control.cpython-39.pyc,,
pip/_internal/models/__pycache__/index.cpython-39.pyc,,
pip/_internal/models/__pycache__/link.cpython-39.pyc,,
pip/_internal/models/__pycache__/scheme.cpython-39.pyc,,
pip/_internal/models/__pycache__/search_scope.cpython-39.pyc,,
pip/_internal/models/__pycache__/selection_prefs.cpython-39.pyc,,
pip/_internal/models/__pycache__/target_python.cpython-39.pyc,,
pip/_internal/models/__pycache__/wheel.cpython-39.pyc,,
pip/_internal/models/candidate.py,sha256=gACeCSHTIaWuB6RAeLmGJnbFFbKfp_47UERDoC_ldOU,1195
pip/_internal/models/direct_url.py,sha256=MnBLPci1hE9Ndh6d3m0LAqB7hX3ci80CCJTE5eerFaQ,6900
pip/_internal/models/format_control.py,sha256=RdnnmXxVJppCZWzWEmFTr-zD_m3G0izPLqJi6Iop75M,2823
pip/_internal/models/index.py,sha256=carvxxaT7mJyoEkptaECHUZiNaA6R5NrsGF55zawNn8,1161
pip/_internal/models/link.py,sha256=FMlxvqKmLoj7xTQSgKqfO2ehE1WcgD4C5DmEBuC_Qos,7470
pip/_internal/models/scheme.py,sha256=EhPkT_6G0Md84JTLSVopYsp5H_K6BREYmFvU8H6wMK8,778
pip/_internal/models/search_scope.py,sha256=Lum0mY4_pdR9DDBy6HV5xHGIMPp_kU8vMsqYKFHZip4,4751
pip/_internal/models/selection_prefs.py,sha256=pgNjTfgePPiX1R5S2S8Yc6odOfU9NzG7YP_m_gnS0kw,2044
pip/_internal/models/target_python.py,sha256=R7tAXI15B_cgw7Fgnq5cI9F-44goUZncH9JMtE8pXRw,4034
pip/_internal/models/wheel.py,sha256=FTfzVb4WIbfIehxhdlAVvCil_MQ0-W44oyN56cE6NHc,2772
pip/_internal/network/__init__.py,sha256=jf6Tt5nV_7zkARBrKojIXItgejvoegVJVKUbhAa5Ioc,50
pip/_internal/network/__pycache__/__init__.cpython-39.pyc,,
pip/_internal/network/__pycache__/auth.cpython-39.pyc,,
pip/_internal/network/__pycache__/cache.cpython-39.pyc,,
pip/_internal/network/__pycache__/download.cpython-39.pyc,,
pip/_internal/network/__pycache__/lazy_wheel.cpython-39.pyc,,
pip/_internal/network/__pycache__/session.cpython-39.pyc,,
pip/_internal/network/__pycache__/utils.cpython-39.pyc,,
pip/_internal/network/__pycache__/xmlrpc.cpython-39.pyc,,
pip/_internal/network/auth.py,sha256=dt3NvTRJ8182S3VpdYFEZMPT0JhOKHyFtR-O-JMlJII,11652
pip/_internal/network/cache.py,sha256=6cCD7XNrqh1d1lOSY5U-0ZXOG1YwEgMYs-VhRZVyzMA,2329
pip/_internal/network/download.py,sha256=VTGDO01_nX-5MCdatd4Icv0F88_M8N3WnW6BevA6a0o,5151
pip/_internal/network/lazy_wheel.py,sha256=RXcQILT5v5UO6kxgv76CSncLTqRL29o-OXbaW2aK7t4,8138
pip/_internal/network/session.py,sha256=Zs0uiyPxTpfpgSv-ZI9hK9TjasmTplBuBivOTcUiJME,15208
pip/_internal/network/utils.py,sha256=ZPHg7u6DEcg2EvILIdPECnvPLp21OPHxNVmeXfMy-n0,4172
pip/_internal/network/xmlrpc.py,sha256=PFCiX_nnwYxC8SFIf7J3trP40ECGjA6fl2-IVNhbkPM,1882
pip/_internal/operations/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pip/_internal/operations/__pycache__/__init__.cpython-39.pyc,,
pip/_internal/operations/__pycache__/check.cpython-39.pyc,,
pip/_internal/operations/__pycache__/freeze.cpython-39.pyc,,
pip/_internal/operations/__pycache__/prepare.cpython-39.pyc,,
pip/_internal/operations/build/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pip/_internal/operations/build/__pycache__/__init__.cpython-39.pyc,,
pip/_internal/operations/build/__pycache__/metadata.cpython-39.pyc,,
pip/_internal/operations/build/__pycache__/metadata_legacy.cpython-39.pyc,,
pip/_internal/operations/build/__pycache__/wheel.cpython-39.pyc,,
pip/_internal/operations/build/__pycache__/wheel_legacy.cpython-39.pyc,,
pip/_internal/operations/build/metadata.py,sha256=2aILgWCQTF1aIhWuCH8TTSjv_kYmA3x1262fT2FQ6pQ,1254
pip/_internal/operations/build/metadata_legacy.py,sha256=VgzBTk8naIO8-8N_ifEYF7ZAxWUDhphWVIaVlZ2FqYM,2011
pip/_internal/operations/build/wheel.py,sha256=33vdkxTO-gNqrtWH1eNL_uZo4Irax85moDx2o9zae3M,1465
pip/_internal/operations/build/wheel_legacy.py,sha256=N1aqNZyGURBX0Bj6wPmB0t4866oMbxoHUpC9pz6FyT0,3356
pip/_internal/operations/check.py,sha256=JYDsVLvpFyJuJq0ttStgg8TRKbc0myYFAMnfnnQOREM,5215
pip/_internal/operations/freeze.py,sha256=_vJSZwHBNzBV0GpRUSXhUJz3BrGFdcT2aTcWxH1L4P0,10373
pip/_internal/operations/install/__init__.py,sha256=mX7hyD2GNBO2mFGokDQ30r_GXv7Y_PLdtxcUv144e-s,51
pip/_internal/operations/install/__pycache__/__init__.cpython-39.pyc,,
pip/_internal/operations/install/__pycache__/editable_legacy.cpython-39.pyc,,
pip/_internal/operations/install/__pycache__/legacy.cpython-39.pyc,,
pip/_internal/operations/install/__pycache__/wheel.cpython-39.pyc,,
pip/_internal/operations/install/editable_legacy.py,sha256=rJ_xs2qtDUjpY2-n6eYlVyZiNoKbOtZXZrYrcnIELt4,1488
pip/_internal/operations/install/legacy.py,sha256=zu3Gw54dgHtluyW5n8j5qKcAScidQXJvqB8fb0oLB-4,4281
pip/_internal/operations/install/wheel.py,sha256=nJmOSOYY3keksXd_3GFuhAWeeoKvGOyoSGbjXABjZ40,31310
pip/_internal/operations/prepare.py,sha256=Rt7Yh7w10_Q-vI3b7R1wkt2R6XPX8YVUdODk-TaGI9c,19903
pip/_internal/pyproject.py,sha256=VJKsrXORGiGoDPVKCQhuu4tWlQSTOhoiRlVLRNu4rx4,7400
pip/_internal/req/__init__.py,sha256=s-E5Vxxqqpcs7xfY5gY69oHogsWJ4sLbnUiDoWmkHOU,3133
pip/_internal/req/__pycache__/__init__.cpython-39.pyc,,
pip/_internal/req/__pycache__/constructors.cpython-39.pyc,,
pip/_internal/req/__pycache__/req_file.cpython-39.pyc,,
pip/_internal/req/__pycache__/req_install.cpython-39.pyc,,
pip/_internal/req/__pycache__/req_set.cpython-39.pyc,,
pip/_internal/req/__pycache__/req_tracker.cpython-39.pyc,,
pip/_internal/req/__pycache__/req_uninstall.cpython-39.pyc,,
pip/_internal/req/constructors.py,sha256=LrSHbRHu52-h6HM1qJKG68o1Jw5q8MvJGfr4As6j2uU,16387
pip/_internal/req/req_file.py,sha256=p7n3Y0q275Eisqfxd0vtfnxYvlT6TCCY0tj75p-yiOY,19448
pip/_internal/req/req_install.py,sha256=5IYle0AaLivlkZo6mhU9sj30CbzPqLe92csBnAfJq8U,33610
pip/_internal/req/req_set.py,sha256=dxcfbieWYfYkTJNE07U8xaO40zLxl8BhWOcIHVFTmoo,7886
pip/_internal/req/req_tracker.py,sha256=qWaiejNK6o6cqeyTOIGKIU1CoyrXCcqgMHYi3cqelOA,4690
pip/_internal/req/req_uninstall.py,sha256=opMGDGb7ZaFippRbaarJaljtzl2CNZmBGEUSnTubE-A,23706
pip/_internal/resolution/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pip/_internal/resolution/__pycache__/__init__.cpython-39.pyc,,
pip/_internal/resolution/__pycache__/base.cpython-39.pyc,,
pip/_internal/resolution/base.py,sha256=xi72YmIS-lEjyK13PN_3qkGGthA4yGoK0C6qWynyHrE,682
pip/_internal/resolution/legacy/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pip/_internal/resolution/legacy/__pycache__/__init__.cpython-39.pyc,,
pip/_internal/resolution/legacy/__pycache__/resolver.cpython-39.pyc,,
pip/_internal/resolution/legacy/resolver.py,sha256=d-qW6UUxbZqKyXmX2bqnW5C8UtnO0ZcsQuKw_QXualc,18755
pip/_internal/resolution/resolvelib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pip/_internal/resolution/resolvelib/__pycache__/__init__.cpython-39.pyc,,
pip/_internal/resolution/resolvelib/__pycache__/base.cpython-39.pyc,,
pip/_internal/resolution/resolvelib/__pycache__/candidates.cpython-39.pyc,,
pip/_internal/resolution/resolvelib/__pycache__/factory.cpython-39.pyc,,
pip/_internal/resolution/resolvelib/__pycache__/provider.cpython-39.pyc,,
pip/_internal/resolution/resolvelib/__pycache__/requirements.cpython-39.pyc,,
pip/_internal/resolution/resolvelib/__pycache__/resolver.cpython-39.pyc,,
pip/_internal/resolution/resolvelib/base.py,sha256=n8Rilea9jCzhlbtFiJKwCwIQSPW0ATjEKsCc0Vpm894,2342
pip/_internal/resolution/resolvelib/candidates.py,sha256=RHo9r9g25FWzufKv93Ti9nS4hvAPUrhAjSDL7GCZFNQ,20339
pip/_internal/resolution/resolvelib/factory.py,sha256=--ahYsr-r9zIhdyJJ1ZuETgaQrWiPIqwILWiMDn1IIU,17169
pip/_internal/resolution/resolvelib/provider.py,sha256=BP8nh07Z1FlcT-Iaw4FblRM-DjUeUkiItKdKARYeM6M,6134
pip/_internal/resolution/resolvelib/requirements.py,sha256=lGvoHRhkusRfaz4cFxYBoQNqxS6TeuO3K68qlui6g-0,4511
pip/_internal/resolution/resolvelib/resolver.py,sha256=kI8g0NVlYIsDMRmDplWQdox6WO-0H7CI2wN-1ixnaew,10149
pip/_internal/self_outdated_check.py,sha256=q6_nqUHPpt-DScwD97h7FCSqd4nI1s-xkpOI4I5Za3Y,6779
pip/_internal/utils/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pip/_internal/utils/__pycache__/__init__.cpython-39.pyc,,
pip/_internal/utils/__pycache__/appdirs.cpython-39.pyc,,
pip/_internal/utils/__pycache__/compat.cpython-39.pyc,,
pip/_internal/utils/__pycache__/compatibility_tags.cpython-39.pyc,,
pip/_internal/utils/__pycache__/datetime.cpython-39.pyc,,
pip/_internal/utils/__pycache__/deprecation.cpython-39.pyc,,
pip/_internal/utils/__pycache__/direct_url_helpers.cpython-39.pyc,,
pip/_internal/utils/__pycache__/distutils_args.cpython-39.pyc,,
pip/_internal/utils/__pycache__/encoding.cpython-39.pyc,,
pip/_internal/utils/__pycache__/entrypoints.cpython-39.pyc,,
pip/_internal/utils/__pycache__/filesystem.cpython-39.pyc,,
pip/_internal/utils/__pycache__/filetypes.cpython-39.pyc,,
pip/_internal/utils/__pycache__/glibc.cpython-39.pyc,,
pip/_internal/utils/__pycache__/hashes.cpython-39.pyc,,
pip/_internal/utils/__pycache__/inject_securetransport.cpython-39.pyc,,
pip/_internal/utils/__pycache__/logging.cpython-39.pyc,,
pip/_internal/utils/__pycache__/misc.cpython-39.pyc,,
pip/_internal/utils/__pycache__/models.cpython-39.pyc,,
pip/_internal/utils/__pycache__/packaging.cpython-39.pyc,,
pip/_internal/utils/__pycache__/parallel.cpython-39.pyc,,
pip/_internal/utils/__pycache__/pkg_resources.cpython-39.pyc,,
pip/_internal/utils/__pycache__/setuptools_build.cpython-39.pyc,,
pip/_internal/utils/__pycache__/subprocess.cpython-39.pyc,,
pip/_internal/utils/__pycache__/temp_dir.cpython-39.pyc,,
pip/_internal/utils/__pycache__/typing.cpython-39.pyc,,
pip/_internal/utils/__pycache__/unpacking.cpython-39.pyc,,
pip/_internal/utils/__pycache__/urls.cpython-39.pyc,,
pip/_internal/utils/__pycache__/virtualenv.cpython-39.pyc,,
pip/_internal/utils/__pycache__/wheel.cpython-39.pyc,,
pip/_internal/utils/appdirs.py,sha256=RZzUG-Bkh2b-miX0DSZ3v703_-bgK-v0PfWCCjwVE9g,1349
pip/_internal/utils/compat.py,sha256=GoCSUMoUmTGeg5irQGLDZ7v12As87yHrMzBXEke-njg,8865
pip/_internal/utils/compatibility_tags.py,sha256=EtBJj-pstj_U0STUZ8FjlG7YDTjuRZUy6GY1cM86yv8,5439
pip/_internal/utils/datetime.py,sha256=KL-vIdGU9JIpGB5NYkmwXWkH-G_2mvvABlmRtoSZsao,295
pip/_internal/utils/deprecation.py,sha256=pBnNogoA4UGTxa_JDnPXBRRYpKMbExAhXpBwAwklOBs,3318
pip/_internal/utils/direct_url_helpers.py,sha256=bZCBNwPQVyZpYGjX_VcomvVvRHvKw-9JzEV-Ft09LQc,4359
pip/_internal/utils/distutils_args.py,sha256=a56mblNxk9BGifbpEETG61mmBrqhjtjRkJ4HYn-oOEE,1350
pip/_internal/utils/encoding.py,sha256=wHDJ25yCT_T4ySscCL3P978OpLrfDCpitg8D64IEXMY,1284
pip/_internal/utils/entrypoints.py,sha256=vHcNpnksCv6mllihU6hfifdsKPEjwcaJ1aLIXEaynaU,1152
pip/_internal/utils/filesystem.py,sha256=-fU3XteCAIJwf_9FvCZU7vhywvt3nuf_cqkCdwgy1Y8,6943
pip/_internal/utils/filetypes.py,sha256=R2FwzoeX7b-rZALOXx5cuO8VPPMhUQ4ne7wm3n3IcWA,571
pip/_internal/utils/glibc.py,sha256=LOeNGgawCKS-4ke9fii78fwXD73dtNav3uxz1Bf-Ab8,3297
pip/_internal/utils/hashes.py,sha256=xHmrqNwC1eBN0oY0R_RXLJLXGvFdo5gwmbz_pas94k8,4358
pip/_internal/utils/inject_securetransport.py,sha256=M17ZlFVY66ApgeASVjKKLKNz0LAfk-SyU0HZ4ZB6MmI,810
pip/_internal/utils/logging.py,sha256=YIfuDUEkmdn9cIRQ_Ec8rgXs1m5nOwDECtZqM4CBH5U,13093
pip/_internal/utils/misc.py,sha256=QQZWMJkKKADPSWQYmrwlasc8b03eCcghn0yDNprYgrI,28001
pip/_internal/utils/models.py,sha256=HqiBVtTbW_b_Umvj2fjhDWOHo2RKhPwSz4iAYkQZ688,1201
pip/_internal/utils/packaging.py,sha256=VtiwcAAL7LBi7tGL2je7LeW4bE11KMHGCsJ1NZY5XtM,3035
pip/_internal/utils/parallel.py,sha256=7az3aaTMCkqpaLFbpYYOvk0rj7Hu5YH1NPXXomqjgf4,3404
pip/_internal/utils/pkg_resources.py,sha256=ZX-k7V5q_aNWyDse92nN7orN1aCpRLsaxzpkBZ1XKzU,1254
pip/_internal/utils/setuptools_build.py,sha256=E1KswI7wfNnCDE5R6G8c9ZbByENpu7NqocjY26PCQDw,5058
pip/_internal/utils/subprocess.py,sha256=UkPe89gcjxBMx73uutoeJXgD3kwdlL6YO16BkjDdVSI,9924
pip/_internal/utils/temp_dir.py,sha256=blmG0jEvEgdxbYUt_V15bgcTIJIrxZwAw8QZlCTJYDE,8378
pip/_internal/utils/typing.py,sha256=xkYwOeHlf4zsHXBDC4310HtEqwhQcYXFPq2h35Tcrl0,1401
pip/_internal/utils/unpacking.py,sha256=YFAckhqqvmehA8Kan5vd3b1kN_9TafqmOk4b-yz4fho,9488
pip/_internal/utils/urls.py,sha256=q2rw1kMiiig_XZcoyJSsWMJQqYw-2wUmrMoST4mCW_I,1527
pip/_internal/utils/virtualenv.py,sha256=fNGrRp-8QmNL5OzXajBd-z7PbwOsx1XY6G-AVMAhfso,3706
pip/_internal/utils/wheel.py,sha256=wFzn3h8GqYvgsyWPZtUyn0Rb3MJzmtyP3owMOhKnmL0,7303
pip/_internal/vcs/__init__.py,sha256=viJxJRqRE_mVScum85bgQIXAd6o0ozFt18VpC-qIJrM,617
pip/_internal/vcs/__pycache__/__init__.cpython-39.pyc,,
pip/_internal/vcs/__pycache__/bazaar.cpython-39.pyc,,
pip/_internal/vcs/__pycache__/git.cpython-39.pyc,,
pip/_internal/vcs/__pycache__/mercurial.cpython-39.pyc,,
pip/_internal/vcs/__pycache__/subversion.cpython-39.pyc,,
pip/_internal/vcs/__pycache__/versioncontrol.cpython-39.pyc,,
pip/_internal/vcs/bazaar.py,sha256=5rRR02uDZTLaxQT-R5Obd8FZDOMlShqYds-pwVSJJs8,3887
pip/_internal/vcs/git.py,sha256=kvB729wrKY0OWMSgOS1pUly4LosZp8utrd3kOQsWalA,13985
pip/_internal/vcs/mercurial.py,sha256=FzCGmYzVZvB-vyM73fKcQk2B4jMNXGnXlQ2bJ7nmglM,5162
pip/_internal/vcs/subversion.py,sha256=rldcn9ZDt5twjNPzFn_FKRn4qdfkjlxHMJEsR2MFfoA,12399
pip/_internal/vcs/versioncontrol.py,sha256=WpxeTRC0NoGB2uXJdmfq4pPxY-p7sk1rV_WkxMxgzQA,25966
pip/_internal/wheel_builder.py,sha256=6w1VPXrpUvCCPlV0cI1wNaCqNz4laF6B6whvaxl9cns,9522
pip/_vendor/__init__.py,sha256=CsxnpYPbi_2agrDI79iQrCmQeZRcwwIF0C6cm_1RynU,4588
pip/_vendor/__pycache__/__init__.cpython-39.pyc,,
pip/_vendor/__pycache__/appdirs.cpython-39.pyc,,
pip/_vendor/__pycache__/contextlib2.cpython-39.pyc,,
pip/_vendor/__pycache__/distro.cpython-39.pyc,,
pip/_vendor/__pycache__/ipaddress.cpython-39.pyc,,
pip/_vendor/__pycache__/pyparsing.cpython-39.pyc,,
pip/_vendor/__pycache__/retrying.cpython-39.pyc,,
pip/_vendor/__pycache__/six.cpython-39.pyc,,
pip/_vendor/appdirs.py,sha256=M6IYRJtdZgmSPCXCSMBRB0VT3P8MdFbWCDbSLrB2Ebg,25907
pip/_vendor/cachecontrol/__init__.py,sha256=pJtAaUxOsMPnytI1A3juAJkXYDr8krdSnsg4Yg3OBEg,302
pip/_vendor/cachecontrol/__pycache__/__init__.cpython-39.pyc,,
pip/_vendor/cachecontrol/__pycache__/_cmd.cpython-39.pyc,,
pip/_vendor/cachecontrol/__pycache__/adapter.cpython-39.pyc,,
pip/_vendor/cachecontrol/__pycache__/cache.cpython-39.pyc,,
pip/_vendor/cachecontrol/__pycache__/compat.cpython-39.pyc,,
pip/_vendor/cachecontrol/__pycache__/controller.cpython-39.pyc,,
pip/_vendor/cachecontrol/__pycache__/filewrapper.cpython-39.pyc,,
pip/_vendor/cachecontrol/__pycache__/heuristics.cpython-39.pyc,,
pip/_vendor/cachecontrol/__pycache__/serialize.cpython-39.pyc,,
pip/_vendor/cachecontrol/__pycache__/wrapper.cpython-39.pyc,,
pip/_vendor/cachecontrol/_cmd.py,sha256=URGE0KrA87QekCG3SGPatlSPT571dZTDjNa-ZXX3pDc,1295
pip/_vendor/cachecontrol/adapter.py,sha256=sSwaSYd93IIfCFU4tOMgSo6b2LCt_gBSaQUj8ktJFOA,4882
pip/_vendor/cachecontrol/cache.py,sha256=1fc4wJP8HYt1ycnJXeEw5pCpeBL2Cqxx6g9Fb0AYDWQ,805
pip/_vendor/cachecontrol/caches/__init__.py,sha256=-gHNKYvaeD0kOk5M74eOrsSgIKUtC6i6GfbmugGweEo,86
pip/_vendor/cachecontrol/caches/__pycache__/__init__.cpython-39.pyc,,
pip/_vendor/cachecontrol/caches/__pycache__/file_cache.cpython-39.pyc,,
pip/_vendor/cachecontrol/caches/__pycache__/redis_cache.cpython-39.pyc,,
pip/_vendor/cachecontrol/caches/file_cache.py,sha256=nYVKsJtXh6gJXvdn1iWyrhxvkwpQrK-eKoMRzuiwkKk,4153
pip/_vendor/cachecontrol/caches/redis_cache.py,sha256=HxelMpNCo-dYr2fiJDwM3hhhRmxUYtB5tXm1GpAAT4Y,856
pip/_vendor/cachecontrol/compat.py,sha256=kHNvMRdt6s_Xwqq_9qJmr9ou3wYMOMUMxPPcwNxT8Mc,695
pip/_vendor/cachecontrol/controller.py,sha256=CWEX3pedIM9s60suf4zZPtm_JvVgnvogMGK_OiBG5F8,14149
pip/_vendor/cachecontrol/filewrapper.py,sha256=vACKO8Llzu_ZWyjV1Fxn1MA4TGU60N5N3GSrAFdAY2Q,2533
pip/_vendor/cachecontrol/heuristics.py,sha256=BFGHJ3yQcxvZizfo90LLZ04T_Z5XSCXvFotrp7Us0sc,4070
pip/_vendor/cachecontrol/serialize.py,sha256=vIa4jvq4x_KSOLdEIedoknX2aXYHQujLDFV4-F21Dno,7091
pip/_vendor/cachecontrol/wrapper.py,sha256=5LX0uJwkNQUtYSEw3aGmGu9WY8wGipd81mJ8lG0d0M4,690
pip/_vendor/certifi/__init__.py,sha256=u1E_DrSGj_nnEkK5VglvEqP8D80KpghLVWL0A_pq41A,62
pip/_vendor/certifi/__main__.py,sha256=1k3Cr95vCxxGRGDljrW3wMdpZdL3Nhf0u1n-k2qdsCY,255
pip/_vendor/certifi/__pycache__/__init__.cpython-39.pyc,,
pip/_vendor/certifi/__pycache__/__main__.cpython-39.pyc,,
pip/_vendor/certifi/__pycache__/core.cpython-39.pyc,,
pip/_vendor/certifi/cacert.pem,sha256=GhT24f0R7_9y4YY_hkXwkO7BthZhRGDCEMO348E9S14,282394
pip/_vendor/certifi/core.py,sha256=jBrwKEWpG0IKcuozK0BQ2HHGp8adXAOyBPC7ddgR6vM,2315
pip/_vendor/chardet/__init__.py,sha256=YsP5wQlsHJ2auF1RZJfypiSrCA7_bQiRm3ES_NI76-Y,1559
pip/_vendor/chardet/__pycache__/__init__.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/big5freq.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/big5prober.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/chardistribution.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/charsetgroupprober.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/charsetprober.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/codingstatemachine.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/compat.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/cp949prober.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/enums.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/escprober.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/escsm.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/eucjpprober.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/euckrfreq.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/euckrprober.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/euctwfreq.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/euctwprober.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/gb2312freq.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/gb2312prober.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/hebrewprober.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/jisfreq.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/jpcntx.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/langbulgarianmodel.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/langcyrillicmodel.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/langgreekmodel.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/langhebrewmodel.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/langhungarianmodel.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/langthaimodel.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/langturkishmodel.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/latin1prober.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/mbcharsetprober.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/mbcsgroupprober.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/mbcssm.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/sbcharsetprober.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/sbcsgroupprober.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/sjisprober.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/universaldetector.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/utf8prober.cpython-39.pyc,,
pip/_vendor/chardet/__pycache__/version.cpython-39.pyc,,
pip/_vendor/chardet/big5freq.py,sha256=D_zK5GyzoVsRes0HkLJziltFQX0bKCLOrFe9_xDvO_8,31254
pip/_vendor/chardet/big5prober.py,sha256=kBxHbdetBpPe7xrlb-e990iot64g_eGSLd32lB7_h3M,1757
pip/_vendor/chardet/chardistribution.py,sha256=3woWS62KrGooKyqz4zQSnjFbJpa6V7g02daAibTwcl8,9411
pip/_vendor/chardet/charsetgroupprober.py,sha256=6bDu8YIiRuScX4ca9Igb0U69TA2PGXXDej6Cc4_9kO4,3787
pip/_vendor/chardet/charsetprober.py,sha256=KSmwJErjypyj0bRZmC5F5eM7c8YQgLYIjZXintZNstg,5110
pip/_vendor/chardet/cli/__init__.py,sha256=AbpHGcgLb-kRsJGnwFEktk7uzpZOCcBY74-YBdrKVGs,1
pip/_vendor/chardet/cli/__pycache__/__init__.cpython-39.pyc,,
pip/_vendor/chardet/cli/__pycache__/chardetect.cpython-39.pyc,,
pip/_vendor/chardet/cli/chardetect.py,sha256=DI8dlV3FBD0c0XA_y3sQ78z754DUv1J8n34RtDjOXNw,2774
pip/_vendor/chardet/codingstatemachine.py,sha256=VYp_6cyyki5sHgXDSZnXW4q1oelHc3cu9AyQTX7uug8,3590
pip/_vendor/chardet/compat.py,sha256=PKTzHkSbtbHDqS9PyujMbX74q1a8mMpeQTDVsQhZMRw,1134
pip/_vendor/chardet/cp949prober.py,sha256=TZ434QX8zzBsnUvL_8wm4AQVTZ2ZkqEEQL_lNw9f9ow,1855
pip/_vendor/chardet/enums.py,sha256=Aimwdb9as1dJKZaFNUH2OhWIVBVd6ZkJJ_WK5sNY8cU,1661
pip/_vendor/chardet/escprober.py,sha256=kkyqVg1Yw3DIOAMJ2bdlyQgUFQhuHAW8dUGskToNWSc,3950
pip/_vendor/chardet/escsm.py,sha256=RuXlgNvTIDarndvllNCk5WZBIpdCxQ0kcd9EAuxUh84,10510
pip/_vendor/chardet/eucjpprober.py,sha256=iD8Jdp0ISRjgjiVN7f0e8xGeQJ5GM2oeZ1dA8nbSeUw,3749
pip/_vendor/chardet/euckrfreq.py,sha256=-7GdmvgWez4-eO4SuXpa7tBiDi5vRXQ8WvdFAzVaSfo,13546
pip/_vendor/chardet/euckrprober.py,sha256=MqFMTQXxW4HbzIpZ9lKDHB3GN8SP4yiHenTmf8g_PxY,1748
pip/_vendor/chardet/euctwfreq.py,sha256=No1WyduFOgB5VITUA7PLyC5oJRNzRyMbBxaKI1l16MA,31621
pip/_vendor/chardet/euctwprober.py,sha256=13p6EP4yRaxqnP4iHtxHOJ6R2zxHq1_m8hTRjzVZ95c,1747
pip/_vendor/chardet/gb2312freq.py,sha256=JX8lsweKLmnCwmk8UHEQsLgkr_rP_kEbvivC4qPOrlc,20715
pip/_vendor/chardet/gb2312prober.py,sha256=gGvIWi9WhDjE-xQXHvNIyrnLvEbMAYgyUSZ65HUfylw,1754
pip/_vendor/chardet/hebrewprober.py,sha256=c3SZ-K7hvyzGY6JRAZxJgwJ_sUS9k0WYkvMY00YBYFo,13838
pip/_vendor/chardet/jisfreq.py,sha256=vpmJv2Bu0J8gnMVRPHMFefTRvo_ha1mryLig8CBwgOg,25777
pip/_vendor/chardet/jpcntx.py,sha256=PYlNqRUQT8LM3cT5FmHGP0iiscFlTWED92MALvBungo,19643
pip/_vendor/chardet/langbulgarianmodel.py,sha256=1HqQS9Pbtnj1xQgxitJMvw8X6kKr5OockNCZWfEQrPE,12839
pip/_vendor/chardet/langcyrillicmodel.py,sha256=LODajvsetH87yYDDQKA2CULXUH87tI223dhfjh9Zx9c,17948
pip/_vendor/chardet/langgreekmodel.py,sha256=8YAW7bU8YwSJap0kIJSbPMw1BEqzGjWzqcqf0WgUKAA,12688
pip/_vendor/chardet/langhebrewmodel.py,sha256=JSnqmE5E62tDLTPTvLpQsg5gOMO4PbdWRvV7Avkc0HA,11345
pip/_vendor/chardet/langhungarianmodel.py,sha256=RhapYSG5l0ZaO-VV4Fan5sW0WRGQqhwBM61yx3yxyOA,12592
pip/_vendor/chardet/langthaimodel.py,sha256=8l0173Gu_W6G8mxmQOTEF4ls2YdE7FxWf3QkSxEGXJQ,11290
pip/_vendor/chardet/langturkishmodel.py,sha256=W22eRNJsqI6uWAfwXSKVWWnCerYqrI8dZQTm_M0lRFk,11102
pip/_vendor/chardet/latin1prober.py,sha256=S2IoORhFk39FEFOlSFWtgVybRiP6h7BlLldHVclNkU8,5370
pip/_vendor/chardet/mbcharsetprober.py,sha256=AR95eFH9vuqSfvLQZN-L5ijea25NOBCoXqw8s5O9xLQ,3413
pip/_vendor/chardet/mbcsgroupprober.py,sha256=h6TRnnYq2OxG1WdD5JOyxcdVpn7dG0q-vB8nWr5mbh4,2012
pip/_vendor/chardet/mbcssm.py,sha256=SY32wVIF3HzcjY3BaEspy9metbNSKxIIB0RKPn7tjpI,25481
pip/_vendor/chardet/sbcharsetprober.py,sha256=LDSpCldDCFlYwUkGkwD2oFxLlPWIWXT09akH_2PiY74,5657
pip/_vendor/chardet/sbcsgroupprober.py,sha256=1IprcCB_k1qfmnxGC6MBbxELlKqD3scW6S8YIwdeyXA,3546
pip/_vendor/chardet/sjisprober.py,sha256=IIt-lZj0WJqK4rmUZzKZP4GJlE8KUEtFYVuY96ek5MQ,3774
pip/_vendor/chardet/universaldetector.py,sha256=qL0174lSZE442eB21nnktT9_VcAye07laFWUeUrjttY,12485
pip/_vendor/chardet/utf8prober.py,sha256=IdD8v3zWOsB8OLiyPi-y_fqwipRFxV9Nc1eKBLSuIEw,2766
pip/_vendor/chardet/version.py,sha256=sp3B08mrDXB-pf3K9fqJ_zeDHOCLC8RrngQyDFap_7g,242
pip/_vendor/colorama/__init__.py,sha256=DqjXH9URVP3IJwmMt7peYw50ns1RNAymIB9-XdPEFV8,239
pip/_vendor/colorama/__pycache__/__init__.cpython-39.pyc,,
pip/_vendor/colorama/__pycache__/ansi.cpython-39.pyc,,
pip/_vendor/colorama/__pycache__/ansitowin32.cpython-39.pyc,,
pip/_vendor/colorama/__pycache__/initialise.cpython-39.pyc,,
pip/_vendor/colorama/__pycache__/win32.cpython-39.pyc,,
pip/_vendor/colorama/__pycache__/winterm.cpython-39.pyc,,
pip/_vendor/colorama/ansi.py,sha256=Fi0un-QLqRm-v7o_nKiOqyC8PapBJK7DLV_q9LKtTO0,2524
pip/_vendor/colorama/ansitowin32.py,sha256=u8QaqdqS_xYSfNkPM1eRJLHz6JMWPodaJaP0mxgHCDc,10462
pip/_vendor/colorama/initialise.py,sha256=PprovDNxMTrvoNHFcL2NZjpH2XzDc8BLxLxiErfUl4k,1915
pip/_vendor/colorama/win32.py,sha256=bJ8Il9jwaBN5BJ8bmN6FoYZ1QYuMKv2j8fGrXh7TJjw,5404
pip/_vendor/colorama/winterm.py,sha256=2y_2b7Zsv34feAsP67mLOVc-Bgq51mdYGo571VprlrM,6438
pip/_vendor/contextlib2.py,sha256=5HjGflUzwWAUfcILhSmC2GqvoYdZZzFzVfIDztHigUs,16915
pip/_vendor/distlib/__init__.py,sha256=3veAk2rPznOB2gsK6tjbbh0TQMmGE5P82eE9wXq6NIk,581
pip/_vendor/distlib/__pycache__/__init__.cpython-39.pyc,,
pip/_vendor/distlib/__pycache__/compat.cpython-39.pyc,,
pip/_vendor/distlib/__pycache__/database.cpython-39.pyc,,
pip/_vendor/distlib/__pycache__/index.cpython-39.pyc,,
pip/_vendor/distlib/__pycache__/locators.cpython-39.pyc,,
pip/_vendor/distlib/__pycache__/manifest.cpython-39.pyc,,
pip/_vendor/distlib/__pycache__/markers.cpython-39.pyc,,
pip/_vendor/distlib/__pycache__/metadata.cpython-39.pyc,,
pip/_vendor/distlib/__pycache__/resources.cpython-39.pyc,,
pip/_vendor/distlib/__pycache__/scripts.cpython-39.pyc,,
pip/_vendor/distlib/__pycache__/util.cpython-39.pyc,,
pip/_vendor/distlib/__pycache__/version.cpython-39.pyc,,
pip/_vendor/distlib/__pycache__/wheel.cpython-39.pyc,,
pip/_vendor/distlib/_backport/__init__.py,sha256=bqS_dTOH6uW9iGgd0uzfpPjo6vZ4xpPZ7kyfZJ2vNaw,274
pip/_vendor/distlib/_backport/__pycache__/__init__.cpython-39.pyc,,
pip/_vendor/distlib/_backport/__pycache__/misc.cpython-39.pyc,,
pip/_vendor/distlib/_backport/__pycache__/shutil.cpython-39.pyc,,
pip/_vendor/distlib/_backport/__pycache__/sysconfig.cpython-39.pyc,,
pip/_vendor/distlib/_backport/__pycache__/tarfile.cpython-39.pyc,,
pip/_vendor/distlib/_backport/misc.py,sha256=KWecINdbFNOxSOP1fGF680CJnaC6S4fBRgEtaYTw0ig,971
pip/_vendor/distlib/_backport/shutil.py,sha256=IX_G2NPqwecJibkIDje04bqu0xpHkfSQ2GaGdEVqM5Y,25707
pip/_vendor/distlib/_backport/sysconfig.cfg,sha256=swZKxq9RY5e9r3PXCrlvQPMsvOdiWZBTHLEbqS8LJLU,2617
pip/_vendor/distlib/_backport/sysconfig.py,sha256=BQHFlb6pubCl_dvT1NjtzIthylofjKisox239stDg0U,26854
pip/_vendor/distlib/_backport/tarfile.py,sha256=Ihp7rXRcjbIKw8COm9wSePV9ARGXbSF9gGXAMn2Q-KU,92628
pip/_vendor/distlib/compat.py,sha256=ADA56xiAxar3mU6qemlBhNbsrFPosXRhO44RzsbJPqk,41408
pip/_vendor/distlib/database.py,sha256=Kl0YvPQKc4OcpVi7k5cFziydM1xOK8iqdxLGXgbZHV4,51059
pip/_vendor/distlib/index.py,sha256=SXKzpQCERctxYDMp_OLee2f0J0e19ZhGdCIoMlUfUQM,21066
pip/_vendor/distlib/locators.py,sha256=c9E4cDEacJ_uKbuE5BqAVocoWp6rsuBGTkiNDQq3zV4,52100
pip/_vendor/distlib/manifest.py,sha256=nQEhYmgoreaBZzyFzwYsXxJARu3fo4EkunU163U16iE,14811
pip/_vendor/distlib/markers.py,sha256=6Ac3cCfFBERexiESWIOXmg-apIP8l2esafNSX3KMy-8,4387
pip/_vendor/distlib/metadata.py,sha256=z2KPy3h3tcDnb9Xs7nAqQ5Oz0bqjWAUFmKWcFKRoodg,38962
pip/_vendor/distlib/resources.py,sha256=2FGv0ZHF14KXjLIlL0R991lyQQGcewOS4mJ-5n-JVnc,10766
pip/_vendor/distlib/scripts.py,sha256=_MAj3sMuv56kuM8FsiIWXqbT0gmumPGaOR_atOzn4a4,17180
pip/_vendor/distlib/t32.exe,sha256=NS3xBCVAld35JVFNmb-1QRyVtThukMrwZVeXn4LhaEQ,96768
pip/_vendor/distlib/t64.exe,sha256=oAqHes78rUWVM0OtVqIhUvequl_PKhAhXYQWnUf7zR0,105984
pip/_vendor/distlib/util.py,sha256=f2jZCPrcLCt6LcnC0gUy-Fur60tXD8reA7k4rDpHMDw,59845
pip/_vendor/distlib/version.py,sha256=_n7F6juvQGAcn769E_SHa7fOcf5ERlEVymJ_EjPRwGw,23391
pip/_vendor/distlib/w32.exe,sha256=lJtnZdeUxTZWya_EW5DZos_K5rswRECGspIl8ZJCIXs,90112
pip/_vendor/distlib/w64.exe,sha256=0aRzoN2BO9NWW4ENy4_4vHkHR4qZTFZNVSAJJYlODTI,99840
pip/_vendor/distlib/wheel.py,sha256=v6DnwTqhNHwrEVFr8_YeiTW6G4ftP_evsywNgrmdb2o,41144
pip/_vendor/distro.py,sha256=xxMIh2a3KmippeWEHzynTdHT3_jZM0o-pos0dAWJROM,43628
pip/_vendor/html5lib/__init__.py,sha256=BYzcKCqeEii52xDrqBFruhnmtmkiuHXFyFh-cglQ8mk,1160
pip/_vendor/html5lib/__pycache__/__init__.cpython-39.pyc,,
pip/_vendor/html5lib/__pycache__/_ihatexml.cpython-39.pyc,,
pip/_vendor/html5lib/__pycache__/_inputstream.cpython-39.pyc,,
pip/_vendor/html5lib/__pycache__/_tokenizer.cpython-39.pyc,,
pip/_vendor/html5lib/__pycache__/_utils.cpython-39.pyc,,
pip/_vendor/html5lib/__pycache__/constants.cpython-39.pyc,,
pip/_vendor/html5lib/__pycache__/html5parser.cpython-39.pyc,,
pip/_vendor/html5lib/__pycache__/serializer.cpython-39.pyc,,
pip/_vendor/html5lib/_ihatexml.py,sha256=ifOwF7pXqmyThIXc3boWc96s4MDezqRrRVp7FwDYUFs,16728
pip/_vendor/html5lib/_inputstream.py,sha256=jErNASMlkgs7MpOM9Ve_VdLDJyFFweAjLuhVutZz33U,32353
pip/_vendor/html5lib/_tokenizer.py,sha256=04mgA2sNTniutl2fxFv-ei5bns4iRaPxVXXHh_HrV_4,77040
pip/_vendor/html5lib/_trie/__init__.py,sha256=nqfgO910329BEVJ5T4psVwQtjd2iJyEXQ2-X8c1YxwU,109
pip/_vendor/html5lib/_trie/__pycache__/__init__.cpython-39.pyc,,
pip/_vendor/html5lib/_trie/__pycache__/_base.cpython-39.pyc,,
pip/_vendor/html5lib/_trie/__pycache__/py.cpython-39.pyc,,
pip/_vendor/html5lib/_trie/_base.py,sha256=CaybYyMro8uERQYjby2tTeSUatnWDfWroUN9N7ety5w,1013
pip/_vendor/html5lib/_trie/py.py,sha256=wXmQLrZRf4MyWNyg0m3h81m9InhLR7GJ002mIIZh-8o,1775
pip/_vendor/html5lib/_utils.py,sha256=Dx9AKntksRjFT1veBj7I362pf5OgIaT0zglwq43RnfU,4931
pip/_vendor/html5lib/constants.py,sha256=Ll-yzLU_jcjyAI_h57zkqZ7aQWE5t5xA4y_jQgoUUhw,83464
pip/_vendor/html5lib/filters/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pip/_vendor/html5lib/filters/__pycache__/__init__.cpython-39.pyc,,
pip/_vendor/html5lib/filters/__pycache__/alphabeticalattributes.cpython-39.pyc,,
pip/_vendor/html5lib/filters/__pycache__/base.cpython-39.pyc,,
pip/_vendor/html5lib/filters/__pycache__/inject_meta_charset.cpython-39.pyc,,
pip/_vendor/html5lib/filters/__pycache__/lint.cpython-39.pyc,,
pip/_vendor/html5lib/filters/__pycache__/optionaltags.cpython-39.pyc,,
pip/_vendor/html5lib/filters/__pycache__/sanitizer.cpython-39.pyc,,
pip/_vendor/html5lib/filters/__pycache__/whitespace.cpython-39.pyc,,
pip/_vendor/html5lib/filters/alphabeticalattributes.py,sha256=lViZc2JMCclXi_5gduvmdzrRxtO5Xo9ONnbHBVCsykU,919
pip/_vendor/html5lib/filters/base.py,sha256=z-IU9ZAYjpsVsqmVt7kuWC63jR11hDMr6CVrvuao8W0,286
pip/_vendor/html5lib/filters/inject_meta_charset.py,sha256=egDXUEHXmAG9504xz0K6ALDgYkvUrC2q15YUVeNlVQg,2945
pip/_vendor/html5lib/filters/lint.py,sha256=jk6q56xY0ojiYfvpdP-OZSm9eTqcAdRqhCoPItemPYA,3643
pip/_vendor/html5lib/filters/optionaltags.py,sha256=8lWT75J0aBOHmPgfmqTHSfPpPMp01T84NKu0CRedxcE,10588
pip/_vendor/html5lib/filters/sanitizer.py,sha256=m6oGmkBhkGAnn2nV6D4hE78SCZ6WEnK9rKdZB3uXBIc,26897
pip/_vendor/html5lib/filters/whitespace.py,sha256=8eWqZxd4UC4zlFGW6iyY6f-2uuT8pOCSALc3IZt7_t4,1214
pip/_vendor/html5lib/html5parser.py,sha256=anr-aXre_ImfrkQ35c_rftKXxC80vJCREKe06Tq15HA,117186
pip/_vendor/html5lib/serializer.py,sha256=_PpvcZF07cwE7xr9uKkZqh5f4UEaI8ltCU2xPJzaTpk,15759
pip/_vendor/html5lib/treeadapters/__init__.py,sha256=A0rY5gXIe4bJOiSGRO_j_tFhngRBO8QZPzPtPw5dFzo,679
pip/_vendor/html5lib/treeadapters/__pycache__/__init__.cpython-39.pyc,,
pip/_vendor/html5lib/treeadapters/__pycache__/genshi.cpython-39.pyc,,
pip/_vendor/html5lib/treeadapters/__pycache__/sax.cpython-39.pyc,,
pip/_vendor/html5lib/treeadapters/genshi.py,sha256=CH27pAsDKmu4ZGkAUrwty7u0KauGLCZRLPMzaO3M5vo,1715
pip/_vendor/html5lib/treeadapters/sax.py,sha256=BKS8woQTnKiqeffHsxChUqL4q2ZR_wb5fc9MJ3zQC8s,1776
pip/_vendor/html5lib/treebuilders/__init__.py,sha256=AysSJyvPfikCMMsTVvaxwkgDieELD5dfR8FJIAuq7hY,3592
pip/_vendor/html5lib/treebuilders/__pycache__/__init__.cpython-39.pyc,,
pip/_vendor/html5lib/treebuilders/__pycache__/base.cpython-39.pyc,,
pip/_vendor/html5lib/treebuilders/__pycache__/dom.cpython-39.pyc,,
pip/_vendor/html5lib/treebuilders/__pycache__/etree.cpython-39.pyc,,
pip/_vendor/html5lib/treebuilders/__pycache__/etree_lxml.cpython-39.pyc,,
pip/_vendor/html5lib/treebuilders/base.py,sha256=z-o51vt9r_l2IDG5IioTOKGzZne4Fy3_Fc-7ztrOh4I,14565
pip/_vendor/html5lib/treebuilders/dom.py,sha256=22whb0C71zXIsai5mamg6qzBEiigcBIvaDy4Asw3at0,8925
pip/_vendor/html5lib/treebuilders/etree.py,sha256=w5ZFpKk6bAxnrwD2_BrF5EVC7vzz0L3LMi9Sxrbc_8w,12836
pip/_vendor/html5lib/treebuilders/etree_lxml.py,sha256=9gqDjs-IxsPhBYa5cpvv2FZ1KZlG83Giusy2lFmvIkE,14766
pip/_vendor/html5lib/treewalkers/__init__.py,sha256=OBPtc1TU5mGyy18QDMxKEyYEz0wxFUUNj5v0-XgmYhY,5719
pip/_vendor/html5lib/treewalkers/__pycache__/__init__.cpython-39.pyc,,
pip/_vendor/html5lib/treewalkers/__pycache__/base.cpython-39.pyc,,
pip/_vendor/html5lib/treewalkers/__pycache__/dom.cpython-39.pyc,,
pip/_vendor/html5lib/treewalkers/__pycache__/etree.cpython-39.pyc,,
pip/_vendor/html5lib/treewalkers/__pycache__/etree_lxml.cpython-39.pyc,,
pip/_vendor/html5lib/treewalkers/__pycache__/genshi.cpython-39.pyc,,
pip/_vendor/html5lib/treewalkers/base.py,sha256=ouiOsuSzvI0KgzdWP8PlxIaSNs9falhbiinAEc_UIJY,7476
pip/_vendor/html5lib/treewalkers/dom.py,sha256=EHyFR8D8lYNnyDU9lx_IKigVJRyecUGua0mOi7HBukc,1413
pip/_vendor/html5lib/treewalkers/etree.py,sha256=xo1L5m9VtkfpFJK0pFmkLVajhqYYVisVZn3k9kYpPkI,4551
pip/_vendor/html5lib/treewalkers/etree_lxml.py,sha256=_b0LAVWLcVu9WaU_-w3D8f0IRSpCbjf667V-3NRdhTw,6357
pip/_vendor/html5lib/treewalkers/genshi.py,sha256=4D2PECZ5n3ZN3qu3jMl9yY7B81jnQApBQSVlfaIuYbA,2309
pip/_vendor/idna/__init__.py,sha256=9Nt7xpyet3DmOrPUGooDdAwmHZZu1qUAy2EaJ93kGiQ,58
pip/_vendor/idna/__pycache__/__init__.cpython-39.pyc,,
pip/_vendor/idna/__pycache__/codec.cpython-39.pyc,,
pip/_vendor/idna/__pycache__/compat.cpython-39.pyc,,
pip/_vendor/idna/__pycache__/core.cpython-39.pyc,,
pip/_vendor/idna/__pycache__/idnadata.cpython-39.pyc,,
pip/_vendor/idna/__pycache__/intranges.cpython-39.pyc,,
pip/_vendor/idna/__pycache__/package_data.cpython-39.pyc,,
pip/_vendor/idna/__pycache__/uts46data.cpython-39.pyc,,
pip/_vendor/idna/codec.py,sha256=lvYb7yu7PhAqFaAIAdWcwgaWI2UmgseUua-1c0AsG0A,3299
pip/_vendor/idna/compat.py,sha256=R-h29D-6mrnJzbXxymrWUW7iZUvy-26TQwZ0ij57i4U,232
pip/_vendor/idna/core.py,sha256=jCoaLb3bA2tS_DDx9PpGuNTEZZN2jAzB369aP-IHYRE,11951
pip/_vendor/idna/idnadata.py,sha256=gmzFwZWjdms3kKZ_M_vwz7-LP_SCgYfSeE03B21Qpsk,42350
pip/_vendor/idna/intranges.py,sha256=TY1lpxZIQWEP6tNqjZkFA5hgoMWOj1OBmnUG8ihT87E,1749
pip/_vendor/idna/package_data.py,sha256=bxBjpLnE06_1jSYKEy5svOMu1zM3OMztXVUb1tPlcp0,22
pip/_vendor/idna/uts46data.py,sha256=lMdw2zdjkH1JUWXPPEfFUSYT3Fyj60bBmfLvvy5m7ko,202084
pip/_vendor/ipaddress.py,sha256=-0RmurI31XgAaN20WCi0zrcuoat90nNA70_6yGlx2PU,79875
pip/_vendor/msgpack/__init__.py,sha256=2gJwcsTIaAtCM0GMi2rU-_Y6kILeeQuqRkrQ22jSANc,1118
pip/_vendor/msgpack/__pycache__/__init__.cpython-39.pyc,,
pip/_vendor/msgpack/__pycache__/_version.cpython-39.pyc,,
pip/_vendor/msgpack/__pycache__/exceptions.cpython-39.pyc,,
pip/_vendor/msgpack/__pycache__/ext.cpython-39.pyc,,
pip/_vendor/msgpack/__pycache__/fallback.cpython-39.pyc,,
pip/_vendor/msgpack/_version.py,sha256=hu7lzmZ_ClOaOOmRsWb4xomhzQ4UIsLsvv8KY6UysHE,20
pip/_vendor/msgpack/exceptions.py,sha256=dCTWei8dpkrMsQDcjQk74ATl9HsIBH0ybt8zOPNqMYc,1081
pip/_vendor/msgpack/ext.py,sha256=nV19BzE9Be8SJHrxxYJHFbvEHJaXcP3avRkHVp5wovM,6034
pip/_vendor/msgpack/fallback.py,sha256=Z8V3iYUUPqKVy4WWTk64Vq3G0PylQIOmlWvgnMhmkdU,37133
pip/_vendor/packaging/__about__.py,sha256=PNMsaZn4UcCHyubgROH1bl6CluduPjI5kFrSp_Zgklo,736
pip/_vendor/packaging/__init__.py,sha256=6enbp5XgRfjBjsI9-bn00HjHf5TH21PDMOKkJW8xw-w,562
pip/_vendor/packaging/__pycache__/__about__.cpython-39.pyc,,
pip/_vendor/packaging/__pycache__/__init__.cpython-39.pyc,,
pip/_vendor/packaging/__pycache__/_compat.cpython-39.pyc,,
pip/_vendor/packaging/__pycache__/_structures.cpython-39.pyc,,
pip/_vendor/packaging/__pycache__/_typing.cpython-39.pyc,,
pip/_vendor/packaging/__pycache__/markers.cpython-39.pyc,,
pip/_vendor/packaging/__pycache__/requirements.cpython-39.pyc,,
pip/_vendor/packaging/__pycache__/specifiers.cpython-39.pyc,,
pip/_vendor/packaging/__pycache__/tags.cpython-39.pyc,,
pip/_vendor/packaging/__pycache__/utils.cpython-39.pyc,,
pip/_vendor/packaging/__pycache__/version.cpython-39.pyc,,
pip/_vendor/packaging/_compat.py,sha256=MXdsGpSE_W-ZrHoC87andI4LV2FAwU7HLL-eHe_CjhU,1128
pip/_vendor/packaging/_structures.py,sha256=ozkCX8Q8f2qE1Eic3YiQ4buDVfgz2iYevY9e7R2y3iY,2022
pip/_vendor/packaging/_typing.py,sha256=VgA0AAvsc97KB5nF89zoudOyCMEsV7FlaXzZbYqEkzA,1824
pip/_vendor/packaging/markers.py,sha256=V_RdoQqOUbSfy7y9o2vRk7BkzAh3yneC82cuWpKrqOg,9491
pip/_vendor/packaging/requirements.py,sha256=F93hkn7i8NKRZP-FtdTIlhz1PUsRjhe6eRbsBXX0Uh4,4903
pip/_vendor/packaging/specifiers.py,sha256=uYp9l13F0LcknS6d4N60ytiBgFmIhKideOq9AnsxTco,31944
pip/_vendor/packaging/tags.py,sha256=NKMS37Zo_nWrZxgsD6zbXsXgc9edn9m160cBiLmHJdE,24067
pip/_vendor/packaging/utils.py,sha256=RShlvnjO2CtYSD8uri32frMMFMTmB-3ihsq1-ghzLEw,1811
pip/_vendor/packaging/version.py,sha256=Cnbm-OO9D_qd8ZTFxzFcjSavexSYFZmyeaoPvMsjgPc,15470
pip/_vendor/pep517/__init__.py,sha256=r5uA106NGJa3slspaD2m32aFpFUiZX-mZ9vIlzAEOp4,84
pip/_vendor/pep517/__pycache__/__init__.cpython-39.pyc,,
pip/_vendor/pep517/__pycache__/_in_process.cpython-39.pyc,,
pip/_vendor/pep517/__pycache__/build.cpython-39.pyc,,
pip/_vendor/pep517/__pycache__/check.cpython-39.pyc,,
pip/_vendor/pep517/__pycache__/colorlog.cpython-39.pyc,,
pip/_vendor/pep517/__pycache__/compat.cpython-39.pyc,,
pip/_vendor/pep517/__pycache__/dirtools.cpython-39.pyc,,
pip/_vendor/pep517/__pycache__/envbuild.cpython-39.pyc,,
pip/_vendor/pep517/__pycache__/meta.cpython-39.pyc,,
pip/_vendor/pep517/__pycache__/wrappers.cpython-39.pyc,,
pip/_vendor/pep517/_in_process.py,sha256=XrKOTURJdia5R7i3i_OQmS89LASFXE3HQXfX63qZBIE,8438
pip/_vendor/pep517/build.py,sha256=DN4ouyj_bd00knOKqv0KHRtN0-JezJoNNZQmcDi4juk,3335
pip/_vendor/pep517/check.py,sha256=YoaNE3poJGpz96biVCYwtcDshwEGE2HRU5KKya9yfpY,5961
pip/_vendor/pep517/colorlog.py,sha256=Tk9AuYm_cLF3BKTBoSTJt9bRryn0aFojIQOwbfVUTxQ,4098
pip/_vendor/pep517/compat.py,sha256=M-5s4VNp8rjyT76ZZ_ibnPD44DYVzSQlyCEHayjtDPw,780
pip/_vendor/pep517/dirtools.py,sha256=2mkAkAL0mRz_elYFjRKuekTJVipH1zTn4tbf1EDev84,1129
pip/_vendor/pep517/envbuild.py,sha256=szKUFlO50X1ahQfXwz4hD9V2VE_bz9MLVPIeidsFo4w,6041
pip/_vendor/pep517/meta.py,sha256=8mnM5lDnT4zXQpBTliJbRGfesH7iioHwozbDxALPS9Y,2463
pip/_vendor/pep517/wrappers.py,sha256=yFU4Lp7TIYbmuVOTY-pXnlyGZ3F_grIi-JlLkpGN8Gk,10783
pip/_vendor/pkg_resources/__init__.py,sha256=XpGBfvS9fafA6bm5rx7vnxdxs7yqyoc_NnpzKApkJ64,108277
pip/_vendor/pkg_resources/__pycache__/__init__.cpython-39.pyc,,
pip/_vendor/pkg_resources/__pycache__/py31compat.cpython-39.pyc,,
pip/_vendor/pkg_resources/py31compat.py,sha256=CRk8fkiPRDLsbi5pZcKsHI__Pbmh_94L8mr9Qy9Ab2U,562
pip/_vendor/progress/__init__.py,sha256=fcbQQXo5np2CoQyhSH5XprkicwLZNLePR3uIahznSO0,4857
pip/_vendor/progress/__pycache__/__init__.cpython-39.pyc,,
pip/_vendor/progress/__pycache__/bar.cpython-39.pyc,,
pip/_vendor/progress/__pycache__/counter.cpython-39.pyc,,
pip/_vendor/progress/__pycache__/spinner.cpython-39.pyc,,
pip/_vendor/progress/bar.py,sha256=QuDuVNcmXgpxtNtxO0Fq72xKigxABaVmxYGBw4J3Z_E,2854
pip/_vendor/progress/counter.py,sha256=MznyBrvPWrOlGe4MZAlGUb9q3aODe6_aNYeAE_VNoYA,1372
pip/_vendor/progress/spinner.py,sha256=k8JbDW94T0-WXuXfxZIFhdoNPYp3jfnpXqBnfRv5fGs,1380
pip/_vendor/pyparsing.py,sha256=J1b4z3S_KwyJW7hKGnoN-hXW9pgMIzIP6QThyY5yJq4,273394
pip/_vendor/requests/__init__.py,sha256=orzv4-1uejMDc2v3LnTVneINGXiwqXSfrASoFBsYblE,4465
pip/_vendor/requests/__pycache__/__init__.cpython-39.pyc,,
pip/_vendor/requests/__pycache__/__version__.cpython-39.pyc,,
pip/_vendor/requests/__pycache__/_internal_utils.cpython-39.pyc,,
pip/_vendor/requests/__pycache__/adapters.cpython-39.pyc,,
pip/_vendor/requests/__pycache__/api.cpython-39.pyc,,
pip/_vendor/requests/__pycache__/auth.cpython-39.pyc,,
pip/_vendor/requests/__pycache__/certs.cpython-39.pyc,,
pip/_vendor/requests/__pycache__/compat.cpython-39.pyc,,
pip/_vendor/requests/__pycache__/cookies.cpython-39.pyc,,
pip/_vendor/requests/__pycache__/exceptions.cpython-39.pyc,,
pip/_vendor/requests/__pycache__/help.cpython-39.pyc,,
pip/_vendor/requests/__pycache__/hooks.cpython-39.pyc,,
pip/_vendor/requests/__pycache__/models.cpython-39.pyc,,
pip/_vendor/requests/__pycache__/packages.cpython-39.pyc,,
pip/_vendor/requests/__pycache__/sessions.cpython-39.pyc,,
pip/_vendor/requests/__pycache__/status_codes.cpython-39.pyc,,
pip/_vendor/requests/__pycache__/structures.cpython-39.pyc,,
pip/_vendor/requests/__pycache__/utils.cpython-39.pyc,,
pip/_vendor/requests/__version__.py,sha256=Xwky1FMlMkJJGidBM50JC7FKcosWzkjIW-WhQGrBdFM,441
pip/_vendor/requests/_internal_utils.py,sha256=Zx3PnEUccyfsB-ie11nZVAW8qClJy0gx1qNME7rgT18,1096
pip/_vendor/requests/adapters.py,sha256=e-bmKEApNVqFdylxuMJJfiaHdlmS_zhWhIMEzlHvGuc,21548
pip/_vendor/requests/api.py,sha256=PlHM-HT3PQ5lyufoeGmV-nJxRi7UnUyGVh7OV7B9XV4,6496
pip/_vendor/requests/auth.py,sha256=OMoJIVKyRLy9THr91y8rxysZuclwPB-K1Xg1zBomUhQ,10207
pip/_vendor/requests/certs.py,sha256=nXRVq9DtGmv_1AYbwjTu9UrgAcdJv05ZvkNeaoLOZxY,465
pip/_vendor/requests/compat.py,sha256=LQWuCR4qXk6w7-qQopXyz0WNHUdAD40k0mKnaAEf1-g,2045
pip/_vendor/requests/cookies.py,sha256=Y-bKX6TvW3FnYlE6Au0SXtVVWcaNdFvuAwQxw-G0iTI,18430
pip/_vendor/requests/exceptions.py,sha256=d9fJJw8YFBB9VzG9qhvxLuOx6be3c_Dwbck-dVUEAcs,3173
pip/_vendor/requests/help.py,sha256=SJPVcoXeo7KfK4AxJN5eFVQCjr0im87tU2n7ubLsksU,3578
pip/_vendor/requests/hooks.py,sha256=QReGyy0bRcr5rkwCuObNakbYsc7EkiKeBwG4qHekr2Q,757
pip/_vendor/requests/models.py,sha256=_tKIbrscbGvaTdX1UHCwRaiYmPF9VBIuBeydr4Qx1Tg,34287
pip/_vendor/requests/packages.py,sha256=njJmVifY4aSctuW3PP5EFRCxjEwMRDO6J_feG2dKWsI,695
pip/_vendor/requests/sessions.py,sha256=OBtwQs1vjkB1xamFdi_p5y8BVeX16BJoQcwSwx_Y3fI,29316
pip/_vendor/requests/status_codes.py,sha256=gT79Pbs_cQjBgp-fvrUgg1dn2DQO32bDj4TInjnMPSc,4188
pip/_vendor/requests/structures.py,sha256=msAtr9mq1JxHd-JRyiILfdFlpbJwvvFuP3rfUQT_QxE,3005
pip/_vendor/requests/utils.py,sha256=VBs99cvV8Z29WGXeWZqHzZ80_nu1AwwjYzJfe0wQIvs,30176
pip/_vendor/resolvelib/__init__.py,sha256=sqMOy4CbVJQiaG9bCPj0oAntGAVy-RWdPfVaC9XDIEQ,537
pip/_vendor/resolvelib/__pycache__/__init__.cpython-39.pyc,,
pip/_vendor/resolvelib/__pycache__/providers.cpython-39.pyc,,
pip/_vendor/resolvelib/__pycache__/reporters.cpython-39.pyc,,
pip/_vendor/resolvelib/__pycache__/resolvers.cpython-39.pyc,,
pip/_vendor/resolvelib/__pycache__/structs.cpython-39.pyc,,
pip/_vendor/resolvelib/compat/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pip/_vendor/resolvelib/compat/__pycache__/__init__.cpython-39.pyc,,
pip/_vendor/resolvelib/compat/__pycache__/collections_abc.cpython-39.pyc,,
pip/_vendor/resolvelib/compat/collections_abc.py,sha256=mtTkpr3Gf3OGvU1PD8YuvrJRhVbioxV82T-niFPoX3o,127
pip/_vendor/resolvelib/providers.py,sha256=TZDCmL-Ic-R5JRIZY8G4FLG5xB2343B0DfuK7aw2Yqw,4547
pip/_vendor/resolvelib/reporters.py,sha256=ZPSJnVfK8WvXTbX8jE0Nren0-_Hg9ym4epCUPtU8Y0U,1405
pip/_vendor/resolvelib/resolvers.py,sha256=lQTGcc-2fgHbmdiLzeNDUxVmGc5ZFjkAL6JrVqnqJIw,15018
pip/_vendor/resolvelib/structs.py,sha256=yrdhd-n7DercimPGclXe20rgqhlxw8PnxC0wmcXO19Y,2016
pip/_vendor/retrying.py,sha256=k3fflf5_Mm0XcIJYhB7Tj34bqCCPhUDkYbx1NvW2FPE,9972
pip/_vendor/six.py,sha256=U4Z_yv534W5CNyjY9i8V1OXY2SjAny8y2L5vDLhhThM,34159
pip/_vendor/toml/__init__.py,sha256=rJ1pu933HgUtyeeNiusoPd5jJOPNhaKHhSSld3o8AQo,747
pip/_vendor/toml/__pycache__/__init__.cpython-39.pyc,,
pip/_vendor/toml/__pycache__/common.cpython-39.pyc,,
pip/_vendor/toml/__pycache__/decoder.cpython-39.pyc,,
pip/_vendor/toml/__pycache__/encoder.cpython-39.pyc,,
pip/_vendor/toml/__pycache__/ordered.cpython-39.pyc,,
pip/_vendor/toml/__pycache__/tz.cpython-39.pyc,,
pip/_vendor/toml/common.py,sha256=ViBccAduP6eZNJAb1POhRhjOAi56TDsNgWJ1TjgXAug,242
pip/_vendor/toml/decoder.py,sha256=atpXmyFCzNGiqhkcYLySBuJQkPeSHDzBz47sEaX1amw,38696
pip/_vendor/toml/encoder.py,sha256=fPqLyFdPAam17X9SELz2TMp9affkfHCmgWZxRKcmzhY,9955
pip/_vendor/toml/ordered.py,sha256=UWt5Eka90IWVBYdvLgY5PXnkBcVYpHjnw9T67rM85T8,378
pip/_vendor/toml/tz.py,sha256=DrAgI3wZxZiGcLuV_l8ueA_nPrYoxQ3hZA9tJSjWRsQ,618
pip/_vendor/urllib3/__init__.py,sha256=rdFZCO1L7e8861ZTvo8AiSKwxCe9SnWQUQwJ599YV9c,2683
pip/_vendor/urllib3/__pycache__/__init__.cpython-39.pyc,,
pip/_vendor/urllib3/__pycache__/_collections.cpython-39.pyc,,
pip/_vendor/urllib3/__pycache__/connection.cpython-39.pyc,,
pip/_vendor/urllib3/__pycache__/connectionpool.cpython-39.pyc,,
pip/_vendor/urllib3/__pycache__/exceptions.cpython-39.pyc,,
pip/_vendor/urllib3/__pycache__/fields.cpython-39.pyc,,
pip/_vendor/urllib3/__pycache__/filepost.cpython-39.pyc,,
pip/_vendor/urllib3/__pycache__/poolmanager.cpython-39.pyc,,
pip/_vendor/urllib3/__pycache__/request.cpython-39.pyc,,
pip/_vendor/urllib3/__pycache__/response.cpython-39.pyc,,
pip/_vendor/urllib3/_collections.py,sha256=GouVsNzwg6jADZTmimMI6oqmwKSswnMo9dh5tGNVWO4,10792
pip/_vendor/urllib3/connection.py,sha256=Fln8a_bkegdNMkFoSOwyI0PJvL1OqzVUO6ifihKOTpc,14461
pip/_vendor/urllib3/connectionpool.py,sha256=egdaX-Db_LVXifDxv3JY0dHIpQqDv0wC0_9Eeh8FkPM,35725
pip/_vendor/urllib3/contrib/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pip/_vendor/urllib3/contrib/__pycache__/__init__.cpython-39.pyc,,
pip/_vendor/urllib3/contrib/__pycache__/_appengine_environ.cpython-39.pyc,,
pip/_vendor/urllib3/contrib/__pycache__/appengine.cpython-39.pyc,,
pip/_vendor/urllib3/contrib/__pycache__/ntlmpool.cpython-39.pyc,,
pip/_vendor/urllib3/contrib/__pycache__/pyopenssl.cpython-39.pyc,,
pip/_vendor/urllib3/contrib/__pycache__/securetransport.cpython-39.pyc,,
pip/_vendor/urllib3/contrib/__pycache__/socks.cpython-39.pyc,,
pip/_vendor/urllib3/contrib/_appengine_environ.py,sha256=bDbyOEhW2CKLJcQqAKAyrEHN-aklsyHFKq6vF8ZFsmk,957
pip/_vendor/urllib3/contrib/_securetransport/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pip/_vendor/urllib3/contrib/_securetransport/__pycache__/__init__.cpython-39.pyc,,
pip/_vendor/urllib3/contrib/_securetransport/__pycache__/bindings.cpython-39.pyc,,
pip/_vendor/urllib3/contrib/_securetransport/__pycache__/low_level.cpython-39.pyc,,
pip/_vendor/urllib3/contrib/_securetransport/bindings.py,sha256=mullWYFaghBdRWla6HYU-TBgFRTPLBEfxj3jplbeJmQ,16886
pip/_vendor/urllib3/contrib/_securetransport/low_level.py,sha256=V7GnujxnWZh2N2sMsV5N4d9Imymokkm3zBwgt77_bSE,11956
pip/_vendor/urllib3/contrib/appengine.py,sha256=gfdK4T7CRin7v9HRhHDbDh-Hbk66hHDWeoz7nV3PJo8,11034
pip/_vendor/urllib3/contrib/ntlmpool.py,sha256=a402AwGN_Ll3N-4ur_AS6UrU-ycUtlnYqoBF76lORg8,4160
pip/_vendor/urllib3/contrib/pyopenssl.py,sha256=9gm5kpC0ScbDCWobeCrh5LDqS8HgU8FNhmk5v8qQ5Bs,16582
pip/_vendor/urllib3/contrib/securetransport.py,sha256=vBDFjSnH2gWa-ztMKVaiwW46K1mlDZKqvo_VAonfdcY,32401
pip/_vendor/urllib3/contrib/socks.py,sha256=nzDMgDIFJWVubKHqvIn2-SKCO91hhJInP92WgHChGzA,7036
pip/_vendor/urllib3/exceptions.py,sha256=D2Jvab7M7m_n0rnmBmq481paoVT32VvVeB6VeQM0y-w,7172
pip/_vendor/urllib3/fields.py,sha256=kroD76QK-GdHHW7f_AUN4XxDC3OQPI2FFrS9eSL4BCs,8553
pip/_vendor/urllib3/filepost.py,sha256=vj0qbrpT1AFzvvW4SuC8M5kJiw7wftHcSr-7b8UpPpw,2440
pip/_vendor/urllib3/packages/__init__.py,sha256=h4BLhD4tLaBx1adaDtKXfupsgqY0wWLXb_f1_yVlV6A,108
pip/_vendor/urllib3/packages/__pycache__/__init__.cpython-39.pyc,,
pip/_vendor/urllib3/packages/__pycache__/six.cpython-39.pyc,,
pip/_vendor/urllib3/packages/backports/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
pip/_vendor/urllib3/packages/backports/__pycache__/__init__.cpython-39.pyc,,
pip/_vendor/urllib3/packages/backports/__pycache__/makefile.cpython-39.pyc,,
pip/_vendor/urllib3/packages/backports/makefile.py,sha256=005wrvH-_pWSnTFqQ2sdzzh4zVCtQUUQ4mR2Yyxwc0A,1418
pip/_vendor/urllib3/packages/six.py,sha256=adx4z-eM_D0Vvu0IIqVzFACQ_ux9l64y7DkSEfbxCDs,32536
pip/_vendor/urllib3/packages/ssl_match_hostname/__init__.py,sha256=ywgKMtfHi1-DrXlzPfVAhzsLzzqcK7GT6eLgdode1Fg,688
pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/__init__.cpython-39.pyc,,
pip/_vendor/urllib3/packages/ssl_match_hostname/__pycache__/_implementation.cpython-39.pyc,,
pip/_vendor/urllib3/packages/ssl_match_hostname/_implementation.py,sha256=rvQDQviqQLtPJB6MfEgABnBFj3nXft7ZJ3Dx-BC0AQY,5696
pip/_vendor/urllib3/poolmanager.py,sha256=iWEAIGrVNGoOmQyfiFwCqG-IyYy6GIQ-jJ9QCsX9li4,17861
pip/_vendor/urllib3/request.py,sha256=hhoHvEEatyd9Tn5EbGjQ0emn-ENMCyY591yNWTneINA,6018
pip/_vendor/urllib3/response.py,sha256=eo1Sfkn2x44FtjgP3qwwDsG9ak84spQAxEGy7Ovd4Pc,28221
pip/_vendor/urllib3/util/__init__.py,sha256=bWNaav_OT-1L7-sxm59cGb59rDORlbhb_4noduM5m0U,1038
pip/_vendor/urllib3/util/__pycache__/__init__.cpython-39.pyc,,
pip/_vendor/urllib3/util/__pycache__/connection.cpython-39.pyc,,
pip/_vendor/urllib3/util/__pycache__/queue.cpython-39.pyc,,
pip/_vendor/urllib3/util/__pycache__/request.cpython-39.pyc,,
pip/_vendor/urllib3/util/__pycache__/response.cpython-39.pyc,,
pip/_vendor/urllib3/util/__pycache__/retry.cpython-39.pyc,,
pip/_vendor/urllib3/util/__pycache__/ssl_.cpython-39.pyc,,
pip/_vendor/urllib3/util/__pycache__/timeout.cpython-39.pyc,,
pip/_vendor/urllib3/util/__pycache__/url.cpython-39.pyc,,
pip/_vendor/urllib3/util/__pycache__/wait.cpython-39.pyc,,
pip/_vendor/urllib3/util/connection.py,sha256=NsxUAKQ98GKywta--zg57CdVpeTCI6N-GElCq78Dl8U,4637
pip/_vendor/urllib3/util/queue.py,sha256=myTX3JDHntglKQNBf3b6dasHH-uF-W59vzGSQiFdAfI,497
pip/_vendor/urllib3/util/request.py,sha256=C-6-AWffxZG03AdRGoY59uqsn4CVItKU6gjxz7Hc3Mc,3815
pip/_vendor/urllib3/util/response.py,sha256=_WbTQr8xRQuJuY2rTIZxVdJD6mnEOtQupjaK_bF_Vj8,2573
pip/_vendor/urllib3/util/retry.py,sha256=3wbv7SdzYNOxPcBiFkPCubTbK1_6vWSepznOXirhUfA,15543
pip/_vendor/urllib3/util/ssl_.py,sha256=N7gqt2iqzKBsWGmc61YeKNSPri6Ns2iZ_MD5hV2y8tU,14523
pip/_vendor/urllib3/util/timeout.py,sha256=3qawUo-TZq4q7tyeRToMIOdNGEOBjOOQVq7nHnLryP4,9947
pip/_vendor/urllib3/util/url.py,sha256=S4YyAwWKJPjFFECC7l9Vp9EKqRH1XAb-uQFANn1Tak0,13981
pip/_vendor/urllib3/util/wait.py,sha256=k46KzqIYu3Vnzla5YW3EvtInNlU_QycFqQAghIOxoAg,5406
pip/_vendor/vendor.txt,sha256=bWUiaRjMJhuUsqFZHEJkBH_6lJ_Avl9cOyszcI74IHs,437
pip/_vendor/webencodings/__init__.py,sha256=qOBJIuPy_4ByYH6W_bNgJF-qYQ2DoU-dKsDu5yRWCXg,10579
pip/_vendor/webencodings/__pycache__/__init__.cpython-39.pyc,,
pip/_vendor/webencodings/__pycache__/labels.cpython-39.pyc,,
pip/_vendor/webencodings/__pycache__/mklabels.cpython-39.pyc,,
pip/_vendor/webencodings/__pycache__/tests.cpython-39.pyc,,
pip/_vendor/webencodings/__pycache__/x_user_defined.cpython-39.pyc,,
pip/_vendor/webencodings/labels.py,sha256=4AO_KxTddqGtrL9ns7kAPjb0CcN6xsCIxbK37HY9r3E,8979
pip/_vendor/webencodings/mklabels.py,sha256=GYIeywnpaLnP0GSic8LFWgd0UVvO_l1Nc6YoF-87R_4,1305
pip/_vendor/webencodings/tests.py,sha256=OtGLyjhNY1fvkW1GvLJ_FV9ZoqC9Anyjr7q3kxTbzNs,6563
pip/_vendor/webencodings/x_user_defined.py,sha256=yOqWSdmpytGfUgh_Z6JYgDNhoc-BAHyyeeT15Fr42tM,4307
| Django-locallibrary/env/Lib/site-packages/pip-20.2.3.dist-info/RECORD/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip-20.2.3.dist-info/RECORD",
"repo_id": "Django-locallibrary",
"token_count": 30902
} | 2 |
import logging
from pip._internal.cli.base_command import Command
from pip._internal.cli.status_codes import ERROR, SUCCESS
from pip._internal.operations.check import (
check_package_set,
create_package_set_from_installed,
)
from pip._internal.utils.misc import write_output
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
logger = logging.getLogger(__name__)
if MYPY_CHECK_RUNNING:
from typing import List, Any
from optparse import Values
class CheckCommand(Command):
"""Verify installed packages have compatible dependencies."""
usage = """
%prog [options]"""
def run(self, options, args):
# type: (Values, List[Any]) -> int
package_set, parsing_probs = create_package_set_from_installed()
missing, conflicting = check_package_set(package_set)
for project_name in missing:
version = package_set[project_name].version
for dependency in missing[project_name]:
write_output(
"%s %s requires %s, which is not installed.",
project_name, version, dependency[0],
)
for project_name in conflicting:
version = package_set[project_name].version
for dep_name, dep_version, req in conflicting[project_name]:
write_output(
"%s %s has requirement %s, but you have %s %s.",
project_name, version, req, dep_name, dep_version,
)
if missing or conflicting or parsing_probs:
return ERROR
else:
write_output("No broken requirements found.")
return SUCCESS
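# --- Illustrative sketch (not part of the original pip source) ---
# A hypothetical helper showing how the same check can be reused outside the
# CLI. The return shapes match what ``run()`` consumes above:
# ``missing[name]`` yields requirement tuples (the dependency comes first),
# and ``conflicting[name]`` yields (dep_name, dep_version, req) triples.
def _environment_is_consistent():
    # type: () -> bool
    package_set, parsing_probs = create_package_set_from_installed()
    missing, conflicting = check_package_set(package_set)
    # True when the installed environment has no broken requirements.
    return not (missing or conflicting or parsing_probs)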
| Django-locallibrary/env/Lib/site-packages/pip/_internal/commands/check.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_internal/commands/check.py",
"repo_id": "Django-locallibrary",
"token_count": 691
} | 3 |
"""Locations where we look for configs, install stuff, etc"""
# The following comment should be removed at some point in the future.
# mypy: strict-optional=False
from __future__ import absolute_import
import os
import os.path
import platform
import site
import sys
import sysconfig
from distutils import sysconfig as distutils_sysconfig
from distutils.command.install import SCHEME_KEYS # type: ignore
from distutils.command.install import install as distutils_install_command
from pip._internal.models.scheme import Scheme
from pip._internal.utils import appdirs
from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.typing import MYPY_CHECK_RUNNING, cast
from pip._internal.utils.virtualenv import running_under_virtualenv
if MYPY_CHECK_RUNNING:
from typing import Dict, List, Optional, Union
from distutils.cmd import Command as DistutilsCommand
# Application Directories
USER_CACHE_DIR = appdirs.user_cache_dir("pip")
def get_major_minor_version():
# type: () -> str
"""
Return the major-minor version of the current Python as a string, e.g.
"3.7" or "3.10".
"""
return '{}.{}'.format(*sys.version_info)
def get_src_prefix():
# type: () -> str
if running_under_virtualenv():
src_prefix = os.path.join(sys.prefix, 'src')
else:
# FIXME: keep src in cwd for now (it is not a temporary folder)
try:
src_prefix = os.path.join(os.getcwd(), 'src')
except OSError:
# In case the current working directory has been renamed or deleted
sys.exit(
"The folder you are executing pip from can no longer be found."
)
# under macOS + virtualenv sys.prefix is not properly resolved
# it is something like /path/to/python/bin/..
return os.path.abspath(src_prefix)
# FIXME doesn't account for venv linked to global site-packages
site_packages = sysconfig.get_path("purelib") # type: Optional[str]
# This is because of a bug in PyPy's sysconfig module, see
# https://bitbucket.org/pypy/pypy/issues/2506/sysconfig-returns-incorrect-paths
# for more information.
if platform.python_implementation().lower() == "pypy":
site_packages = distutils_sysconfig.get_python_lib()
try:
# Use getusersitepackages if this is present, as it ensures that the
# value is initialised properly.
user_site = site.getusersitepackages()
except AttributeError:
user_site = site.USER_SITE
if WINDOWS:
bin_py = os.path.join(sys.prefix, 'Scripts')
bin_user = os.path.join(user_site, 'Scripts')
    # buildout seems to use 'bin' on Windows too, so fall back if 'Scripts' is missing
if not os.path.exists(bin_py):
bin_py = os.path.join(sys.prefix, 'bin')
bin_user = os.path.join(user_site, 'bin')
else:
bin_py = os.path.join(sys.prefix, 'bin')
bin_user = os.path.join(user_site, 'bin')
# Forcing to use /usr/local/bin for standard macOS framework installs
# Also log to ~/Library/Logs/ for use with the Console.app log viewer
if sys.platform[:6] == 'darwin' and sys.prefix[:16] == '/System/Library/':
bin_py = '/usr/local/bin'
def distutils_scheme(
dist_name, user=False, home=None, root=None, isolated=False, prefix=None
):
# type:(str, bool, str, str, bool, str) -> Dict[str, str]
"""
Return a distutils install scheme
"""
from distutils.dist import Distribution
dist_args = {'name': dist_name} # type: Dict[str, Union[str, List[str]]]
if isolated:
dist_args["script_args"] = ["--no-user-cfg"]
d = Distribution(dist_args)
d.parse_config_files()
obj = None # type: Optional[DistutilsCommand]
obj = d.get_command_obj('install', create=True)
assert obj is not None
i = cast(distutils_install_command, obj)
# NOTE: setting user or home has the side-effect of creating the home dir
    # or user base for installations during finalize_options().
    # Ideally, we'd prefer a scheme class that has no side-effects.
assert not (user and prefix), "user={} prefix={}".format(user, prefix)
assert not (home and prefix), "home={} prefix={}".format(home, prefix)
i.user = user or i.user
if user or home:
i.prefix = ""
i.prefix = prefix or i.prefix
i.home = home or i.home
i.root = root or i.root
i.finalize_options()
scheme = {}
for key in SCHEME_KEYS:
scheme[key] = getattr(i, 'install_' + key)
# install_lib specified in setup.cfg should install *everything*
# into there (i.e. it takes precedence over both purelib and
# platlib). Note, i.install_lib is *always* set after
# finalize_options(); we only want to override here if the user
    # has explicitly requested it, hence going back to the config.
if 'install_lib' in d.get_option_dict('install'):
scheme.update(dict(purelib=i.install_lib, platlib=i.install_lib))
if running_under_virtualenv():
scheme['headers'] = os.path.join(
i.prefix,
'include',
'site',
'python{}'.format(get_major_minor_version()),
dist_name,
)
if root is not None:
path_no_drive = os.path.splitdrive(
os.path.abspath(scheme["headers"]))[1]
scheme["headers"] = os.path.join(
root,
path_no_drive[1:],
)
return scheme
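# --- Illustrative sketch (not part of the original pip source) ---
# ``distutils_scheme`` returns one path per distutils SCHEME_KEYS member:
# 'purelib', 'platlib', 'headers', 'scripts' and 'data'. A hypothetical
# caller only needs the distribution name:
def _example_scheme_paths(dist_name='example-dist'):
    # type: (str) -> Dict[str, str]
    scheme = distutils_scheme(dist_name)
    assert set(scheme) == set(SCHEME_KEYS)
    return scheme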
def get_scheme(
dist_name, # type: str
user=False, # type: bool
home=None, # type: Optional[str]
root=None, # type: Optional[str]
isolated=False, # type: bool
prefix=None, # type: Optional[str]
):
# type: (...) -> Scheme
"""
Get the "scheme" corresponding to the input parameters. The distutils
documentation provides the context for the available schemes:
https://docs.python.org/3/install/index.html#alternate-installation
:param dist_name: the name of the package to retrieve the scheme for, used
in the headers scheme path
:param user: indicates to use the "user" scheme
:param home: indicates to use the "home" scheme and provides the base
directory for the same
:param root: root under which other directories are re-based
:param isolated: equivalent to --no-user-cfg, i.e. do not consider
~/.pydistutils.cfg (posix) or ~/pydistutils.cfg (non-posix) for
scheme paths
:param prefix: indicates to use the "prefix" scheme and provides the
base directory for the same
"""
scheme = distutils_scheme(
dist_name, user, home, root, isolated, prefix
)
return Scheme(
platlib=scheme["platlib"],
purelib=scheme["purelib"],
headers=scheme["headers"],
scripts=scheme["scripts"],
data=scheme["data"],
)
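# --- Illustrative sketch (not part of the original pip source) ---
# ``get_scheme`` wraps the same mapping in a ``Scheme`` object with plain
# attribute access, e.g. for a hypothetical --user install:
def _example_user_scheme():
    # type: () -> Scheme
    scheme = get_scheme('example-dist', user=True)
    # scheme.purelib, scheme.platlib, scheme.headers, scheme.scripts and
    # scheme.data are all plain path strings.
    return scheme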
| Django-locallibrary/env/Lib/site-packages/pip/_internal/locations.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_internal/locations.py",
"repo_id": "Django-locallibrary",
"token_count": 2550
} | 4 |
from pip._vendor.packaging.utils import canonicalize_name
from pip._internal.exceptions import CommandError
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import Optional, Set, FrozenSet
class FormatControl(object):
"""Helper for managing formats from which a package can be installed.
"""
__slots__ = ["no_binary", "only_binary"]
def __init__(self, no_binary=None, only_binary=None):
# type: (Optional[Set[str]], Optional[Set[str]]) -> None
if no_binary is None:
no_binary = set()
if only_binary is None:
only_binary = set()
self.no_binary = no_binary
self.only_binary = only_binary
def __eq__(self, other):
# type: (object) -> bool
if not isinstance(other, self.__class__):
return NotImplemented
if self.__slots__ != other.__slots__:
return False
return all(
getattr(self, k) == getattr(other, k)
for k in self.__slots__
)
def __ne__(self, other):
# type: (object) -> bool
return not self.__eq__(other)
def __repr__(self):
# type: () -> str
return "{}({}, {})".format(
self.__class__.__name__,
self.no_binary,
self.only_binary
)
@staticmethod
def handle_mutual_excludes(value, target, other):
# type: (str, Set[str], Set[str]) -> None
if value.startswith('-'):
raise CommandError(
"--no-binary / --only-binary option requires 1 argument."
)
new = value.split(',')
while ':all:' in new:
other.clear()
target.clear()
target.add(':all:')
del new[:new.index(':all:') + 1]
        # Without a ':none:', we want to discard everything, as ':all:' covers it
if ':none:' not in new:
return
for name in new:
if name == ':none:':
target.clear()
continue
name = canonicalize_name(name)
other.discard(name)
target.add(name)
def get_allowed_formats(self, canonical_name):
# type: (str) -> FrozenSet[str]
result = {"binary", "source"}
if canonical_name in self.only_binary:
result.discard('source')
elif canonical_name in self.no_binary:
result.discard('binary')
elif ':all:' in self.only_binary:
result.discard('source')
elif ':all:' in self.no_binary:
result.discard('binary')
return frozenset(result)
def disallow_binaries(self):
# type: () -> None
self.handle_mutual_excludes(
':all:', self.no_binary, self.only_binary,
)
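# --- Illustrative sketch (not part of the original pip source) ---
# How the ':all:' / ':none:' markers interact, mirroring the way pip parses
# repeated --no-binary / --only-binary options:
def _example_format_control():
    # type: () -> FrozenSet[str]
    fc = FormatControl()
    # "--no-binary :all:" forbids wheels for every package ...
    FormatControl.handle_mutual_excludes(
        ':all:', fc.no_binary, fc.only_binary,
    )
    # ... and a later "--no-binary :none:,simple" re-allows wheels for
    # everything except "simple".
    FormatControl.handle_mutual_excludes(
        ':none:,simple', fc.no_binary, fc.only_binary,
    )
    return fc.get_allowed_formats('simple')  # frozenset({'source'})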
| Django-locallibrary/env/Lib/site-packages/pip/_internal/models/format_control.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_internal/models/format_control.py",
"repo_id": "Django-locallibrary",
"token_count": 1344
} | 5 |
"""Support for installing and building the "wheel" binary package format.
"""
from __future__ import absolute_import
import collections
import compileall
import contextlib
import csv
import importlib
import logging
import os.path
import re
import shutil
import sys
import warnings
from base64 import urlsafe_b64encode
from itertools import chain, starmap
from zipfile import ZipFile
from pip._vendor import pkg_resources
from pip._vendor.distlib.scripts import ScriptMaker
from pip._vendor.distlib.util import get_export_entry
from pip._vendor.six import (
PY2,
ensure_str,
ensure_text,
itervalues,
reraise,
text_type,
)
from pip._vendor.six.moves import filterfalse, map
from pip._internal.exceptions import InstallationError
from pip._internal.locations import get_major_minor_version
from pip._internal.models.direct_url import DIRECT_URL_METADATA_NAME, DirectUrl
from pip._internal.models.scheme import SCHEME_KEYS
from pip._internal.utils.filesystem import adjacent_tmp_file, replace
from pip._internal.utils.misc import (
captured_stdout,
ensure_dir,
hash_file,
partition,
)
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.unpacking import (
current_umask,
is_within_directory,
set_extracted_file_to_default_mode_plus_executable,
zip_item_is_executable,
)
from pip._internal.utils.wheel import (
parse_wheel,
pkg_resources_distribution_for_wheel,
)
# Use the custom cast function at runtime to make cast work,
# and import typing.cast when performing pre-commit and type
# checks
if not MYPY_CHECK_RUNNING:
from pip._internal.utils.typing import cast
else:
from email.message import Message
from typing import (
Any,
Callable,
Dict,
IO,
Iterable,
Iterator,
List,
NewType,
Optional,
Protocol,
Sequence,
Set,
Tuple,
Union,
cast,
)
from zipfile import ZipInfo
from pip._vendor.pkg_resources import Distribution
from pip._internal.models.scheme import Scheme
from pip._internal.utils.filesystem import NamedTemporaryFileResult
RecordPath = NewType('RecordPath', text_type)
InstalledCSVRow = Tuple[RecordPath, str, Union[int, str]]
class File(Protocol):
src_record_path = None # type: RecordPath
dest_path = None # type: text_type
changed = None # type: bool
def save(self):
# type: () -> None
pass
logger = logging.getLogger(__name__)
def rehash(path, blocksize=1 << 20):
# type: (text_type, int) -> Tuple[str, str]
"""Return (encoded_digest, length) for path using hashlib.sha256()"""
h, length = hash_file(path, blocksize)
digest = 'sha256=' + urlsafe_b64encode(
h.digest()
).decode('latin1').rstrip('=')
# unicode/str python2 issues
return (digest, str(length)) # type: ignore
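# Illustrative sketch (not part of the original pip source): the digest string
# built above follows the RECORD conventions of PEP 376/427 -- "sha256=" plus
# the urlsafe base64 of the raw digest with the '=' padding stripped:
def _example_record_digest(data=b'hello'):
    # type: (bytes) -> str
    import hashlib
    digest = hashlib.sha256(data).digest()
    return 'sha256=' + urlsafe_b64encode(digest).decode('latin1').rstrip('=')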
def csv_io_kwargs(mode):
# type: (str) -> Dict[str, Any]
"""Return keyword arguments to properly open a CSV file
in the given mode.
"""
if PY2:
return {'mode': '{}b'.format(mode)}
else:
return {'mode': mode, 'newline': '', 'encoding': 'utf-8'}
def fix_script(path):
# type: (text_type) -> bool
"""Replace #!python with #!/path/to/python
Return True if file was changed.
"""
# XXX RECORD hashes will need to be updated
assert os.path.isfile(path)
with open(path, 'rb') as script:
firstline = script.readline()
if not firstline.startswith(b'#!python'):
return False
exename = sys.executable.encode(sys.getfilesystemencoding())
firstline = b'#!' + exename + os.linesep.encode("ascii")
rest = script.read()
with open(path, 'wb') as script:
script.write(firstline)
script.write(rest)
return True
def wheel_root_is_purelib(metadata):
# type: (Message) -> bool
return metadata.get("Root-Is-Purelib", "").lower() == "true"
def get_entrypoints(distribution):
# type: (Distribution) -> Tuple[Dict[str, str], Dict[str, str]]
# get the entry points and then the script names
try:
console = distribution.get_entry_map('console_scripts')
gui = distribution.get_entry_map('gui_scripts')
except KeyError:
# Our dict-based Distribution raises KeyError if entry_points.txt
# doesn't exist.
return {}, {}
def _split_ep(s):
# type: (pkg_resources.EntryPoint) -> Tuple[str, str]
"""get the string representation of EntryPoint,
remove space and split on '='
"""
split_parts = str(s).replace(" ", "").split("=")
return split_parts[0], split_parts[1]
# convert the EntryPoint objects into strings with module:function
console = dict(_split_ep(v) for v in console.values())
gui = dict(_split_ep(v) for v in gui.values())
return console, gui
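# Illustrative sketch (not part of the original pip source): what ``_split_ep``
# does to the string form of an entry point, for example
# "pip = pip._internal.cli.main:main" -> ("pip", "pip._internal.cli.main:main"):
def _example_split_ep(spec='pip = pip._internal.cli.main:main'):
    # type: (str) -> Tuple[str, str]
    split_parts = spec.replace(' ', '').split('=')
    return split_parts[0], split_parts[1]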
def message_about_scripts_not_on_PATH(scripts):
# type: (Sequence[str]) -> Optional[str]
"""Determine if any scripts are not on PATH and format a warning.
Returns a warning message if one or more scripts are not on PATH,
otherwise None.
"""
if not scripts:
return None
# Group scripts by the path they were installed in
grouped_by_dir = collections.defaultdict(set) # type: Dict[str, Set[str]]
for destfile in scripts:
parent_dir = os.path.dirname(destfile)
script_name = os.path.basename(destfile)
grouped_by_dir[parent_dir].add(script_name)
# We don't want to warn for directories that are on PATH.
not_warn_dirs = [
os.path.normcase(i).rstrip(os.sep) for i in
os.environ.get("PATH", "").split(os.pathsep)
]
# If an executable sits with sys.executable, we don't warn for it.
# This covers the case of venv invocations without activating the venv.
not_warn_dirs.append(os.path.normcase(os.path.dirname(sys.executable)))
warn_for = {
parent_dir: scripts for parent_dir, scripts in grouped_by_dir.items()
if os.path.normcase(parent_dir) not in not_warn_dirs
} # type: Dict[str, Set[str]]
if not warn_for:
return None
# Format a message
msg_lines = []
for parent_dir, dir_scripts in warn_for.items():
sorted_scripts = sorted(dir_scripts) # type: List[str]
if len(sorted_scripts) == 1:
start_text = "script {} is".format(sorted_scripts[0])
else:
start_text = "scripts {} are".format(
", ".join(sorted_scripts[:-1]) + " and " + sorted_scripts[-1]
)
msg_lines.append(
"The {} installed in '{}' which is not on PATH."
.format(start_text, parent_dir)
)
last_line_fmt = (
"Consider adding {} to PATH or, if you prefer "
"to suppress this warning, use --no-warn-script-location."
)
if len(msg_lines) == 1:
msg_lines.append(last_line_fmt.format("this directory"))
else:
msg_lines.append(last_line_fmt.format("these directories"))
# Add a note if any directory starts with ~
warn_for_tilde = any(
i[0] == "~" for i in os.environ.get("PATH", "").split(os.pathsep) if i
)
if warn_for_tilde:
tilde_warning_msg = (
"NOTE: The current PATH contains path(s) starting with `~`, "
"which may not be expanded by all applications."
)
msg_lines.append(tilde_warning_msg)
# Returns the formatted multiline message
return "\n".join(msg_lines)
def _normalized_outrows(outrows):
# type: (Iterable[InstalledCSVRow]) -> List[Tuple[str, str, str]]
"""Normalize the given rows of a RECORD file.
Items in each row are converted into str. Rows are then sorted to make
the value more predictable for tests.
Each row is a 3-tuple (path, hash, size) and corresponds to a record of
a RECORD file (see PEP 376 and PEP 427 for details). For the rows
passed to this function, the size can be an integer as an int or string,
or the empty string.
"""
# Normally, there should only be one row per path, in which case the
# second and third elements don't come into play when sorting.
# However, in cases in the wild where a path might happen to occur twice,
# we don't want the sort operation to trigger an error (but still want
# determinism). Since the third element can be an int or string, we
# coerce each element to a string to avoid a TypeError in this case.
# For additional background, see--
# https://github.com/pypa/pip/issues/5868
return sorted(
(ensure_str(record_path, encoding='utf-8'), hash_, str(size))
for record_path, hash_, size in outrows
)
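# Illustrative sketch (not part of the original pip source): because sizes are
# coerced to str, rows mixing int and str sizes sort without a TypeError:
def _example_normalized_outrows():
    # type: () -> List[Tuple[str, str, str]]
    rows = [
        (cast('RecordPath', u'b.py'), 'sha256=xyz', 20),
        (cast('RecordPath', u'a.py'), 'sha256=abc', '10'),
    ]
    return _normalized_outrows(rows)  # sorted list of all-str 3-tuples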
def _record_to_fs_path(record_path):
# type: (RecordPath) -> text_type
return record_path
def _fs_to_record_path(path, relative_to=None):
# type: (text_type, Optional[text_type]) -> RecordPath
if relative_to is not None:
# On Windows, do not handle relative paths if they belong to different
# logical disks
if os.path.splitdrive(path)[0].lower() == \
os.path.splitdrive(relative_to)[0].lower():
path = os.path.relpath(path, relative_to)
path = path.replace(os.path.sep, '/')
return cast('RecordPath', path)
def _parse_record_path(record_column):
# type: (str) -> RecordPath
p = ensure_text(record_column, encoding='utf-8')
return cast('RecordPath', p)
def get_csv_rows_for_installed(
old_csv_rows, # type: List[List[str]]
installed, # type: Dict[RecordPath, RecordPath]
changed, # type: Set[RecordPath]
generated, # type: List[str]
lib_dir, # type: str
):
# type: (...) -> List[InstalledCSVRow]
"""
:param installed: A map from archive RECORD path to installation RECORD
path.
"""
installed_rows = [] # type: List[InstalledCSVRow]
for row in old_csv_rows:
if len(row) > 3:
logger.warning('RECORD line has more than three elements: %s', row)
old_record_path = _parse_record_path(row[0])
new_record_path = installed.pop(old_record_path, old_record_path)
if new_record_path in changed:
digest, length = rehash(_record_to_fs_path(new_record_path))
else:
digest = row[1] if len(row) > 1 else ''
length = row[2] if len(row) > 2 else ''
installed_rows.append((new_record_path, digest, length))
for f in generated:
path = _fs_to_record_path(f, lib_dir)
digest, length = rehash(f)
installed_rows.append((path, digest, length))
for installed_record_path in itervalues(installed):
installed_rows.append((installed_record_path, '', ''))
return installed_rows
def get_console_script_specs(console):
# type: (Dict[str, str]) -> List[str]
"""
Given the mapping from entrypoint name to callable, return the relevant
console script specs.
"""
# Don't mutate caller's version
console = console.copy()
scripts_to_generate = []
# Special case pip and setuptools to generate versioned wrappers
#
# The issue is that some projects (specifically, pip and setuptools) use
# code in setup.py to create "versioned" entry points - pip2.7 on Python
# 2.7, pip3.3 on Python 3.3, etc. But these entry points are baked into
# the wheel metadata at build time, and so if the wheel is installed with
# a *different* version of Python the entry points will be wrong. The
# correct fix for this is to enhance the metadata to be able to describe
# such versioned entry points, but that won't happen till Metadata 2.0 is
# available.
# In the meantime, projects using versioned entry points will either have
# incorrect versioned entry points, or they will not be able to distribute
# "universal" wheels (i.e., they will need a wheel per Python version).
#
# Because setuptools and pip are bundled with _ensurepip and virtualenv,
# we need to use universal wheels. So, as a stopgap until Metadata 2.0, we
# override the versioned entry points in the wheel and generate the
# correct ones. This code is purely a short-term measure until Metadata 2.0
# is available.
#
    # Adding to the level of hack in this section: in order to support
    # ensurepip, this code looks for an ``ENSUREPIP_OPTIONS`` environment
    # variable, which controls which versioned scripts get installed.
#
# ENSUREPIP_OPTIONS=altinstall
# - Only pipX.Y and easy_install-X.Y will be generated and installed
# ENSUREPIP_OPTIONS=install
    # - pipX.Y, pipX, easy_install-X.Y will be generated and installed. Note
    #   that this branch applies whenever ENSUREPIP_OPTIONS is set to any
    #   value other than altinstall
# DEFAULT
# - The default behavior is to install pip, pipX, pipX.Y, easy_install
# and easy_install-X.Y.
pip_script = console.pop('pip', None)
if pip_script:
if "ENSUREPIP_OPTIONS" not in os.environ:
scripts_to_generate.append('pip = ' + pip_script)
if os.environ.get("ENSUREPIP_OPTIONS", "") != "altinstall":
scripts_to_generate.append(
'pip{} = {}'.format(sys.version_info[0], pip_script)
)
scripts_to_generate.append(
'pip{} = {}'.format(get_major_minor_version(), pip_script)
)
# Delete any other versioned pip entry points
pip_ep = [k for k in console if re.match(r'pip(\d(\.\d)?)?$', k)]
for k in pip_ep:
del console[k]
easy_install_script = console.pop('easy_install', None)
if easy_install_script:
if "ENSUREPIP_OPTIONS" not in os.environ:
scripts_to_generate.append(
'easy_install = ' + easy_install_script
)
scripts_to_generate.append(
'easy_install-{} = {}'.format(
get_major_minor_version(), easy_install_script
)
)
# Delete any other versioned easy_install entry points
easy_install_ep = [
k for k in console if re.match(r'easy_install(-\d\.\d)?$', k)
]
for k in easy_install_ep:
del console[k]
# Generate the console entry points specified in the wheel
scripts_to_generate.extend(starmap('{} = {}'.format, console.items()))
return scripts_to_generate
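# Illustrative result (assuming ENSUREPIP_OPTIONS is unset, on CPython 3.8):
# get_console_script_specs({'pip': 'pip._internal.cli.main:main'}) returns,
# in append order:
#   ['pip = pip._internal.cli.main:main',
#    'pip3 = pip._internal.cli.main:main',
#    'pip3.8 = pip._internal.cli.main:main']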
class ZipBackedFile(object):
def __init__(self, src_record_path, dest_path, zip_file):
# type: (RecordPath, text_type, ZipFile) -> None
self.src_record_path = src_record_path
self.dest_path = dest_path
self._zip_file = zip_file
self.changed = False
def _getinfo(self):
# type: () -> ZipInfo
if not PY2:
return self._zip_file.getinfo(self.src_record_path)
# Python 2 does not expose a way to detect a ZIP's encoding, but the
# wheel specification (PEP 427) explicitly mandates that paths should
        # use UTF-8, so we assume that encoding here.
return self._zip_file.getinfo(self.src_record_path.encode("utf-8"))
def save(self):
# type: () -> None
# directory creation is lazy and after file filtering
# to ensure we don't install empty dirs; empty dirs can't be
# uninstalled.
parent_dir = os.path.dirname(self.dest_path)
ensure_dir(parent_dir)
# When we open the output file below, any existing file is truncated
# before we start writing the new contents. This is fine in most
# cases, but can cause a segfault if pip has loaded a shared
# object (e.g. from pyopenssl through its vendored urllib3)
# Since the shared object is mmap'd an attempt to call a
# symbol in it will then cause a segfault. Unlinking the file
# allows writing of new contents while allowing the process to
# continue to use the old copy.
if os.path.exists(self.dest_path):
os.unlink(self.dest_path)
zipinfo = self._getinfo()
with self._zip_file.open(zipinfo) as f:
with open(self.dest_path, "wb") as dest:
shutil.copyfileobj(f, dest)
if zip_item_is_executable(zipinfo):
set_extracted_file_to_default_mode_plus_executable(self.dest_path)
class ScriptFile(object):
def __init__(self, file):
# type: (File) -> None
self._file = file
self.src_record_path = self._file.src_record_path
self.dest_path = self._file.dest_path
self.changed = False
def save(self):
# type: () -> None
self._file.save()
self.changed = fix_script(self.dest_path)
class MissingCallableSuffix(InstallationError):
def __init__(self, entry_point):
# type: (str) -> None
super(MissingCallableSuffix, self).__init__(
"Invalid script entry point: {} - A callable "
"suffix is required. Cf https://packaging.python.org/"
"specifications/entry-points/#use-for-scripts for more "
"information.".format(entry_point)
)
def _raise_for_invalid_entrypoint(specification):
# type: (str) -> None
entry = get_export_entry(specification)
if entry is not None and entry.suffix is None:
raise MissingCallableSuffix(str(entry))
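# For example, 'foo = pkg.mod:func' parses with suffix 'func' and passes,
# while 'foo = pkg.mod' parses with no suffix and raises
# MissingCallableSuffix (the entry point names here are hypothetical).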
class PipScriptMaker(ScriptMaker):
def make(self, specification, options=None):
# type: (str, Dict[str, Any]) -> List[str]
_raise_for_invalid_entrypoint(specification)
return super(PipScriptMaker, self).make(specification, options)
def _install_wheel(
name, # type: str
wheel_zip, # type: ZipFile
wheel_path, # type: str
scheme, # type: Scheme
pycompile=True, # type: bool
warn_script_location=True, # type: bool
direct_url=None, # type: Optional[DirectUrl]
requested=False, # type: bool
):
# type: (...) -> None
"""Install a wheel.
:param name: Name of the project to install
:param wheel_zip: open ZipFile for wheel being installed
:param scheme: Distutils scheme dictating the install directories
:param pycompile: Whether to byte-compile installed Python files
:param warn_script_location: Whether to check that scripts are installed
into a directory on PATH
:raises UnsupportedWheel:
* when the directory holds an unpacked wheel with incompatible
Wheel-Version
* when the .dist-info dir does not match the wheel
"""
info_dir, metadata = parse_wheel(wheel_zip, name)
if wheel_root_is_purelib(metadata):
lib_dir = scheme.purelib
else:
lib_dir = scheme.platlib
# Record details of the files moved
# installed = files copied from the wheel to the destination
# changed = files changed while installing (scripts #! line typically)
# generated = files newly generated during the install (script wrappers)
installed = {} # type: Dict[RecordPath, RecordPath]
changed = set() # type: Set[RecordPath]
generated = [] # type: List[str]
def record_installed(srcfile, destfile, modified=False):
# type: (RecordPath, text_type, bool) -> None
"""Map archive RECORD paths to installation RECORD paths."""
newpath = _fs_to_record_path(destfile, lib_dir)
installed[srcfile] = newpath
if modified:
changed.add(_fs_to_record_path(destfile))
def all_paths():
# type: () -> Iterable[RecordPath]
names = wheel_zip.namelist()
# If a flag is set, names may be unicode in Python 2. We convert to
# text explicitly so these are valid for lookup in RECORD.
decoded_names = map(ensure_text, names)
for name in decoded_names:
yield cast("RecordPath", name)
def is_dir_path(path):
# type: (RecordPath) -> bool
return path.endswith("/")
def assert_no_path_traversal(dest_dir_path, target_path):
# type: (text_type, text_type) -> None
if not is_within_directory(dest_dir_path, target_path):
message = (
"The wheel {!r} has a file {!r} trying to install"
" outside the target directory {!r}"
)
raise InstallationError(
message.format(wheel_path, target_path, dest_dir_path)
)
def root_scheme_file_maker(zip_file, dest):
# type: (ZipFile, text_type) -> Callable[[RecordPath], File]
def make_root_scheme_file(record_path):
# type: (RecordPath) -> File
normed_path = os.path.normpath(record_path)
dest_path = os.path.join(dest, normed_path)
assert_no_path_traversal(dest, dest_path)
return ZipBackedFile(record_path, dest_path, zip_file)
return make_root_scheme_file
def data_scheme_file_maker(zip_file, scheme):
# type: (ZipFile, Scheme) -> Callable[[RecordPath], File]
scheme_paths = {}
for key in SCHEME_KEYS:
encoded_key = ensure_text(key)
scheme_paths[encoded_key] = ensure_text(
getattr(scheme, key), encoding=sys.getfilesystemencoding()
)
def make_data_scheme_file(record_path):
# type: (RecordPath) -> File
normed_path = os.path.normpath(record_path)
try:
_, scheme_key, dest_subpath = normed_path.split(os.path.sep, 2)
except ValueError:
message = (
"Unexpected file in {}: {!r}. .data directory contents"
" should be named like: '<scheme key>/<path>'."
).format(wheel_path, record_path)
raise InstallationError(message)
try:
scheme_path = scheme_paths[scheme_key]
except KeyError:
valid_scheme_keys = ", ".join(sorted(scheme_paths))
message = (
"Unknown scheme key used in {}: {} (for file {!r}). .data"
" directory contents should be in subdirectories named"
" with a valid scheme key ({})"
).format(
wheel_path, scheme_key, record_path, valid_scheme_keys
)
raise InstallationError(message)
dest_path = os.path.join(scheme_path, dest_subpath)
assert_no_path_traversal(scheme_path, dest_path)
return ZipBackedFile(record_path, dest_path, zip_file)
return make_data_scheme_file
def is_data_scheme_path(path):
# type: (RecordPath) -> bool
return path.split("/", 1)[0].endswith(".data")
paths = all_paths()
file_paths = filterfalse(is_dir_path, paths)
root_scheme_paths, data_scheme_paths = partition(
is_data_scheme_path, file_paths
)
make_root_scheme_file = root_scheme_file_maker(
wheel_zip,
ensure_text(lib_dir, encoding=sys.getfilesystemencoding()),
)
files = map(make_root_scheme_file, root_scheme_paths)
def is_script_scheme_path(path):
# type: (RecordPath) -> bool
parts = path.split("/", 2)
return (
len(parts) > 2 and
parts[0].endswith(".data") and
parts[1] == "scripts"
)
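    # Illustrative classification for a hypothetical wheel mypkg-1.0:
    #   'mypkg-1.0.data/scripts/tool'   -> data scheme and script scheme
    #   'mypkg-1.0.data/purelib/mod.py' -> data scheme only
    #   'mypkg/__init__.py'             -> root scheme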
other_scheme_paths, script_scheme_paths = partition(
is_script_scheme_path, data_scheme_paths
)
make_data_scheme_file = data_scheme_file_maker(wheel_zip, scheme)
other_scheme_files = map(make_data_scheme_file, other_scheme_paths)
files = chain(files, other_scheme_files)
# Get the defined entry points
distribution = pkg_resources_distribution_for_wheel(
wheel_zip, name, wheel_path
)
console, gui = get_entrypoints(distribution)
def is_entrypoint_wrapper(file):
# type: (File) -> bool
# EP, EP.exe and EP-script.py are scripts generated for
# entry point EP by setuptools
path = file.dest_path
name = os.path.basename(path)
if name.lower().endswith('.exe'):
matchname = name[:-4]
elif name.lower().endswith('-script.py'):
matchname = name[:-10]
elif name.lower().endswith(".pya"):
matchname = name[:-4]
else:
matchname = name
# Ignore setuptools-generated scripts
return (matchname in console or matchname in gui)
script_scheme_files = map(make_data_scheme_file, script_scheme_paths)
script_scheme_files = filterfalse(
is_entrypoint_wrapper, script_scheme_files
)
script_scheme_files = map(ScriptFile, script_scheme_files)
files = chain(files, script_scheme_files)
for file in files:
file.save()
record_installed(file.src_record_path, file.dest_path, file.changed)
def pyc_source_file_paths():
# type: () -> Iterator[text_type]
# We de-duplicate installation paths, since there can be overlap (e.g.
# file in .data maps to same location as file in wheel root).
# Sorting installation paths makes it easier to reproduce and debug
# issues related to permissions on existing files.
for installed_path in sorted(set(installed.values())):
full_installed_path = os.path.join(lib_dir, installed_path)
if not os.path.isfile(full_installed_path):
continue
if not full_installed_path.endswith('.py'):
continue
yield full_installed_path
def pyc_output_path(path):
# type: (text_type) -> text_type
"""Return the path the pyc file would have been written to.
"""
if PY2:
if sys.flags.optimize:
return path + 'o'
else:
return path + 'c'
else:
return importlib.util.cache_from_source(path)
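    # For example, on CPython 3.8 pyc_output_path('/lib/mod.py') would be
    # '/lib/__pycache__/mod.cpython-38.pyc' (the tag varies per interpreter),
    # while on Python 2 it is '/lib/mod.pyc' (or '.pyo' under -O).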
# Compile all of the pyc files for the installed files
if pycompile:
with captured_stdout() as stdout:
with warnings.catch_warnings():
warnings.filterwarnings('ignore')
for path in pyc_source_file_paths():
# Python 2's `compileall.compile_file` requires a str in
# error cases, so we must convert to the native type.
path_arg = ensure_str(
path, encoding=sys.getfilesystemencoding()
)
success = compileall.compile_file(
path_arg, force=True, quiet=True
)
if success:
pyc_path = pyc_output_path(path)
assert os.path.exists(pyc_path)
pyc_record_path = cast(
"RecordPath", pyc_path.replace(os.path.sep, "/")
)
record_installed(pyc_record_path, pyc_path)
logger.debug(stdout.getvalue())
maker = PipScriptMaker(None, scheme.scripts)
# Ensure old scripts are overwritten.
# See https://github.com/pypa/pip/issues/1800
maker.clobber = True
# Ensure we don't generate any variants for scripts because this is almost
# never what somebody wants.
# See https://bitbucket.org/pypa/distlib/issue/35/
maker.variants = {''}
# This is required because otherwise distlib creates scripts that are not
# executable.
# See https://bitbucket.org/pypa/distlib/issue/32/
maker.set_mode = True
# Generate the console and GUI entry points specified in the wheel
scripts_to_generate = get_console_script_specs(console)
gui_scripts_to_generate = list(starmap('{} = {}'.format, gui.items()))
generated_console_scripts = maker.make_multiple(scripts_to_generate)
generated.extend(generated_console_scripts)
generated.extend(
maker.make_multiple(gui_scripts_to_generate, {'gui': True})
)
if warn_script_location:
msg = message_about_scripts_not_on_PATH(generated_console_scripts)
if msg is not None:
logger.warning(msg)
generated_file_mode = 0o666 & ~current_umask()
@contextlib.contextmanager
def _generate_file(path, **kwargs):
# type: (str, **Any) -> Iterator[NamedTemporaryFileResult]
with adjacent_tmp_file(path, **kwargs) as f:
yield f
os.chmod(f.name, generated_file_mode)
replace(f.name, path)
dest_info_dir = os.path.join(lib_dir, info_dir)
# Record pip as the installer
installer_path = os.path.join(dest_info_dir, 'INSTALLER')
with _generate_file(installer_path) as installer_file:
installer_file.write(b'pip\n')
generated.append(installer_path)
# Record the PEP 610 direct URL reference
if direct_url is not None:
direct_url_path = os.path.join(dest_info_dir, DIRECT_URL_METADATA_NAME)
with _generate_file(direct_url_path) as direct_url_file:
direct_url_file.write(direct_url.to_json().encode("utf-8"))
generated.append(direct_url_path)
# Record the REQUESTED file
if requested:
requested_path = os.path.join(dest_info_dir, 'REQUESTED')
with open(requested_path, "w"):
pass
generated.append(requested_path)
record_text = distribution.get_metadata('RECORD')
record_rows = list(csv.reader(record_text.splitlines()))
rows = get_csv_rows_for_installed(
record_rows,
installed=installed,
changed=changed,
generated=generated,
lib_dir=lib_dir)
# Record details of all files installed
record_path = os.path.join(dest_info_dir, 'RECORD')
with _generate_file(record_path, **csv_io_kwargs('w')) as record_file:
# The type mypy infers for record_file is different for Python 3
# (typing.IO[Any]) and Python 2 (typing.BinaryIO). We explicitly
# cast to typing.IO[str] as a workaround.
writer = csv.writer(cast('IO[str]', record_file))
writer.writerows(_normalized_outrows(rows))
@contextlib.contextmanager
def req_error_context(req_description):
# type: (str) -> Iterator[None]
try:
yield
except InstallationError as e:
message = "For req: {}. {}".format(req_description, e.args[0])
reraise(
InstallationError, InstallationError(message), sys.exc_info()[2]
)
def install_wheel(
name, # type: str
wheel_path, # type: str
scheme, # type: Scheme
req_description, # type: str
pycompile=True, # type: bool
warn_script_location=True, # type: bool
direct_url=None, # type: Optional[DirectUrl]
requested=False, # type: bool
):
# type: (...) -> None
with ZipFile(wheel_path, allowZip64=True) as z:
with req_error_context(req_description):
_install_wheel(
name=name,
wheel_zip=z,
wheel_path=wheel_path,
scheme=scheme,
pycompile=pycompile,
warn_script_location=warn_script_location,
direct_url=direct_url,
requested=requested,
)
| Django-locallibrary/env/Lib/site-packages/pip/_internal/operations/install/wheel.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_internal/operations/install/wheel.py",
"repo_id": "Django-locallibrary",
"token_count": 13040
} | 6 |
from __future__ import absolute_import
import csv
import functools
import logging
import os
import sys
import sysconfig
from pip._vendor import pkg_resources
from pip._internal.exceptions import UninstallationError
from pip._internal.locations import bin_py, bin_user
from pip._internal.utils.compat import WINDOWS, cache_from_source, uses_pycache
from pip._internal.utils.logging import indent_log
from pip._internal.utils.misc import (
FakeFile,
ask,
dist_in_usersite,
dist_is_local,
egg_link_path,
is_local,
normalize_path,
renames,
rmtree,
)
from pip._internal.utils.temp_dir import AdjacentTempDirectory, TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import (
Any, Callable, Dict, Iterable, Iterator, List, Optional, Set, Tuple,
)
from pip._vendor.pkg_resources import Distribution
logger = logging.getLogger(__name__)
def _script_names(dist, script_name, is_gui):
# type: (Distribution, str, bool) -> List[str]
"""Create the fully qualified name of the files created by
{console,gui}_scripts for the given ``dist``.
Returns the list of file names
"""
if dist_in_usersite(dist):
bin_dir = bin_user
else:
bin_dir = bin_py
exe_name = os.path.join(bin_dir, script_name)
paths_to_remove = [exe_name]
if WINDOWS:
paths_to_remove.append(exe_name + '.exe')
paths_to_remove.append(exe_name + '.exe.manifest')
if is_gui:
paths_to_remove.append(exe_name + '-script.pyw')
else:
paths_to_remove.append(exe_name + '-script.py')
return paths_to_remove
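# Illustrative result for a hypothetical script name 'mytool' (is_gui=False,
# dist not in usersite): on Windows this returns mytool, mytool.exe,
# mytool.exe.manifest and mytool-script.py under the scripts dir; on POSIX
# it is just [<bin_dir>/mytool].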
def _unique(fn):
# type: (Callable[..., Iterator[Any]]) -> Callable[..., Iterator[Any]]
@functools.wraps(fn)
def unique(*args, **kw):
# type: (Any, Any) -> Iterator[Any]
seen = set() # type: Set[Any]
for item in fn(*args, **kw):
if item not in seen:
seen.add(item)
yield item
return unique
@_unique
def uninstallation_paths(dist):
# type: (Distribution) -> Iterator[str]
"""
    Yield all the uninstallation paths for dist based on its RECORD file.
Yield paths to all the files in RECORD. For each .py file in RECORD, add
the .pyc and .pyo in the same directory.
UninstallPathSet.add() takes care of the __pycache__ .py[co].
"""
r = csv.reader(FakeFile(dist.get_metadata_lines('RECORD')))
for row in r:
path = os.path.join(dist.location, row[0])
yield path
if path.endswith('.py'):
dn, fn = os.path.split(path)
base = fn[:-3]
path = os.path.join(dn, base + '.pyc')
yield path
path = os.path.join(dn, base + '.pyo')
yield path
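# For example, a RECORD row of 'pkg/mod.py' yields pkg/mod.py, pkg/mod.pyc
# and pkg/mod.pyo (each joined onto dist.location); non-.py rows yield only
# themselves, and @_unique drops any duplicates.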
def compact(paths):
# type: (Iterable[str]) -> Set[str]
"""Compact a path set to contain the minimal number of paths
necessary to contain all paths in the set. If /a/path/ and
/a/path/to/a/file.txt are both in the set, leave only the
shorter path."""
sep = os.path.sep
short_paths = set() # type: Set[str]
for path in sorted(paths, key=len):
should_skip = any(
path.startswith(shortpath.rstrip("*")) and
path[len(shortpath.rstrip("*").rstrip(sep))] == sep
for shortpath in short_paths
)
if not should_skip:
short_paths.add(path)
return short_paths
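# Sketch (POSIX separators, made-up paths):
# >>> compact({'/a/path/', '/a/path/to/a/file.txt'})
# {'/a/path/'}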
def compress_for_rename(paths):
# type: (Iterable[str]) -> Set[str]
"""Returns a set containing the paths that need to be renamed.
This set may include directories when the original sequence of paths
included every file on disk.
"""
case_map = dict((os.path.normcase(p), p) for p in paths)
remaining = set(case_map)
unchecked = sorted(set(os.path.split(p)[0]
for p in case_map.values()), key=len)
wildcards = set() # type: Set[str]
def norm_join(*a):
        # type: (*str) -> str
return os.path.normcase(os.path.join(*a))
for root in unchecked:
if any(os.path.normcase(root).startswith(w)
for w in wildcards):
# This directory has already been handled.
continue
all_files = set() # type: Set[str]
all_subdirs = set() # type: Set[str]
for dirname, subdirs, files in os.walk(root):
all_subdirs.update(norm_join(root, dirname, d)
for d in subdirs)
all_files.update(norm_join(root, dirname, f)
for f in files)
# If all the files we found are in our remaining set of files to
# remove, then remove them from the latter set and add a wildcard
# for the directory.
if not (all_files - remaining):
remaining.difference_update(all_files)
wildcards.add(root + os.sep)
return set(map(case_map.__getitem__, remaining)) | wildcards
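# Illustrative behavior (made-up paths): if every file that exists on disk
# under /sp/foo is slated for removal, the result contains the single
# wildcard '/sp/foo' + os.sep in place of the individual file paths.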
def compress_for_output_listing(paths):
# type: (Iterable[str]) -> Tuple[Set[str], Set[str]]
"""Returns a tuple of 2 sets of which paths to display to user
The first set contains paths that would be deleted. Files of a package
are not added and the top-level directory of the package has a '*' added
    at the end - to signify that all its contents are removed.
The second set contains files that would have been skipped in the above
folders.
"""
will_remove = set(paths)
will_skip = set()
# Determine folders and files
folders = set()
files = set()
for path in will_remove:
if path.endswith(".pyc"):
continue
if path.endswith("__init__.py") or ".dist-info" in path:
folders.add(os.path.dirname(path))
files.add(path)
# probably this one https://github.com/python/mypy/issues/390
_normcased_files = set(map(os.path.normcase, files)) # type: ignore
folders = compact(folders)
# This walks the tree using os.walk to not miss extra folders
# that might get added.
for folder in folders:
for dirpath, _, dirfiles in os.walk(folder):
for fname in dirfiles:
if fname.endswith(".pyc"):
continue
file_ = os.path.join(dirpath, fname)
if (os.path.isfile(file_) and
os.path.normcase(file_) not in _normcased_files):
# We are skipping this file. Add it to the set.
will_skip.add(file_)
will_remove = files | {
os.path.join(folder, "*") for folder in folders
}
return will_remove, will_skip
class StashedUninstallPathSet(object):
"""A set of file rename operations to stash files while
tentatively uninstalling them."""
def __init__(self):
# type: () -> None
# Mapping from source file root to [Adjacent]TempDirectory
# for files under that directory.
self._save_dirs = {} # type: Dict[str, TempDirectory]
# (old path, new path) tuples for each move that may need
# to be undone.
self._moves = [] # type: List[Tuple[str, str]]
def _get_directory_stash(self, path):
# type: (str) -> str
"""Stashes a directory.
Directories are stashed adjacent to their original location if
possible, or else moved/copied into the user's temp dir."""
try:
save_dir = AdjacentTempDirectory(path) # type: TempDirectory
except OSError:
save_dir = TempDirectory(kind="uninstall")
self._save_dirs[os.path.normcase(path)] = save_dir
return save_dir.path
def _get_file_stash(self, path):
# type: (str) -> str
"""Stashes a file.
If no root has been provided, one will be created for the directory
in the user's temp directory."""
path = os.path.normcase(path)
head, old_head = os.path.dirname(path), None
save_dir = None
while head != old_head:
try:
save_dir = self._save_dirs[head]
break
except KeyError:
pass
head, old_head = os.path.dirname(head), head
else:
# Did not find any suitable root
head = os.path.dirname(path)
save_dir = TempDirectory(kind='uninstall')
self._save_dirs[head] = save_dir
relpath = os.path.relpath(path, head)
if relpath and relpath != os.path.curdir:
return os.path.join(save_dir.path, relpath)
return save_dir.path
def stash(self, path):
# type: (str) -> str
"""Stashes the directory or file and returns its new location.
Handle symlinks as files to avoid modifying the symlink targets.
"""
path_is_dir = os.path.isdir(path) and not os.path.islink(path)
if path_is_dir:
new_path = self._get_directory_stash(path)
else:
new_path = self._get_file_stash(path)
self._moves.append((path, new_path))
if (path_is_dir and os.path.isdir(new_path)):
# If we're moving a directory, we need to
# remove the destination first or else it will be
# moved to inside the existing directory.
# We just created new_path ourselves, so it will
# be removable.
os.rmdir(new_path)
renames(path, new_path)
return new_path
def commit(self):
# type: () -> None
"""Commits the uninstall by removing stashed files."""
for _, save_dir in self._save_dirs.items():
save_dir.cleanup()
self._moves = []
self._save_dirs = {}
def rollback(self):
# type: () -> None
"""Undoes the uninstall by moving stashed files back."""
for p in self._moves:
logger.info("Moving to %s\n from %s", *p)
for new_path, path in self._moves:
try:
logger.debug('Replacing %s from %s', new_path, path)
if os.path.isfile(new_path) or os.path.islink(new_path):
os.unlink(new_path)
elif os.path.isdir(new_path):
rmtree(new_path)
renames(path, new_path)
except OSError as ex:
logger.error("Failed to restore %s", new_path)
logger.debug("Exception: %s", ex)
self.commit()
@property
def can_rollback(self):
# type: () -> bool
return bool(self._moves)
class UninstallPathSet(object):
"""A set of file paths to be removed in the uninstallation of a
requirement."""
def __init__(self, dist):
# type: (Distribution) -> None
self.paths = set() # type: Set[str]
self._refuse = set() # type: Set[str]
self.pth = {} # type: Dict[str, UninstallPthEntries]
self.dist = dist
self._moved_paths = StashedUninstallPathSet()
def _permitted(self, path):
# type: (str) -> bool
"""
Return True if the given path is one we are permitted to
remove/modify, False otherwise.
"""
return is_local(path)
def add(self, path):
# type: (str) -> None
head, tail = os.path.split(path)
# we normalize the head to resolve parent directory symlinks, but not
# the tail, since we only want to uninstall symlinks, not their targets
path = os.path.join(normalize_path(head), os.path.normcase(tail))
if not os.path.exists(path):
return
if self._permitted(path):
self.paths.add(path)
else:
self._refuse.add(path)
# __pycache__ files can show up after 'installed-files.txt' is created,
# due to imports
if os.path.splitext(path)[1] == '.py' and uses_pycache:
self.add(cache_from_source(path))
def add_pth(self, pth_file, entry):
# type: (str, str) -> None
pth_file = normalize_path(pth_file)
if self._permitted(pth_file):
if pth_file not in self.pth:
self.pth[pth_file] = UninstallPthEntries(pth_file)
self.pth[pth_file].add(entry)
else:
self._refuse.add(pth_file)
def remove(self, auto_confirm=False, verbose=False):
# type: (bool, bool) -> None
"""Remove paths in ``self.paths`` with confirmation (unless
``auto_confirm`` is True)."""
if not self.paths:
logger.info(
"Can't uninstall '%s'. No files were found to uninstall.",
self.dist.project_name,
)
return
dist_name_version = (
self.dist.project_name + "-" + self.dist.version
)
logger.info('Uninstalling %s:', dist_name_version)
with indent_log():
if auto_confirm or self._allowed_to_proceed(verbose):
moved = self._moved_paths
for_rename = compress_for_rename(self.paths)
for path in sorted(compact(for_rename)):
moved.stash(path)
logger.debug('Removing file or directory %s', path)
for pth in self.pth.values():
pth.remove()
logger.info('Successfully uninstalled %s', dist_name_version)
def _allowed_to_proceed(self, verbose):
# type: (bool) -> bool
"""Display which files would be deleted and prompt for confirmation
"""
def _display(msg, paths):
# type: (str, Iterable[str]) -> None
if not paths:
return
logger.info(msg)
with indent_log():
for path in sorted(compact(paths)):
logger.info(path)
if not verbose:
will_remove, will_skip = compress_for_output_listing(self.paths)
else:
# In verbose mode, display all the files that are going to be
# deleted.
will_remove = set(self.paths)
will_skip = set()
_display('Would remove:', will_remove)
_display('Would not remove (might be manually added):', will_skip)
_display('Would not remove (outside of prefix):', self._refuse)
if verbose:
_display('Will actually move:', compress_for_rename(self.paths))
return ask('Proceed (y/n)? ', ('y', 'n')) == 'y'
def rollback(self):
# type: () -> None
"""Rollback the changes previously made by remove()."""
if not self._moved_paths.can_rollback:
logger.error(
"Can't roll back %s; was not uninstalled",
self.dist.project_name,
)
return
logger.info('Rolling back uninstall of %s', self.dist.project_name)
self._moved_paths.rollback()
for pth in self.pth.values():
pth.rollback()
def commit(self):
# type: () -> None
"""Remove temporary save dir: rollback will no longer be possible."""
self._moved_paths.commit()
@classmethod
def from_dist(cls, dist):
# type: (Distribution) -> UninstallPathSet
dist_path = normalize_path(dist.location)
if not dist_is_local(dist):
logger.info(
"Not uninstalling %s at %s, outside environment %s",
dist.key,
dist_path,
sys.prefix,
)
return cls(dist)
if dist_path in {p for p in {sysconfig.get_path("stdlib"),
sysconfig.get_path("platstdlib")}
if p}:
logger.info(
"Not uninstalling %s at %s, as it is in the standard library.",
dist.key,
dist_path,
)
return cls(dist)
paths_to_remove = cls(dist)
develop_egg_link = egg_link_path(dist)
develop_egg_link_egg_info = '{}.egg-info'.format(
pkg_resources.to_filename(dist.project_name))
egg_info_exists = dist.egg_info and os.path.exists(dist.egg_info)
# Special case for distutils installed package
distutils_egg_info = getattr(dist._provider, 'path', None)
        # The order of these uninstall cases matters: with two installs of the
        # same package, pip needs to uninstall the currently detected version
if (egg_info_exists and dist.egg_info.endswith('.egg-info') and
not dist.egg_info.endswith(develop_egg_link_egg_info)):
# if dist.egg_info.endswith(develop_egg_link_egg_info), we
# are in fact in the develop_egg_link case
paths_to_remove.add(dist.egg_info)
if dist.has_metadata('installed-files.txt'):
for installed_file in dist.get_metadata(
'installed-files.txt').splitlines():
path = os.path.normpath(
os.path.join(dist.egg_info, installed_file)
)
paths_to_remove.add(path)
# FIXME: need a test for this elif block
# occurs with --single-version-externally-managed/--record outside
# of pip
elif dist.has_metadata('top_level.txt'):
if dist.has_metadata('namespace_packages.txt'):
namespaces = dist.get_metadata('namespace_packages.txt')
else:
namespaces = []
for top_level_pkg in [
p for p
in dist.get_metadata('top_level.txt').splitlines()
if p and p not in namespaces]:
path = os.path.join(dist.location, top_level_pkg)
paths_to_remove.add(path)
paths_to_remove.add(path + '.py')
paths_to_remove.add(path + '.pyc')
paths_to_remove.add(path + '.pyo')
elif distutils_egg_info:
raise UninstallationError(
"Cannot uninstall {!r}. It is a distutils installed project "
"and thus we cannot accurately determine which files belong "
"to it which would lead to only a partial uninstall.".format(
dist.project_name,
)
)
elif dist.location.endswith('.egg'):
# package installed by easy_install
# We cannot match on dist.egg_name because it can slightly vary
# i.e. setuptools-0.6c11-py2.6.egg vs setuptools-0.6rc11-py2.6.egg
paths_to_remove.add(dist.location)
easy_install_egg = os.path.split(dist.location)[1]
easy_install_pth = os.path.join(os.path.dirname(dist.location),
'easy-install.pth')
paths_to_remove.add_pth(easy_install_pth, './' + easy_install_egg)
elif egg_info_exists and dist.egg_info.endswith('.dist-info'):
for path in uninstallation_paths(dist):
paths_to_remove.add(path)
elif develop_egg_link:
# develop egg
with open(develop_egg_link, 'r') as fh:
link_pointer = os.path.normcase(fh.readline().strip())
assert (link_pointer == dist.location), (
'Egg-link {} does not match installed location of {} '
'(at {})'.format(
link_pointer, dist.project_name, dist.location)
)
paths_to_remove.add(develop_egg_link)
easy_install_pth = os.path.join(os.path.dirname(develop_egg_link),
'easy-install.pth')
paths_to_remove.add_pth(easy_install_pth, dist.location)
else:
logger.debug(
'Not sure how to uninstall: %s - Check: %s',
dist, dist.location,
)
# find distutils scripts= scripts
if dist.has_metadata('scripts') and dist.metadata_isdir('scripts'):
for script in dist.metadata_listdir('scripts'):
if dist_in_usersite(dist):
bin_dir = bin_user
else:
bin_dir = bin_py
paths_to_remove.add(os.path.join(bin_dir, script))
if WINDOWS:
paths_to_remove.add(os.path.join(bin_dir, script) + '.bat')
# find console_scripts
_scripts_to_remove = []
console_scripts = dist.get_entry_map(group='console_scripts')
for name in console_scripts.keys():
_scripts_to_remove.extend(_script_names(dist, name, False))
# find gui_scripts
gui_scripts = dist.get_entry_map(group='gui_scripts')
for name in gui_scripts.keys():
_scripts_to_remove.extend(_script_names(dist, name, True))
for s in _scripts_to_remove:
paths_to_remove.add(s)
return paths_to_remove
class UninstallPthEntries(object):
def __init__(self, pth_file):
# type: (str) -> None
self.file = pth_file
self.entries = set() # type: Set[str]
self._saved_lines = None # type: Optional[List[bytes]]
def add(self, entry):
# type: (str) -> None
entry = os.path.normcase(entry)
# On Windows, os.path.normcase converts the entry to use
# backslashes. This is correct for entries that describe absolute
# paths outside of site-packages, but all the others use forward
# slashes.
# os.path.splitdrive is used instead of os.path.isabs because isabs
# treats non-absolute paths with drive letter markings like c:foo\bar
# as absolute paths. It also does not recognize UNC paths if they don't
        # have more than "\\server\share". Valid examples: "\\server\share\" or
        # "\\server\share\folder". Python 2.7.8+ supports UNC in splitdrive.
if WINDOWS and not os.path.splitdrive(entry)[0]:
entry = entry.replace('\\', '/')
self.entries.add(entry)
def remove(self):
# type: () -> None
logger.debug('Removing pth entries from %s:', self.file)
# If the file doesn't exist, log a warning and return
if not os.path.isfile(self.file):
logger.warning(
"Cannot remove entries from nonexistent file %s", self.file
)
return
with open(self.file, 'rb') as fh:
            # The file may use '\r\n' (as written on Windows by Python 3) or
            # '\n' (Python 2) line endings; detect which is in use below.
lines = fh.readlines()
self._saved_lines = lines
if any(b'\r\n' in line for line in lines):
endline = '\r\n'
else:
endline = '\n'
# handle missing trailing newline
if lines and not lines[-1].endswith(endline.encode("utf-8")):
lines[-1] = lines[-1] + endline.encode("utf-8")
for entry in self.entries:
try:
logger.debug('Removing entry: %s', entry)
lines.remove((entry + endline).encode("utf-8"))
except ValueError:
pass
with open(self.file, 'wb') as fh:
fh.writelines(lines)
def rollback(self):
# type: () -> bool
if self._saved_lines is None:
logger.error(
'Cannot roll back changes to %s, none were made', self.file
)
return False
logger.debug('Rolling %s back to previous state', self.file)
with open(self.file, 'wb') as fh:
fh.writelines(self._saved_lines)
return True
| Django-locallibrary/env/Lib/site-packages/pip/_internal/req/req_uninstall.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_internal/req/req_uninstall.py",
"repo_id": "Django-locallibrary",
"token_count": 11141
} | 7 |
"""Filetype information.
"""
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import Tuple
WHEEL_EXTENSION = '.whl'
BZ2_EXTENSIONS = ('.tar.bz2', '.tbz') # type: Tuple[str, ...]
XZ_EXTENSIONS = ('.tar.xz', '.txz', '.tlz',
'.tar.lz', '.tar.lzma') # type: Tuple[str, ...]
ZIP_EXTENSIONS = ('.zip', WHEEL_EXTENSION) # type: Tuple[str, ...]
TAR_EXTENSIONS = ('.tar.gz', '.tgz', '.tar') # type: Tuple[str, ...]
ARCHIVE_EXTENSIONS = (
ZIP_EXTENSIONS + BZ2_EXTENSIONS + TAR_EXTENSIONS + XZ_EXTENSIONS
)
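# Since str.endswith accepts a tuple, callers can test filenames directly:
# >>> 'pkg-1.0.tar.gz'.endswith(ARCHIVE_EXTENSIONS)
# True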
| Django-locallibrary/env/Lib/site-packages/pip/_internal/utils/filetypes.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_internal/utils/filetypes.py",
"repo_id": "Django-locallibrary",
"token_count": 266
} | 8 |
from __future__ import absolute_import
import io
import logging
import os
import re
import site
import sys
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import List, Optional
logger = logging.getLogger(__name__)
_INCLUDE_SYSTEM_SITE_PACKAGES_REGEX = re.compile(
r"include-system-site-packages\s*=\s*(?P<value>true|false)"
)
def _running_under_venv():
# type: () -> bool
"""Checks if sys.base_prefix and sys.prefix match.
This handles PEP 405 compliant virtual environments.
"""
return sys.prefix != getattr(sys, "base_prefix", sys.prefix)
def _running_under_regular_virtualenv():
# type: () -> bool
"""Checks if sys.real_prefix is set.
This handles virtual environments created with pypa's virtualenv.
"""
# pypa/virtualenv case
return hasattr(sys, 'real_prefix')
def running_under_virtualenv():
# type: () -> bool
"""Return True if we're running inside a virtualenv, False otherwise.
"""
return _running_under_venv() or _running_under_regular_virtualenv()
def _get_pyvenv_cfg_lines():
# type: () -> Optional[List[str]]
"""Reads {sys.prefix}/pyvenv.cfg and returns its contents as list of lines
Returns None, if it could not read/access the file.
"""
pyvenv_cfg_file = os.path.join(sys.prefix, 'pyvenv.cfg')
try:
# Although PEP 405 does not specify, the built-in venv module always
# writes with UTF-8. (pypa/pip#8717)
with io.open(pyvenv_cfg_file, encoding='utf-8') as f:
return f.read().splitlines() # avoids trailing newlines
except IOError:
return None
def _no_global_under_venv():
# type: () -> bool
"""Check `{sys.prefix}/pyvenv.cfg` for system site-packages inclusion
PEP 405 specifies that when system site-packages are not supposed to be
visible from a virtual environment, `pyvenv.cfg` must contain the following
line:
include-system-site-packages = false
Additionally, log a warning if accessing the file fails.
"""
cfg_lines = _get_pyvenv_cfg_lines()
if cfg_lines is None:
# We're not in a "sane" venv, so assume there is no system
# site-packages access (since that's PEP 405's default state).
logger.warning(
"Could not access 'pyvenv.cfg' despite a virtual environment "
"being active. Assuming global site-packages is not accessible "
"in this environment."
)
return True
for line in cfg_lines:
match = _INCLUDE_SYSTEM_SITE_PACKAGES_REGEX.match(line)
if match is not None and match.group('value') == 'false':
return True
return False
def _no_global_under_regular_virtualenv():
# type: () -> bool
"""Check if "no-global-site-packages.txt" exists beside site.py
This mirrors logic in pypa/virtualenv for determining whether system
site-packages are visible in the virtual environment.
"""
site_mod_dir = os.path.dirname(os.path.abspath(site.__file__))
no_global_site_packages_file = os.path.join(
site_mod_dir, 'no-global-site-packages.txt',
)
return os.path.exists(no_global_site_packages_file)
def virtualenv_no_global():
# type: () -> bool
"""Returns a boolean, whether running in venv with no system site-packages.
"""
# PEP 405 compliance needs to be checked first since virtualenv >=20 would
# return True for both checks, but is only able to use the PEP 405 config.
if _running_under_venv():
return _no_global_under_venv()
if _running_under_regular_virtualenv():
return _no_global_under_regular_virtualenv()
return False
| Django-locallibrary/env/Lib/site-packages/pip/_internal/utils/virtualenv.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_internal/utils/virtualenv.py",
"repo_id": "Django-locallibrary",
"token_count": 1373
} | 9 |
import calendar
import time
from email.utils import formatdate, parsedate, parsedate_tz
from datetime import datetime, timedelta
TIME_FMT = "%a, %d %b %Y %H:%M:%S GMT"
def expire_after(delta, date=None):
date = date or datetime.utcnow()
return date + delta
def datetime_to_header(dt):
return formatdate(calendar.timegm(dt.timetuple()))
class BaseHeuristic(object):
def warning(self, response):
"""
Return a valid 1xx warning header value describing the cache
adjustments.
        The response is provided to allow warnings like 113
        (http://tools.ietf.org/html/rfc7234#section-5.5.4), where we need
        to explicitly say the response is over 24 hours old.
"""
return '110 - "Response is Stale"'
def update_headers(self, response):
"""Update the response headers with any new headers.
NOTE: This SHOULD always include some Warning header to
signify that the response was cached by the client, not
by way of the provided headers.
"""
return {}
def apply(self, response):
updated_headers = self.update_headers(response)
if updated_headers:
response.headers.update(updated_headers)
warning_header_value = self.warning(response)
if warning_header_value is not None:
response.headers.update({"Warning": warning_header_value})
return response
class OneDayCache(BaseHeuristic):
"""
Cache the response by providing an expires 1 day in the
future.
"""
def update_headers(self, response):
headers = {}
if "expires" not in response.headers:
date = parsedate(response.headers["date"])
expires = expire_after(timedelta(days=1), date=datetime(*date[:6]))
headers["expires"] = datetime_to_header(expires)
headers["cache-control"] = "public"
return headers
class ExpiresAfter(BaseHeuristic):
"""
Cache **all** requests for a defined time period.
"""
def __init__(self, **kw):
self.delta = timedelta(**kw)
def update_headers(self, response):
expires = expire_after(self.delta)
return {"expires": datetime_to_header(expires), "cache-control": "public"}
def warning(self, response):
tmpl = "110 - Automatically cached for %s. Response might be stale"
return tmpl % self.delta
class LastModified(BaseHeuristic):
"""
If there is no Expires header already, fall back on Last-Modified
using the heuristic from
http://tools.ietf.org/html/rfc7234#section-4.2.2
to calculate a reasonable value.
Firefox also does something like this per
https://developer.mozilla.org/en-US/docs/Web/HTTP/Caching_FAQ
http://lxr.mozilla.org/mozilla-release/source/netwerk/protocol/http/nsHttpResponseHead.cpp#397
    Unlike Mozilla, we limit this to 24 hours.
"""
cacheable_by_default_statuses = {
200, 203, 204, 206, 300, 301, 404, 405, 410, 414, 501
}
def update_headers(self, resp):
headers = resp.headers
if "expires" in headers:
return {}
if "cache-control" in headers and headers["cache-control"] != "public":
return {}
if resp.status not in self.cacheable_by_default_statuses:
return {}
if "date" not in headers or "last-modified" not in headers:
return {}
date = calendar.timegm(parsedate_tz(headers["date"]))
last_modified = parsedate(headers["last-modified"])
if date is None or last_modified is None:
return {}
now = time.time()
current_age = max(0, now - date)
delta = date - calendar.timegm(last_modified)
freshness_lifetime = max(0, min(delta / 10, 24 * 3600))
if freshness_lifetime <= current_age:
return {}
expires = date + freshness_lifetime
return {"expires": time.strftime(TIME_FMT, time.gmtime(expires))}
def warning(self, resp):
return None
| Django-locallibrary/env/Lib/site-packages/pip/_vendor/cachecontrol/heuristics.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/cachecontrol/heuristics.py",
"repo_id": "Django-locallibrary",
"token_count": 1619
} | 10 |
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .enums import MachineState
HZ_CLS = (
1,0,0,0,0,0,0,0, # 00 - 07
0,0,0,0,0,0,0,0, # 08 - 0f
0,0,0,0,0,0,0,0, # 10 - 17
0,0,0,1,0,0,0,0, # 18 - 1f
0,0,0,0,0,0,0,0, # 20 - 27
0,0,0,0,0,0,0,0, # 28 - 2f
0,0,0,0,0,0,0,0, # 30 - 37
0,0,0,0,0,0,0,0, # 38 - 3f
0,0,0,0,0,0,0,0, # 40 - 47
0,0,0,0,0,0,0,0, # 48 - 4f
0,0,0,0,0,0,0,0, # 50 - 57
0,0,0,0,0,0,0,0, # 58 - 5f
0,0,0,0,0,0,0,0, # 60 - 67
0,0,0,0,0,0,0,0, # 68 - 6f
0,0,0,0,0,0,0,0, # 70 - 77
0,0,0,4,0,5,2,0, # 78 - 7f
1,1,1,1,1,1,1,1, # 80 - 87
1,1,1,1,1,1,1,1, # 88 - 8f
1,1,1,1,1,1,1,1, # 90 - 97
1,1,1,1,1,1,1,1, # 98 - 9f
1,1,1,1,1,1,1,1, # a0 - a7
1,1,1,1,1,1,1,1, # a8 - af
1,1,1,1,1,1,1,1, # b0 - b7
1,1,1,1,1,1,1,1, # b8 - bf
1,1,1,1,1,1,1,1, # c0 - c7
1,1,1,1,1,1,1,1, # c8 - cf
1,1,1,1,1,1,1,1, # d0 - d7
1,1,1,1,1,1,1,1, # d8 - df
1,1,1,1,1,1,1,1, # e0 - e7
1,1,1,1,1,1,1,1, # e8 - ef
1,1,1,1,1,1,1,1, # f0 - f7
1,1,1,1,1,1,1,1, # f8 - ff
)
HZ_ST = (
MachineState.START,MachineState.ERROR, 3,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f
MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.START,MachineState.START, 4,MachineState.ERROR,# 10-17
5,MachineState.ERROR, 6,MachineState.ERROR, 5, 5, 4,MachineState.ERROR,# 18-1f
4,MachineState.ERROR, 4, 4, 4,MachineState.ERROR, 4,MachineState.ERROR,# 20-27
4,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 28-2f
)
HZ_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0)
HZ_SM_MODEL = {'class_table': HZ_CLS,
'class_factor': 6,
'state_table': HZ_ST,
'char_len_table': HZ_CHAR_LEN_TABLE,
'name': "HZ-GB-2312",
'language': 'Chinese'}
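# These model dicts are consumed by chardet's CodingStateMachine; roughly,
# each input byte is mapped through class_table and the machine advances as
#   curr_state = state_table[curr_state * class_factor + byte_class]
# until it reaches MachineState.ITS_ME or MachineState.ERROR.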
ISO2022CN_CLS = (
2,0,0,0,0,0,0,0, # 00 - 07
0,0,0,0,0,0,0,0, # 08 - 0f
0,0,0,0,0,0,0,0, # 10 - 17
0,0,0,1,0,0,0,0, # 18 - 1f
0,0,0,0,0,0,0,0, # 20 - 27
0,3,0,0,0,0,0,0, # 28 - 2f
0,0,0,0,0,0,0,0, # 30 - 37
0,0,0,0,0,0,0,0, # 38 - 3f
0,0,0,4,0,0,0,0, # 40 - 47
0,0,0,0,0,0,0,0, # 48 - 4f
0,0,0,0,0,0,0,0, # 50 - 57
0,0,0,0,0,0,0,0, # 58 - 5f
0,0,0,0,0,0,0,0, # 60 - 67
0,0,0,0,0,0,0,0, # 68 - 6f
0,0,0,0,0,0,0,0, # 70 - 77
0,0,0,0,0,0,0,0, # 78 - 7f
2,2,2,2,2,2,2,2, # 80 - 87
2,2,2,2,2,2,2,2, # 88 - 8f
2,2,2,2,2,2,2,2, # 90 - 97
2,2,2,2,2,2,2,2, # 98 - 9f
2,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,2,2,2, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,2,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
2,2,2,2,2,2,2,2, # e0 - e7
2,2,2,2,2,2,2,2, # e8 - ef
2,2,2,2,2,2,2,2, # f0 - f7
2,2,2,2,2,2,2,2, # f8 - ff
)
ISO2022CN_ST = (
MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07
MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f
MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17
MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,# 18-1f
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 20-27
5, 6,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 28-2f
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 30-37
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,# 38-3f
)
ISO2022CN_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0)
ISO2022CN_SM_MODEL = {'class_table': ISO2022CN_CLS,
'class_factor': 9,
'state_table': ISO2022CN_ST,
'char_len_table': ISO2022CN_CHAR_LEN_TABLE,
'name': "ISO-2022-CN",
'language': 'Chinese'}
ISO2022JP_CLS = (
2,0,0,0,0,0,0,0, # 00 - 07
0,0,0,0,0,0,2,2, # 08 - 0f
0,0,0,0,0,0,0,0, # 10 - 17
0,0,0,1,0,0,0,0, # 18 - 1f
0,0,0,0,7,0,0,0, # 20 - 27
3,0,0,0,0,0,0,0, # 28 - 2f
0,0,0,0,0,0,0,0, # 30 - 37
0,0,0,0,0,0,0,0, # 38 - 3f
6,0,4,0,8,0,0,0, # 40 - 47
0,9,5,0,0,0,0,0, # 48 - 4f
0,0,0,0,0,0,0,0, # 50 - 57
0,0,0,0,0,0,0,0, # 58 - 5f
0,0,0,0,0,0,0,0, # 60 - 67
0,0,0,0,0,0,0,0, # 68 - 6f
0,0,0,0,0,0,0,0, # 70 - 77
0,0,0,0,0,0,0,0, # 78 - 7f
2,2,2,2,2,2,2,2, # 80 - 87
2,2,2,2,2,2,2,2, # 88 - 8f
2,2,2,2,2,2,2,2, # 90 - 97
2,2,2,2,2,2,2,2, # 98 - 9f
2,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,2,2,2, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,2,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
2,2,2,2,2,2,2,2, # e0 - e7
2,2,2,2,2,2,2,2, # e8 - ef
2,2,2,2,2,2,2,2, # f0 - f7
2,2,2,2,2,2,2,2, # f8 - ff
)
ISO2022JP_ST = (
MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 00-07
MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 08-0f
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 10-17
MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,# 18-1f
MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 20-27
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 6,MachineState.ITS_ME,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,# 28-2f
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,# 30-37
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 38-3f
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ERROR,MachineState.START,MachineState.START,# 40-47
)
ISO2022JP_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0, 0, 0, 0, 0)
ISO2022JP_SM_MODEL = {'class_table': ISO2022JP_CLS,
'class_factor': 10,
'state_table': ISO2022JP_ST,
'char_len_table': ISO2022JP_CHAR_LEN_TABLE,
'name': "ISO-2022-JP",
'language': 'Japanese'}
ISO2022KR_CLS = (
2,0,0,0,0,0,0,0, # 00 - 07
0,0,0,0,0,0,0,0, # 08 - 0f
0,0,0,0,0,0,0,0, # 10 - 17
0,0,0,1,0,0,0,0, # 18 - 1f
0,0,0,0,3,0,0,0, # 20 - 27
0,4,0,0,0,0,0,0, # 28 - 2f
0,0,0,0,0,0,0,0, # 30 - 37
0,0,0,0,0,0,0,0, # 38 - 3f
0,0,0,5,0,0,0,0, # 40 - 47
0,0,0,0,0,0,0,0, # 48 - 4f
0,0,0,0,0,0,0,0, # 50 - 57
0,0,0,0,0,0,0,0, # 58 - 5f
0,0,0,0,0,0,0,0, # 60 - 67
0,0,0,0,0,0,0,0, # 68 - 6f
0,0,0,0,0,0,0,0, # 70 - 77
0,0,0,0,0,0,0,0, # 78 - 7f
2,2,2,2,2,2,2,2, # 80 - 87
2,2,2,2,2,2,2,2, # 88 - 8f
2,2,2,2,2,2,2,2, # 90 - 97
2,2,2,2,2,2,2,2, # 98 - 9f
2,2,2,2,2,2,2,2, # a0 - a7
2,2,2,2,2,2,2,2, # a8 - af
2,2,2,2,2,2,2,2, # b0 - b7
2,2,2,2,2,2,2,2, # b8 - bf
2,2,2,2,2,2,2,2, # c0 - c7
2,2,2,2,2,2,2,2, # c8 - cf
2,2,2,2,2,2,2,2, # d0 - d7
2,2,2,2,2,2,2,2, # d8 - df
2,2,2,2,2,2,2,2, # e0 - e7
2,2,2,2,2,2,2,2, # e8 - ef
2,2,2,2,2,2,2,2, # f0 - f7
2,2,2,2,2,2,2,2, # f8 - ff
)
ISO2022KR_ST = (
MachineState.START, 3,MachineState.ERROR,MachineState.START,MachineState.START,MachineState.START,MachineState.ERROR,MachineState.ERROR,# 00-07
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ITS_ME,# 08-0f
MachineState.ITS_ME,MachineState.ITS_ME,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 4,MachineState.ERROR,MachineState.ERROR,# 10-17
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR, 5,MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,# 18-1f
MachineState.ERROR,MachineState.ERROR,MachineState.ERROR,MachineState.ITS_ME,MachineState.START,MachineState.START,MachineState.START,MachineState.START,# 20-27
)
ISO2022KR_CHAR_LEN_TABLE = (0, 0, 0, 0, 0, 0)
ISO2022KR_SM_MODEL = {'class_table': ISO2022KR_CLS,
'class_factor': 6,
'state_table': ISO2022KR_ST,
'char_len_table': ISO2022KR_CHAR_LEN_TABLE,
'name': "ISO-2022-KR",
'language': 'Korean'}
| Django-locallibrary/env/Lib/site-packages/pip/_vendor/chardet/escsm.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/chardet/escsm.py",
"repo_id": "Django-locallibrary",
"token_count": 5595
} | 11 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2012 The Python Software Foundation.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""Access to Python's configuration information."""
import codecs
import os
import re
import sys
from os.path import pardir, realpath
try:
import configparser
except ImportError:
import ConfigParser as configparser
__all__ = [
'get_config_h_filename',
'get_config_var',
'get_config_vars',
'get_makefile_filename',
'get_path',
'get_path_names',
'get_paths',
'get_platform',
'get_python_version',
'get_scheme_names',
'parse_config_h',
]
def _safe_realpath(path):
try:
return realpath(path)
except OSError:
return path
if sys.executable:
_PROJECT_BASE = os.path.dirname(_safe_realpath(sys.executable))
else:
# sys.executable can be empty if argv[0] has been changed and Python is
# unable to retrieve the real program name
_PROJECT_BASE = _safe_realpath(os.getcwd())
if os.name == "nt" and "pcbuild" in _PROJECT_BASE[-8:].lower():
_PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir))
# PC/VS7.1
if os.name == "nt" and "\\pc\\v" in _PROJECT_BASE[-10:].lower():
_PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir))
# PC/AMD64
if os.name == "nt" and "\\pcbuild\\amd64" in _PROJECT_BASE[-14:].lower():
_PROJECT_BASE = _safe_realpath(os.path.join(_PROJECT_BASE, pardir, pardir))
def is_python_build():
for fn in ("Setup.dist", "Setup.local"):
if os.path.isfile(os.path.join(_PROJECT_BASE, "Modules", fn)):
return True
return False
_PYTHON_BUILD = is_python_build()
_cfg_read = False
def _ensure_cfg_read():
global _cfg_read
if not _cfg_read:
from ..resources import finder
backport_package = __name__.rsplit('.', 1)[0]
_finder = finder(backport_package)
_cfgfile = _finder.find('sysconfig.cfg')
assert _cfgfile, 'sysconfig.cfg exists'
with _cfgfile.as_stream() as s:
_SCHEMES.readfp(s)
if _PYTHON_BUILD:
for scheme in ('posix_prefix', 'posix_home'):
_SCHEMES.set(scheme, 'include', '{srcdir}/Include')
_SCHEMES.set(scheme, 'platinclude', '{projectbase}/.')
_cfg_read = True
_SCHEMES = configparser.RawConfigParser()
_VAR_REPL = re.compile(r'\{([^{]*?)\}')
def _expand_globals(config):
_ensure_cfg_read()
if config.has_section('globals'):
globals = config.items('globals')
else:
globals = tuple()
sections = config.sections()
for section in sections:
if section == 'globals':
continue
for option, value in globals:
if config.has_option(section, option):
continue
config.set(section, option, value)
config.remove_section('globals')
# now expanding local variables defined in the cfg file
#
for section in config.sections():
variables = dict(config.items(section))
def _replacer(matchobj):
name = matchobj.group(1)
if name in variables:
return variables[name]
return matchobj.group(0)
for option, value in config.items(section):
config.set(section, option, _VAR_REPL.sub(_replacer, value))
#_expand_globals(_SCHEMES)
_PY_VERSION = '%s.%s.%s' % sys.version_info[:3]
_PY_VERSION_SHORT = '%s.%s' % sys.version_info[:2]
_PY_VERSION_SHORT_NO_DOT = '%s%s' % sys.version_info[:2]
_PREFIX = os.path.normpath(sys.prefix)
_EXEC_PREFIX = os.path.normpath(sys.exec_prefix)
_CONFIG_VARS = None
_USER_BASE = None
def _subst_vars(path, local_vars):
"""In the string `path`, replace tokens like {some.thing} with the
corresponding value from the map `local_vars`.
If there is no corresponding value, leave the token unchanged.
"""
def _replacer(matchobj):
name = matchobj.group(1)
if name in local_vars:
return local_vars[name]
elif name in os.environ:
return os.environ[name]
return matchobj.group(0)
return _VAR_REPL.sub(_replacer, path)
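# A minimal usage sketch for _subst_vars (illustrative values only; tokens
# found in neither `local_vars` nor os.environ are left untouched):
# >>> _subst_vars('{userbase}/lib/python{py_version_short}',
# ...             {'userbase': '/home/u/.local', 'py_version_short': '3.8'})
# '/home/u/.local/lib/python3.8'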
def _extend_dict(target_dict, other_dict):
target_keys = target_dict.keys()
for key, value in other_dict.items():
if key in target_keys:
continue
target_dict[key] = value
def _expand_vars(scheme, vars):
res = {}
if vars is None:
vars = {}
_extend_dict(vars, get_config_vars())
for key, value in _SCHEMES.items(scheme):
if os.name in ('posix', 'nt'):
value = os.path.expanduser(value)
res[key] = os.path.normpath(_subst_vars(value, vars))
return res
def format_value(value, vars):
def _replacer(matchobj):
name = matchobj.group(1)
if name in vars:
return vars[name]
return matchobj.group(0)
return _VAR_REPL.sub(_replacer, value)
def _get_default_scheme():
if os.name == 'posix':
# the default scheme for posix is posix_prefix
return 'posix_prefix'
return os.name
def _getuserbase():
env_base = os.environ.get("PYTHONUSERBASE", None)
def joinuser(*args):
return os.path.expanduser(os.path.join(*args))
# what about 'os2emx', 'riscos' ?
if os.name == "nt":
base = os.environ.get("APPDATA") or "~"
if env_base:
return env_base
else:
return joinuser(base, "Python")
if sys.platform == "darwin":
framework = get_config_var("PYTHONFRAMEWORK")
if framework:
if env_base:
return env_base
else:
return joinuser("~", "Library", framework, "%d.%d" %
sys.version_info[:2])
if env_base:
return env_base
else:
return joinuser("~", ".local")
def _parse_makefile(filename, vars=None):
"""Parse a Makefile-style file.
A dictionary containing name/value pairs is returned. If an
optional dictionary is passed in as the second argument, it is
used instead of a new dictionary.
"""
# Regexes needed for parsing Makefile (and similar syntaxes,
# like old-style Setup files).
_variable_rx = re.compile(r"([a-zA-Z][a-zA-Z0-9_]+)\s*=\s*(.*)")
_findvar1_rx = re.compile(r"\$\(([A-Za-z][A-Za-z0-9_]*)\)")
_findvar2_rx = re.compile(r"\${([A-Za-z][A-Za-z0-9_]*)}")
if vars is None:
vars = {}
done = {}
notdone = {}
with codecs.open(filename, encoding='utf-8', errors="surrogateescape") as f:
lines = f.readlines()
for line in lines:
if line.startswith('#') or line.strip() == '':
continue
m = _variable_rx.match(line)
if m:
n, v = m.group(1, 2)
v = v.strip()
# `$$' is a literal `$' in make
tmpv = v.replace('$$', '')
if "$" in tmpv:
notdone[n] = v
else:
try:
v = int(v)
except ValueError:
# insert literal `$'
done[n] = v.replace('$$', '$')
else:
done[n] = v
# do variable interpolation here
variables = list(notdone.keys())
# Variables with a 'PY_' prefix in the makefile. These need to
# be made available without that prefix through sysconfig.
# Special care is needed to ensure that variable expansion works, even
# if the expansion uses the name without a prefix.
renamed_variables = ('CFLAGS', 'LDFLAGS', 'CPPFLAGS')
while len(variables) > 0:
for name in tuple(variables):
value = notdone[name]
m = _findvar1_rx.search(value) or _findvar2_rx.search(value)
if m is not None:
n = m.group(1)
found = True
if n in done:
item = str(done[n])
elif n in notdone:
# get it on a subsequent round
found = False
elif n in os.environ:
# do it like make: fall back to environment
item = os.environ[n]
elif n in renamed_variables:
if (name.startswith('PY_') and
name[3:] in renamed_variables):
item = ""
elif 'PY_' + n in notdone:
found = False
else:
item = str(done['PY_' + n])
else:
done[n] = item = ""
if found:
after = value[m.end():]
value = value[:m.start()] + item + after
if "$" in after:
notdone[name] = value
else:
try:
value = int(value)
except ValueError:
done[name] = value.strip()
else:
done[name] = value
variables.remove(name)
if (name.startswith('PY_') and
name[3:] in renamed_variables):
name = name[3:]
if name not in done:
done[name] = value
else:
# bogus variable reference (e.g. "prefix=$/opt/python");
# just drop it since we can't deal
done[name] = value
variables.remove(name)
# strip spurious spaces
for k, v in done.items():
if isinstance(v, str):
done[k] = v.strip()
# save the results in the global dictionary
vars.update(done)
return vars
def get_makefile_filename():
"""Return the path of the Makefile."""
if _PYTHON_BUILD:
return os.path.join(_PROJECT_BASE, "Makefile")
if hasattr(sys, 'abiflags'):
config_dir_name = 'config-%s%s' % (_PY_VERSION_SHORT, sys.abiflags)
else:
config_dir_name = 'config'
return os.path.join(get_path('stdlib'), config_dir_name, 'Makefile')
def _init_posix(vars):
"""Initialize the module as appropriate for POSIX systems."""
# load the installed Makefile:
makefile = get_makefile_filename()
try:
_parse_makefile(makefile, vars)
except IOError as e:
msg = "invalid Python installation: unable to open %s" % makefile
if hasattr(e, "strerror"):
msg = msg + " (%s)" % e.strerror
raise IOError(msg)
# load the installed pyconfig.h:
config_h = get_config_h_filename()
try:
with open(config_h) as f:
parse_config_h(f, vars)
except IOError as e:
msg = "invalid Python installation: unable to open %s" % config_h
if hasattr(e, "strerror"):
msg = msg + " (%s)" % e.strerror
raise IOError(msg)
# On AIX, there are wrong paths to the linker scripts in the Makefile
# -- these paths are relative to the Python source, but when installed
# the scripts are in another directory.
if _PYTHON_BUILD:
vars['LDSHARED'] = vars['BLDSHARED']
def _init_non_posix(vars):
"""Initialize the module as appropriate for NT"""
# set basic install directories
vars['LIBDEST'] = get_path('stdlib')
vars['BINLIBDEST'] = get_path('platstdlib')
vars['INCLUDEPY'] = get_path('include')
vars['SO'] = '.pyd'
vars['EXE'] = '.exe'
vars['VERSION'] = _PY_VERSION_SHORT_NO_DOT
vars['BINDIR'] = os.path.dirname(_safe_realpath(sys.executable))
#
# public APIs
#
def parse_config_h(fp, vars=None):
"""Parse a config.h-style file.
A dictionary containing name/value pairs is returned. If an
optional dictionary is passed in as the second argument, it is
used instead of a new dictionary.
"""
if vars is None:
vars = {}
define_rx = re.compile("#define ([A-Z][A-Za-z0-9_]+) (.*)\n")
undef_rx = re.compile("/[*] #undef ([A-Z][A-Za-z0-9_]+) [*]/\n")
while True:
line = fp.readline()
if not line:
break
m = define_rx.match(line)
if m:
n, v = m.group(1, 2)
try:
v = int(v)
except ValueError:
pass
vars[n] = v
else:
m = undef_rx.match(line)
if m:
vars[m.group(1)] = 0
return vars
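# A small worked example for parse_config_h; any pyconfig.h-style stream works
# (the ordering shown assumes an insertion-ordered dict, i.e. Python 3.7+):
# >>> import io
# >>> parse_config_h(io.StringIO('#define HAVE_UNISTD_H 1\n/* #undef HAVE_FOO */\n'))
# {'HAVE_UNISTD_H': 1, 'HAVE_FOO': 0}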
def get_config_h_filename():
"""Return the path of pyconfig.h."""
if _PYTHON_BUILD:
if os.name == "nt":
inc_dir = os.path.join(_PROJECT_BASE, "PC")
else:
inc_dir = _PROJECT_BASE
else:
inc_dir = get_path('platinclude')
return os.path.join(inc_dir, 'pyconfig.h')
def get_scheme_names():
"""Return a tuple containing the schemes names."""
return tuple(sorted(_SCHEMES.sections()))
def get_path_names():
"""Return a tuple containing the paths names."""
# xxx see if we want a static list
return _SCHEMES.options('posix_prefix')
def get_paths(scheme=_get_default_scheme(), vars=None, expand=True):
"""Return a mapping containing an install scheme.
``scheme`` is the install scheme name. If not provided, it will
return the default scheme for the current platform.
"""
_ensure_cfg_read()
if expand:
return _expand_vars(scheme, vars)
else:
return dict(_SCHEMES.items(scheme))
def get_path(name, scheme=_get_default_scheme(), vars=None, expand=True):
"""Return a path corresponding to the scheme.
``scheme`` is the install scheme name.
"""
return get_paths(scheme, vars, expand)[name]
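# Usage sketch for get_path; the result is install-specific, so the value
# below is only indicative of a typical posix layout:
# >>> get_path('stdlib')  # doctest: +SKIP
# '/usr/lib/python3.8'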
def get_config_vars(*args):
"""With no arguments, return a dictionary of all configuration
variables relevant for the current platform.
On Unix, this means every variable defined in Python's installed Makefile;
On Windows and Mac OS it's a much smaller set.
With arguments, return a list of values that result from looking up
each argument in the configuration variable dictionary.
"""
global _CONFIG_VARS
if _CONFIG_VARS is None:
_CONFIG_VARS = {}
# Normalized versions of prefix and exec_prefix are handy to have;
# in fact, these are the standard versions used most places in the
# distutils2 module.
_CONFIG_VARS['prefix'] = _PREFIX
_CONFIG_VARS['exec_prefix'] = _EXEC_PREFIX
_CONFIG_VARS['py_version'] = _PY_VERSION
_CONFIG_VARS['py_version_short'] = _PY_VERSION_SHORT
_CONFIG_VARS['py_version_nodot'] = _PY_VERSION[0] + _PY_VERSION[2]
_CONFIG_VARS['base'] = _PREFIX
_CONFIG_VARS['platbase'] = _EXEC_PREFIX
_CONFIG_VARS['projectbase'] = _PROJECT_BASE
try:
_CONFIG_VARS['abiflags'] = sys.abiflags
except AttributeError:
# sys.abiflags may not be defined on all platforms.
_CONFIG_VARS['abiflags'] = ''
if os.name in ('nt', 'os2'):
_init_non_posix(_CONFIG_VARS)
if os.name == 'posix':
_init_posix(_CONFIG_VARS)
# Setting 'userbase' is done below the call to the
# init function to enable using 'get_config_var' in
# the init-function.
if sys.version >= '2.6':
_CONFIG_VARS['userbase'] = _getuserbase()
if 'srcdir' not in _CONFIG_VARS:
_CONFIG_VARS['srcdir'] = _PROJECT_BASE
else:
_CONFIG_VARS['srcdir'] = _safe_realpath(_CONFIG_VARS['srcdir'])
# Convert srcdir into an absolute path if it appears necessary.
# Normally it is relative to the build directory. However, during
# testing, for example, we might be running a non-installed python
# from a different directory.
if _PYTHON_BUILD and os.name == "posix":
base = _PROJECT_BASE
try:
cwd = os.getcwd()
except OSError:
cwd = None
if (not os.path.isabs(_CONFIG_VARS['srcdir']) and
base != cwd):
# srcdir is relative and we are not in the same directory
# as the executable. Assume executable is in the build
# directory and make srcdir absolute.
srcdir = os.path.join(base, _CONFIG_VARS['srcdir'])
_CONFIG_VARS['srcdir'] = os.path.normpath(srcdir)
if sys.platform == 'darwin':
kernel_version = os.uname()[2] # Kernel version (8.4.3)
major_version = int(kernel_version.split('.')[0])
if major_version < 8:
# On Mac OS X before 10.4, check if -arch and -isysroot
# are in CFLAGS or LDFLAGS and remove them if they are.
# This is needed when building extensions on a 10.3 system
# using a universal build of python.
for key in ('LDFLAGS', 'BASECFLAGS',
# a number of derived variables. These need to be
# patched up as well.
'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
flags = _CONFIG_VARS[key]
flags = re.sub(r'-arch\s+\w+\s', ' ', flags)
flags = re.sub('-isysroot [^ \t]*', ' ', flags)
_CONFIG_VARS[key] = flags
else:
# Allow the user to override the architecture flags using
# an environment variable.
# NOTE: This name was introduced by Apple in OSX 10.5 and
# is used by several scripting languages distributed with
# that OS release.
if 'ARCHFLAGS' in os.environ:
arch = os.environ['ARCHFLAGS']
for key in ('LDFLAGS', 'BASECFLAGS',
# a number of derived variables. These need to be
# patched up as well.
'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
flags = _CONFIG_VARS[key]
flags = re.sub(r'-arch\s+\w+\s', ' ', flags)
flags = flags + ' ' + arch
_CONFIG_VARS[key] = flags
# If we're on OSX 10.5 or later and the user tries to
# compiles an extension using an SDK that is not present
# on the current machine it is better to not use an SDK
# than to fail.
#
# The major usecase for this is users using a Python.org
# binary installer on OSX 10.6: that installer uses
# the 10.4u SDK, but that SDK is not installed by default
# when you install Xcode.
#
CFLAGS = _CONFIG_VARS.get('CFLAGS', '')
m = re.search(r'-isysroot\s+(\S+)', CFLAGS)
if m is not None:
sdk = m.group(1)
if not os.path.exists(sdk):
for key in ('LDFLAGS', 'BASECFLAGS',
# a number of derived variables. These need to be
# patched up as well.
'CFLAGS', 'PY_CFLAGS', 'BLDSHARED'):
flags = _CONFIG_VARS[key]
flags = re.sub(r'-isysroot\s+\S+(\s|$)', ' ', flags)
_CONFIG_VARS[key] = flags
if args:
vals = []
for name in args:
vals.append(_CONFIG_VARS.get(name))
return vals
else:
return _CONFIG_VARS
def get_config_var(name):
"""Return the value of a single variable using the dictionary returned by
'get_config_vars()'.
Equivalent to get_config_vars().get(name)
"""
return get_config_vars().get(name)
def get_platform():
"""Return a string that identifies the current platform.
This is used mainly to distinguish platform-specific build directories and
platform-specific built distributions. Typically includes the OS name
and version and the architecture (as supplied by 'os.uname()'),
although the exact information included depends on the OS; eg. for IRIX
the architecture isn't particularly important (IRIX only runs on SGI
hardware), but for Linux the kernel version isn't particularly
important.
Examples of returned values:
linux-i586
linux-alpha (?)
solaris-2.6-sun4u
irix-5.3
irix64-6.2
Windows will return one of:
win-amd64 (64bit Windows on AMD64 (aka x86_64, Intel64, EM64T, etc)
win-ia64 (64bit Windows on Itanium)
win32 (all others - specifically, sys.platform is returned)
For other non-POSIX platforms, currently just returns 'sys.platform'.
"""
if os.name == 'nt':
# sniff sys.version for architecture.
prefix = " bit ("
i = sys.version.find(prefix)
if i == -1:
return sys.platform
j = sys.version.find(")", i)
look = sys.version[i+len(prefix):j].lower()
if look == 'amd64':
return 'win-amd64'
if look == 'itanium':
return 'win-ia64'
return sys.platform
if os.name != "posix" or not hasattr(os, 'uname'):
# XXX what about the architecture? NT is Intel or Alpha,
# Mac OS is M68k or PPC, etc.
return sys.platform
# Try to distinguish various flavours of Unix
osname, host, release, version, machine = os.uname()
# Convert the OS name to lowercase, remove '/' characters
# (to accommodate BSD/OS), and translate spaces (for "Power Macintosh")
osname = osname.lower().replace('/', '')
machine = machine.replace(' ', '_')
machine = machine.replace('/', '-')
if osname[:5] == "linux":
# At least on Linux/Intel, 'machine' is the processor --
# i386, etc.
# XXX what about Alpha, SPARC, etc?
return "%s-%s" % (osname, machine)
elif osname[:5] == "sunos":
if release[0] >= "5": # SunOS 5 == Solaris 2
osname = "solaris"
release = "%d.%s" % (int(release[0]) - 3, release[2:])
# fall through to standard osname-release-machine representation
elif osname[:4] == "irix": # could be "irix64"!
return "%s-%s" % (osname, release)
elif osname[:3] == "aix":
return "%s-%s.%s" % (osname, version, release)
elif osname[:6] == "cygwin":
osname = "cygwin"
rel_re = re.compile(r'[\d.]+')
m = rel_re.match(release)
if m:
release = m.group()
elif osname[:6] == "darwin":
#
# For our purposes, we'll assume that the system version from
# distutils' perspective is what MACOSX_DEPLOYMENT_TARGET is set
# to. This makes the compatibility story a bit more sane because the
# machine is going to compile and link as if it were
# MACOSX_DEPLOYMENT_TARGET.
cfgvars = get_config_vars()
macver = cfgvars.get('MACOSX_DEPLOYMENT_TARGET')
if True:
# Always calculate the release of the running machine,
# needed to determine if we can build fat binaries or not.
macrelease = macver
# Get the system version. Reading this plist is a documented
# way to get the system version (see the documentation for
# the Gestalt Manager)
try:
f = open('/System/Library/CoreServices/SystemVersion.plist')
except IOError:
# We're on a plain darwin box, fall back to the default
# behaviour.
pass
else:
try:
m = re.search(r'<key>ProductUserVisibleVersion</key>\s*'
r'<string>(.*?)</string>', f.read())
finally:
f.close()
if m is not None:
macrelease = '.'.join(m.group(1).split('.')[:2])
# else: fall back to the default behaviour
if not macver:
macver = macrelease
if macver:
release = macver
osname = "macosx"
if ((macrelease + '.') >= '10.4.' and
'-arch' in get_config_vars().get('CFLAGS', '').strip()):
# The universal build will build fat binaries, but not on
# systems before 10.4
#
# Try to detect 4-way universal builds, those have machine-type
# 'universal' instead of 'fat'.
machine = 'fat'
cflags = get_config_vars().get('CFLAGS')
archs = re.findall(r'-arch\s+(\S+)', cflags)
archs = tuple(sorted(set(archs)))
if len(archs) == 1:
machine = archs[0]
elif archs == ('i386', 'ppc'):
machine = 'fat'
elif archs == ('i386', 'x86_64'):
machine = 'intel'
elif archs == ('i386', 'ppc', 'x86_64'):
machine = 'fat3'
elif archs == ('ppc64', 'x86_64'):
machine = 'fat64'
elif archs == ('i386', 'ppc', 'ppc64', 'x86_64'):
machine = 'universal'
else:
raise ValueError(
"Don't know machine value for archs=%r" % (archs,))
elif machine == 'i386':
# On OSX the machine type returned by uname is always the
# 32-bit variant, even if the executable architecture is
# the 64-bit variant
if sys.maxsize >= 2**32:
machine = 'x86_64'
elif machine in ('PowerPC', 'Power_Macintosh'):
# Pick a sane name for the PPC architecture.
# See 'i386' case
if sys.maxsize >= 2**32:
machine = 'ppc64'
else:
machine = 'ppc'
return "%s-%s-%s" % (osname, release, machine)
def get_python_version():
return _PY_VERSION_SHORT
def _print_dict(title, data):
for index, (key, value) in enumerate(sorted(data.items())):
if index == 0:
print('%s: ' % (title))
print('\t%s = "%s"' % (key, value))
def _main():
"""Display all information sysconfig detains."""
print('Platform: "%s"' % get_platform())
print('Python version: "%s"' % get_python_version())
print('Current installation scheme: "%s"' % _get_default_scheme())
print()
_print_dict('Paths', get_paths())
print()
_print_dict('Variables', get_config_vars())
if __name__ == '__main__':
_main()
| Django-locallibrary/env/Lib/site-packages/pip/_vendor/distlib/_backport/sysconfig.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/distlib/_backport/sysconfig.py",
"repo_id": "Django-locallibrary",
"token_count": 13065
} | 12 |
from __future__ import absolute_import, division, unicode_literals
from . import base
from collections import OrderedDict
def _attr_key(attr):
"""Return an appropriate key for an attribute for sorting
Attributes have a namespace that can be either ``None`` or a string. We
can't compare the two because they're different types, so we convert
``None`` to an empty string first.
"""
return (attr[0][0] or ''), attr[0][1]
class Filter(base.Filter):
"""Alphabetizes attributes for elements"""
def __iter__(self):
for token in base.Filter.__iter__(self):
if token["type"] in ("StartTag", "EmptyTag"):
attrs = OrderedDict()
for name, value in sorted(token["data"].items(),
key=_attr_key):
attrs[name] = value
token["data"] = attrs
yield token
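# A quick illustration of the sort key: attributes are ordered by
# (namespace or '', local name), so un-namespaced names compare on the
# empty string first:
# >>> attrs = {(None, 'title'): 'x', (None, 'class'): 'y'}
# >>> [name for name, _ in sorted(attrs.items(), key=_attr_key)]
# [(None, 'class'), (None, 'title')]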
| Django-locallibrary/env/Lib/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/html5lib/filters/alphabeticalattributes.py",
"repo_id": "Django-locallibrary",
"token_count": 381
} | 13 |
from __future__ import absolute_import, division, unicode_literals
from xml.dom import Node
from ..constants import namespaces, voidElements, spaceCharacters
__all__ = ["DOCUMENT", "DOCTYPE", "TEXT", "ELEMENT", "COMMENT", "ENTITY", "UNKNOWN",
"TreeWalker", "NonRecursiveTreeWalker"]
DOCUMENT = Node.DOCUMENT_NODE
DOCTYPE = Node.DOCUMENT_TYPE_NODE
TEXT = Node.TEXT_NODE
ELEMENT = Node.ELEMENT_NODE
COMMENT = Node.COMMENT_NODE
ENTITY = Node.ENTITY_NODE
UNKNOWN = "<#UNKNOWN#>"
spaceCharacters = "".join(spaceCharacters)
class TreeWalker(object):
"""Walks a tree yielding tokens
Tokens are dicts that all have a ``type`` field specifying the type of the
token.
"""
def __init__(self, tree):
"""Creates a TreeWalker
:arg tree: the tree to walk
"""
self.tree = tree
def __iter__(self):
raise NotImplementedError
def error(self, msg):
"""Generates an error token with the given message
:arg msg: the error message
:returns: SerializeError token
"""
return {"type": "SerializeError", "data": msg}
def emptyTag(self, namespace, name, attrs, hasChildren=False):
"""Generates an EmptyTag token
:arg namespace: the namespace of the token--can be ``None``
:arg name: the name of the element
:arg attrs: the attributes of the element as a dict
:arg hasChildren: whether or not to yield a SerializationError because
this tag shouldn't have children
:returns: EmptyTag token
"""
yield {"type": "EmptyTag", "name": name,
"namespace": namespace,
"data": attrs}
if hasChildren:
yield self.error("Void element has children")
def startTag(self, namespace, name, attrs):
"""Generates a StartTag token
:arg namespace: the namespace of the token--can be ``None``
:arg name: the name of the element
:arg attrs: the attributes of the element as a dict
:returns: StartTag token
"""
return {"type": "StartTag",
"name": name,
"namespace": namespace,
"data": attrs}
def endTag(self, namespace, name):
"""Generates an EndTag token
:arg namespace: the namespace of the token--can be ``None``
:arg name: the name of the element
:returns: EndTag token
"""
return {"type": "EndTag",
"name": name,
"namespace": namespace}
def text(self, data):
"""Generates SpaceCharacters and Characters tokens
Depending on what's in the data, this generates one or more
``SpaceCharacters`` and ``Characters`` tokens.
For example:
>>> from html5lib.treewalkers.base import TreeWalker
>>> # Give it an empty tree just so it instantiates
>>> walker = TreeWalker([])
>>> list(walker.text(''))
[]
>>> list(walker.text(' '))
[{u'data': ' ', u'type': u'SpaceCharacters'}]
>>> list(walker.text(' abc ')) # doctest: +NORMALIZE_WHITESPACE
[{u'data': ' ', u'type': u'SpaceCharacters'},
{u'data': u'abc', u'type': u'Characters'},
{u'data': u' ', u'type': u'SpaceCharacters'}]
:arg data: the text data
:returns: one or more ``SpaceCharacters`` and ``Characters`` tokens
"""
middle = data.lstrip(spaceCharacters)
left = data[:len(data) - len(middle)]
if left:
yield {"type": "SpaceCharacters", "data": left}
data = middle
middle = data.rstrip(spaceCharacters)
right = data[len(middle):]
if middle:
yield {"type": "Characters", "data": middle}
if right:
yield {"type": "SpaceCharacters", "data": right}
def comment(self, data):
"""Generates a Comment token
:arg data: the comment
:returns: Comment token
"""
return {"type": "Comment", "data": data}
def doctype(self, name, publicId=None, systemId=None):
"""Generates a Doctype token
:arg name:
:arg publicId:
:arg systemId:
:returns: the Doctype token
"""
return {"type": "Doctype",
"name": name,
"publicId": publicId,
"systemId": systemId}
def entity(self, name):
"""Generates an Entity token
:arg name: the entity name
:returns: an Entity token
"""
return {"type": "Entity", "name": name}
def unknown(self, nodeType):
"""Handles unknown node types"""
return self.error("Unknown node type: " + nodeType)
class NonRecursiveTreeWalker(TreeWalker):
def getNodeDetails(self, node):
raise NotImplementedError
def getFirstChild(self, node):
raise NotImplementedError
def getNextSibling(self, node):
raise NotImplementedError
def getParentNode(self, node):
raise NotImplementedError
def __iter__(self):
currentNode = self.tree
while currentNode is not None:
details = self.getNodeDetails(currentNode)
type, details = details[0], details[1:]
hasChildren = False
if type == DOCTYPE:
yield self.doctype(*details)
elif type == TEXT:
for token in self.text(*details):
yield token
elif type == ELEMENT:
namespace, name, attributes, hasChildren = details
if (not namespace or namespace == namespaces["html"]) and name in voidElements:
for token in self.emptyTag(namespace, name, attributes,
hasChildren):
yield token
hasChildren = False
else:
yield self.startTag(namespace, name, attributes)
elif type == COMMENT:
yield self.comment(details[0])
elif type == ENTITY:
yield self.entity(details[0])
elif type == DOCUMENT:
hasChildren = True
else:
yield self.unknown(details[0])
if hasChildren:
firstChild = self.getFirstChild(currentNode)
else:
firstChild = None
if firstChild is not None:
currentNode = firstChild
else:
while currentNode is not None:
details = self.getNodeDetails(currentNode)
type, details = details[0], details[1:]
if type == ELEMENT:
namespace, name, attributes, hasChildren = details
if (namespace and namespace != namespaces["html"]) or name not in voidElements:
yield self.endTag(namespace, name)
if self.tree is currentNode:
currentNode = None
break
nextSibling = self.getNextSibling(currentNode)
if nextSibling is not None:
currentNode = nextSibling
break
else:
currentNode = self.getParentNode(currentNode)
| Django-locallibrary/env/Lib/site-packages/pip/_vendor/html5lib/treewalkers/base.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/html5lib/treewalkers/base.py",
"repo_id": "Django-locallibrary",
"token_count": 3440
} | 14 |
from . import idnadata
import bisect
import unicodedata
import re
import sys
from .intranges import intranges_contain
_virama_combining_class = 9
_alabel_prefix = b'xn--'
_unicode_dots_re = re.compile(u'[\u002e\u3002\uff0e\uff61]')
if sys.version_info[0] >= 3:
unicode = str
unichr = chr
class IDNAError(UnicodeError):
""" Base exception for all IDNA-encoding related problems """
pass
class IDNABidiError(IDNAError):
""" Exception when bidirectional requirements are not satisfied """
pass
class InvalidCodepoint(IDNAError):
""" Exception when a disallowed or unallocated codepoint is used """
pass
class InvalidCodepointContext(IDNAError):
""" Exception when the codepoint is not valid in the context it is used """
pass
def _combining_class(cp):
v = unicodedata.combining(unichr(cp))
if v == 0:
if not unicodedata.name(unichr(cp)):
raise ValueError("Unknown character in unicodedata")
return v
def _is_script(cp, script):
return intranges_contain(ord(cp), idnadata.scripts[script])
def _punycode(s):
return s.encode('punycode')
def _unot(s):
return 'U+{0:04X}'.format(s)
def valid_label_length(label):
if len(label) > 63:
return False
return True
def valid_string_length(label, trailing_dot):
if len(label) > (254 if trailing_dot else 253):
return False
return True
def check_bidi(label, check_ltr=False):
# Bidi rules should only be applied if string contains RTL characters
bidi_label = False
for (idx, cp) in enumerate(label, 1):
direction = unicodedata.bidirectional(cp)
if direction == '':
# String likely comes from a newer version of Unicode
raise IDNABidiError('Unknown directionality in label {0} at position {1}'.format(repr(label), idx))
if direction in ['R', 'AL', 'AN']:
bidi_label = True
if not bidi_label and not check_ltr:
return True
# Bidi rule 1
direction = unicodedata.bidirectional(label[0])
if direction in ['R', 'AL']:
rtl = True
elif direction == 'L':
rtl = False
else:
raise IDNABidiError('First codepoint in label {0} must be directionality L, R or AL'.format(repr(label)))
valid_ending = False
number_type = False
for (idx, cp) in enumerate(label, 1):
direction = unicodedata.bidirectional(cp)
if rtl:
# Bidi rule 2
if not direction in ['R', 'AL', 'AN', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']:
raise IDNABidiError('Invalid direction for codepoint at position {0} in a right-to-left label'.format(idx))
# Bidi rule 3
if direction in ['R', 'AL', 'EN', 'AN']:
valid_ending = True
elif direction != 'NSM':
valid_ending = False
# Bidi rule 4
if direction in ['AN', 'EN']:
if not number_type:
number_type = direction
else:
if number_type != direction:
raise IDNABidiError('Can not mix numeral types in a right-to-left label')
else:
# Bidi rule 5
if not direction in ['L', 'EN', 'ES', 'CS', 'ET', 'ON', 'BN', 'NSM']:
raise IDNABidiError('Invalid direction for codepoint at position {0} in a left-to-right label'.format(idx))
# Bidi rule 6
if direction in ['L', 'EN']:
valid_ending = True
elif direction != 'NSM':
valid_ending = False
if not valid_ending:
raise IDNABidiError('Label ends with illegal codepoint directionality')
return True
def check_initial_combiner(label):
if unicodedata.category(label[0])[0] == 'M':
raise IDNAError('Label begins with an illegal combining character')
return True
def check_hyphen_ok(label):
if label[2:4] == '--':
raise IDNAError('Label has disallowed hyphens in 3rd and 4th position')
if label[0] == '-' or label[-1] == '-':
raise IDNAError('Label must not start or end with a hyphen')
return True
def check_nfc(label):
if unicodedata.normalize('NFC', label) != label:
raise IDNAError('Label must be in Normalization Form C')
def valid_contextj(label, pos):
cp_value = ord(label[pos])
if cp_value == 0x200c:
if pos > 0:
if _combining_class(ord(label[pos - 1])) == _virama_combining_class:
return True
ok = False
for i in range(pos-1, -1, -1):
joining_type = idnadata.joining_types.get(ord(label[i]))
if joining_type == ord('T'):
continue
if joining_type in [ord('L'), ord('D')]:
ok = True
break
if not ok:
return False
ok = False
for i in range(pos+1, len(label)):
joining_type = idnadata.joining_types.get(ord(label[i]))
if joining_type == ord('T'):
continue
if joining_type in [ord('R'), ord('D')]:
ok = True
break
return ok
if cp_value == 0x200d:
if pos > 0:
if _combining_class(ord(label[pos - 1])) == _virama_combining_class:
return True
return False
else:
return False
def valid_contexto(label, pos, exception=False):
cp_value = ord(label[pos])
if cp_value == 0x00b7:
if 0 < pos < len(label)-1:
if ord(label[pos - 1]) == 0x006c and ord(label[pos + 1]) == 0x006c:
return True
return False
elif cp_value == 0x0375:
if pos < len(label)-1 and len(label) > 1:
return _is_script(label[pos + 1], 'Greek')
return False
elif cp_value == 0x05f3 or cp_value == 0x05f4:
if pos > 0:
return _is_script(label[pos - 1], 'Hebrew')
return False
elif cp_value == 0x30fb:
for cp in label:
if cp == u'\u30fb':
continue
if _is_script(cp, 'Hiragana') or _is_script(cp, 'Katakana') or _is_script(cp, 'Han'):
return True
return False
elif 0x660 <= cp_value <= 0x669:
for cp in label:
if 0x6f0 <= ord(cp) <= 0x06f9:
return False
return True
elif 0x6f0 <= cp_value <= 0x6f9:
for cp in label:
if 0x660 <= ord(cp) <= 0x0669:
return False
return True
def check_label(label):
if isinstance(label, (bytes, bytearray)):
label = label.decode('utf-8')
if len(label) == 0:
raise IDNAError('Empty Label')
check_nfc(label)
check_hyphen_ok(label)
check_initial_combiner(label)
for (pos, cp) in enumerate(label):
cp_value = ord(cp)
if intranges_contain(cp_value, idnadata.codepoint_classes['PVALID']):
continue
elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTJ']):
try:
if not valid_contextj(label, pos):
raise InvalidCodepointContext('Joiner {0} not allowed at position {1} in {2}'.format(
_unot(cp_value), pos+1, repr(label)))
except ValueError:
raise IDNAError('Unknown codepoint adjacent to joiner {0} at position {1} in {2}'.format(
_unot(cp_value), pos+1, repr(label)))
elif intranges_contain(cp_value, idnadata.codepoint_classes['CONTEXTO']):
if not valid_contexto(label, pos):
raise InvalidCodepointContext('Codepoint {0} not allowed at position {1} in {2}'.format(_unot(cp_value), pos+1, repr(label)))
else:
raise InvalidCodepoint('Codepoint {0} at position {1} of {2} not allowed'.format(_unot(cp_value), pos+1, repr(label)))
check_bidi(label)
def alabel(label):
try:
label = label.encode('ascii')
ulabel(label)
if not valid_label_length(label):
raise IDNAError('Label too long')
return label
except UnicodeEncodeError:
pass
if not label:
raise IDNAError('No Input')
label = unicode(label)
check_label(label)
label = _punycode(label)
label = _alabel_prefix + label
if not valid_label_length(label):
raise IDNAError('Label too long')
return label
def ulabel(label):
if not isinstance(label, (bytes, bytearray)):
try:
label = label.encode('ascii')
except UnicodeEncodeError:
check_label(label)
return label
label = label.lower()
if label.startswith(_alabel_prefix):
label = label[len(_alabel_prefix):]
if not label:
raise IDNAError('Malformed A-label, no Punycode eligible content found')
if label.decode('ascii')[-1] == '-':
raise IDNAError('A-label must not end with a hyphen')
else:
check_label(label)
return label.decode('ascii')
label = label.decode('punycode')
check_label(label)
return label
def uts46_remap(domain, std3_rules=True, transitional=False):
"""Re-map the characters in the string according to UTS46 processing."""
from .uts46data import uts46data
output = u""
try:
for pos, char in enumerate(domain):
code_point = ord(char)
uts46row = uts46data[code_point if code_point < 256 else
bisect.bisect_left(uts46data, (code_point, "Z")) - 1]
status = uts46row[1]
replacement = uts46row[2] if len(uts46row) == 3 else None
if (status == "V" or
(status == "D" and not transitional) or
(status == "3" and not std3_rules and replacement is None)):
output += char
elif replacement is not None and (status == "M" or
(status == "3" and not std3_rules) or
(status == "D" and transitional)):
output += replacement
elif status != "I":
raise IndexError()
return unicodedata.normalize("NFC", output)
except IndexError:
raise InvalidCodepoint(
"Codepoint {0} not allowed at position {1} in {2}".format(
_unot(code_point), pos + 1, repr(domain)))
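# Hedged example: among other things the UTS46 table maps upper-case ASCII
# letters (status 'M') to lower case, so a mixed-case domain is normalized:
# >>> uts46_remap(u'CAPS.Example')
# u'caps.example'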
def encode(s, strict=False, uts46=False, std3_rules=False, transitional=False):
if isinstance(s, (bytes, bytearray)):
s = s.decode("ascii")
if uts46:
s = uts46_remap(s, std3_rules, transitional)
trailing_dot = False
result = []
if strict:
labels = s.split('.')
else:
labels = _unicode_dots_re.split(s)
if not labels or labels == ['']:
raise IDNAError('Empty domain')
if labels[-1] == '':
del labels[-1]
trailing_dot = True
for label in labels:
s = alabel(label)
if s:
result.append(s)
else:
raise IDNAError('Empty label')
if trailing_dot:
result.append(b'')
s = b'.'.join(result)
if not valid_string_length(s, trailing_dot):
raise IDNAError('Domain too long')
return s
def decode(s, strict=False, uts46=False, std3_rules=False):
if isinstance(s, (bytes, bytearray)):
s = s.decode("ascii")
if uts46:
s = uts46_remap(s, std3_rules, False)
trailing_dot = False
result = []
if not strict:
labels = _unicode_dots_re.split(s)
else:
labels = s.split(u'.')
if not labels or labels == ['']:
raise IDNAError('Empty domain')
if not labels[-1]:
del labels[-1]
trailing_dot = True
for label in labels:
s = ulabel(label)
if s:
result.append(s)
else:
raise IDNAError('Empty label')
if trailing_dot:
result.append(u'')
return u'.'.join(result)
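# Round-trip sketch using the standard example from the idna documentation
# (the U-label is Japanese for "domain.test"):
# >>> encode(u'\u30c9\u30e1\u30a4\u30f3.\u30c6\u30b9\u30c8')
# b'xn--eckwd4c7c.xn--zckzah'
# >>> decode(b'xn--eckwd4c7c.xn--zckzah') == u'\u30c9\u30e1\u30a4\u30f3.\u30c6\u30b9\u30c8'
# True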
| Django-locallibrary/env/Lib/site-packages/pip/_vendor/idna/core.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/idna/core.py",
"repo_id": "Django-locallibrary",
"token_count": 5562
} | 15 |
# This file is dual licensed under the terms of the Apache License, Version
# 2.0, and the BSD License. See the LICENSE file in the root of this repository
# for complete details.
from __future__ import absolute_import, division, print_function
import operator
import os
import platform
import sys
from pip._vendor.pyparsing import ParseException, ParseResults, stringStart, stringEnd
from pip._vendor.pyparsing import ZeroOrMore, Group, Forward, QuotedString
from pip._vendor.pyparsing import Literal as L # noqa
from ._compat import string_types
from ._typing import TYPE_CHECKING
from .specifiers import Specifier, InvalidSpecifier
if TYPE_CHECKING: # pragma: no cover
from typing import Any, Callable, Dict, List, Optional, Tuple, Union
Operator = Callable[[str, str], bool]
__all__ = [
"InvalidMarker",
"UndefinedComparison",
"UndefinedEnvironmentName",
"Marker",
"default_environment",
]
class InvalidMarker(ValueError):
"""
An invalid marker was found, users should refer to PEP 508.
"""
class UndefinedComparison(ValueError):
"""
An invalid operation was attempted on a value that doesn't support it.
"""
class UndefinedEnvironmentName(ValueError):
"""
A name was attempted to be used that does not exist inside of the
environment.
"""
class Node(object):
def __init__(self, value):
# type: (Any) -> None
self.value = value
def __str__(self):
# type: () -> str
return str(self.value)
def __repr__(self):
# type: () -> str
return "<{0}({1!r})>".format(self.__class__.__name__, str(self))
def serialize(self):
# type: () -> str
raise NotImplementedError
class Variable(Node):
def serialize(self):
# type: () -> str
return str(self)
class Value(Node):
def serialize(self):
# type: () -> str
return '"{0}"'.format(self)
class Op(Node):
def serialize(self):
# type: () -> str
return str(self)
VARIABLE = (
L("implementation_version")
| L("platform_python_implementation")
| L("implementation_name")
| L("python_full_version")
| L("platform_release")
| L("platform_version")
| L("platform_machine")
| L("platform_system")
| L("python_version")
| L("sys_platform")
| L("os_name")
| L("os.name") # PEP-345
| L("sys.platform") # PEP-345
| L("platform.version") # PEP-345
| L("platform.machine") # PEP-345
| L("platform.python_implementation") # PEP-345
| L("python_implementation") # undocumented setuptools legacy
| L("extra") # PEP-508
)
ALIASES = {
"os.name": "os_name",
"sys.platform": "sys_platform",
"platform.version": "platform_version",
"platform.machine": "platform_machine",
"platform.python_implementation": "platform_python_implementation",
"python_implementation": "platform_python_implementation",
}
VARIABLE.setParseAction(lambda s, l, t: Variable(ALIASES.get(t[0], t[0])))
VERSION_CMP = (
L("===") | L("==") | L(">=") | L("<=") | L("!=") | L("~=") | L(">") | L("<")
)
MARKER_OP = VERSION_CMP | L("not in") | L("in")
MARKER_OP.setParseAction(lambda s, l, t: Op(t[0]))
MARKER_VALUE = QuotedString("'") | QuotedString('"')
MARKER_VALUE.setParseAction(lambda s, l, t: Value(t[0]))
BOOLOP = L("and") | L("or")
MARKER_VAR = VARIABLE | MARKER_VALUE
MARKER_ITEM = Group(MARKER_VAR + MARKER_OP + MARKER_VAR)
MARKER_ITEM.setParseAction(lambda s, l, t: tuple(t[0]))
LPAREN = L("(").suppress()
RPAREN = L(")").suppress()
MARKER_EXPR = Forward()
MARKER_ATOM = MARKER_ITEM | Group(LPAREN + MARKER_EXPR + RPAREN)
MARKER_EXPR << MARKER_ATOM + ZeroOrMore(BOOLOP + MARKER_EXPR)
MARKER = stringStart + MARKER_EXPR + stringEnd
def _coerce_parse_result(results):
# type: (Union[ParseResults, List[Any]]) -> List[Any]
if isinstance(results, ParseResults):
return [_coerce_parse_result(i) for i in results]
else:
return results
def _format_marker(marker, first=True):
# type: (Union[List[str], Tuple[Node, ...], str], Optional[bool]) -> str
assert isinstance(marker, (list, tuple, string_types))
# Sometimes we have a structure like [[...]] which is a single item list
    # where the single item is itself its own list. In that case we want to skip
# the rest of this function so that we don't get extraneous () on the
# outside.
if (
isinstance(marker, list)
and len(marker) == 1
and isinstance(marker[0], (list, tuple))
):
return _format_marker(marker[0])
if isinstance(marker, list):
inner = (_format_marker(m, first=False) for m in marker)
if first:
return " ".join(inner)
else:
return "(" + " ".join(inner) + ")"
elif isinstance(marker, tuple):
return " ".join([m.serialize() for m in marker])
else:
return marker
_operators = {
"in": lambda lhs, rhs: lhs in rhs,
"not in": lambda lhs, rhs: lhs not in rhs,
"<": operator.lt,
"<=": operator.le,
"==": operator.eq,
"!=": operator.ne,
">=": operator.ge,
">": operator.gt,
} # type: Dict[str, Operator]
def _eval_op(lhs, op, rhs):
# type: (str, Op, str) -> bool
try:
spec = Specifier("".join([op.serialize(), rhs]))
except InvalidSpecifier:
pass
else:
return spec.contains(lhs)
oper = _operators.get(op.serialize()) # type: Optional[Operator]
if oper is None:
raise UndefinedComparison(
"Undefined {0!r} on {1!r} and {2!r}.".format(op, lhs, rhs)
)
return oper(lhs, rhs)
class Undefined(object):
pass
_undefined = Undefined()
def _get_env(environment, name):
# type: (Dict[str, str], str) -> str
value = environment.get(name, _undefined) # type: Union[str, Undefined]
if isinstance(value, Undefined):
raise UndefinedEnvironmentName(
"{0!r} does not exist in evaluation environment.".format(name)
)
return value
def _evaluate_markers(markers, environment):
# type: (List[Any], Dict[str, str]) -> bool
groups = [[]] # type: List[List[bool]]
for marker in markers:
assert isinstance(marker, (list, tuple, string_types))
if isinstance(marker, list):
groups[-1].append(_evaluate_markers(marker, environment))
elif isinstance(marker, tuple):
lhs, op, rhs = marker
if isinstance(lhs, Variable):
lhs_value = _get_env(environment, lhs.value)
rhs_value = rhs.value
else:
lhs_value = lhs.value
rhs_value = _get_env(environment, rhs.value)
groups[-1].append(_eval_op(lhs_value, op, rhs_value))
else:
assert marker in ["and", "or"]
if marker == "or":
groups.append([])
return any(all(item) for item in groups)
def format_full_version(info):
# type: (sys._version_info) -> str
version = "{0.major}.{0.minor}.{0.micro}".format(info)
kind = info.releaselevel
if kind != "final":
version += kind[0] + str(info.serial)
return version
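# Illustrative behaviour, following the code above: a final release yields
# e.g. '3.9.0', while a release candidate (releaselevel 'candidate',
# serial 1) yields e.g. '3.9.0c1'.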
def default_environment():
# type: () -> Dict[str, str]
if hasattr(sys, "implementation"):
# Ignoring the `sys.implementation` reference for type checking due to
# mypy not liking that the attribute doesn't exist in Python 2.7 when
# run with the `--py27` flag.
iver = format_full_version(sys.implementation.version) # type: ignore
implementation_name = sys.implementation.name # type: ignore
else:
iver = "0"
implementation_name = ""
return {
"implementation_name": implementation_name,
"implementation_version": iver,
"os_name": os.name,
"platform_machine": platform.machine(),
"platform_release": platform.release(),
"platform_system": platform.system(),
"platform_version": platform.version(),
"python_full_version": platform.python_version(),
"platform_python_implementation": platform.python_implementation(),
"python_version": ".".join(platform.python_version_tuple()[:2]),
"sys_platform": sys.platform,
}
class Marker(object):
def __init__(self, marker):
# type: (str) -> None
try:
self._markers = _coerce_parse_result(MARKER.parseString(marker))
except ParseException as e:
err_str = "Invalid marker: {0!r}, parse error at {1!r}".format(
marker, marker[e.loc : e.loc + 8]
)
raise InvalidMarker(err_str)
def __str__(self):
# type: () -> str
return _format_marker(self._markers)
def __repr__(self):
# type: () -> str
return "<Marker({0!r})>".format(str(self))
def evaluate(self, environment=None):
# type: (Optional[Dict[str, str]]) -> bool
"""Evaluate a marker.
Return the boolean from evaluating the given marker against the
environment. environment is an optional argument to override all or
part of the determined environment.
The environment is determined from the current Python process.
"""
current_environment = default_environment()
if environment is not None:
current_environment.update(environment)
return _evaluate_markers(self._markers, current_environment)
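# Usage sketch; the first result depends on the running interpreter unless
# an environment override is supplied, as in the second call:
# >>> Marker('python_version >= "2.7"').evaluate()
# True
# >>> Marker('os_name == "posix"').evaluate({'os_name': 'nt'})
# False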
| Django-locallibrary/env/Lib/site-packages/pip/_vendor/packaging/markers.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/packaging/markers.py",
"repo_id": "Django-locallibrary",
"token_count": 3910
} | 16 |
# -*- coding: utf-8 -*-
"""
requests.compat
~~~~~~~~~~~~~~~
This module handles import compatibility issues between Python 2 and
Python 3.
"""
from pip._vendor import chardet
import sys
# -------
# Pythons
# -------
# Syntax sugar.
_ver = sys.version_info
#: Python 2.x?
is_py2 = (_ver[0] == 2)
#: Python 3.x?
is_py3 = (_ver[0] == 3)
# Note: We've patched out simplejson support in pip because it prevents
# upgrading simplejson on Windows.
# try:
# import simplejson as json
# except (ImportError, SyntaxError):
# # simplejson does not support Python 3.2, it throws a SyntaxError
# # because of u'...' Unicode literals.
import json
# ---------
# Specifics
# ---------
if is_py2:
from urllib import (
quote, unquote, quote_plus, unquote_plus, urlencode, getproxies,
proxy_bypass, proxy_bypass_environment, getproxies_environment)
from urlparse import urlparse, urlunparse, urljoin, urlsplit, urldefrag
from urllib2 import parse_http_list
import cookielib
from Cookie import Morsel
from StringIO import StringIO
# Keep OrderedDict for backwards compatibility.
from collections import Callable, Mapping, MutableMapping, OrderedDict
builtin_str = str
bytes = str
str = unicode
basestring = basestring
numeric_types = (int, long, float)
integer_types = (int, long)
elif is_py3:
from urllib.parse import urlparse, urlunparse, urljoin, urlsplit, urlencode, quote, unquote, quote_plus, unquote_plus, urldefrag
from urllib.request import parse_http_list, getproxies, proxy_bypass, proxy_bypass_environment, getproxies_environment
from http import cookiejar as cookielib
from http.cookies import Morsel
from io import StringIO
# Keep OrderedDict for backwards compatibility.
from collections import OrderedDict
from collections.abc import Callable, Mapping, MutableMapping
builtin_str = str
str = str
bytes = bytes
basestring = (str, bytes)
numeric_types = (int, float)
integer_types = (int,)
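# Downstream code imports these names from here instead of branching on the
# interpreter version itself; a small hedged example:
# >>> from pip._vendor.requests.compat import urlparse
# >>> urlparse('https://example.com/path').netloc
# 'example.com'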
| Django-locallibrary/env/Lib/site-packages/pip/_vendor/requests/compat.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/requests/compat.py",
"repo_id": "Django-locallibrary",
"token_count": 727
} | 17 |
# coding: utf-8
"""
webencodings
~~~~~~~~~~~~
This is a Python implementation of the `WHATWG Encoding standard
<http://encoding.spec.whatwg.org/>`. See README for details.
:copyright: Copyright 2012 by Simon Sapin
:license: BSD, see LICENSE for details.
"""
from __future__ import unicode_literals
import codecs
from .labels import LABELS
VERSION = '0.5.1'
# Some names in Encoding are not valid Python aliases. Remap these.
PYTHON_NAMES = {
'iso-8859-8-i': 'iso-8859-8',
'x-mac-cyrillic': 'mac-cyrillic',
'macintosh': 'mac-roman',
'windows-874': 'cp874'}
CACHE = {}
def ascii_lower(string):
r"""Transform (only) ASCII letters to lower case: A-Z is mapped to a-z.
    :param string: A Unicode string.
:returns: A new Unicode string.
This is used for `ASCII case-insensitive
<http://encoding.spec.whatwg.org/#ascii-case-insensitive>`_
matching of encoding labels.
The same matching is also used, among other things,
for `CSS keywords <http://dev.w3.org/csswg/css-values/#keywords>`_.
This is different from the :meth:`~py:str.lower` method of Unicode strings
which also affect non-ASCII characters,
sometimes mapping them into the ASCII range:
>>> keyword = u'Bac\N{KELVIN SIGN}ground'
>>> assert keyword.lower() == u'background'
>>> assert ascii_lower(keyword) != keyword.lower()
>>> assert ascii_lower(keyword) == u'bac\N{KELVIN SIGN}ground'
"""
# This turns out to be faster than unicode.translate()
return string.encode('utf8').lower().decode('utf8')
def lookup(label):
"""
Look for an encoding by its label.
This is the spec’s `get an encoding
<http://encoding.spec.whatwg.org/#concept-encoding-get>`_ algorithm.
Supported labels are listed there.
:param label: A string.
:returns:
An :class:`Encoding` object, or :obj:`None` for an unknown label.
"""
# Only strip ASCII whitespace: U+0009, U+000A, U+000C, U+000D, and U+0020.
label = ascii_lower(label.strip('\t\n\f\r '))
name = LABELS.get(label)
if name is None:
return None
encoding = CACHE.get(name)
if encoding is None:
if name == 'x-user-defined':
from .x_user_defined import codec_info
else:
python_name = PYTHON_NAMES.get(name, name)
# Any python_name value that gets to here should be valid.
codec_info = codecs.lookup(python_name)
encoding = Encoding(name, codec_info)
CACHE[name] = encoding
return encoding
def _get_encoding(encoding_or_label):
"""
Accept either an encoding object or label.
:param encoding: An :class:`Encoding` object or a label string.
:returns: An :class:`Encoding` object.
:raises: :exc:`~exceptions.LookupError` for an unknown label.
"""
if hasattr(encoding_or_label, 'codec_info'):
return encoding_or_label
encoding = lookup(encoding_or_label)
if encoding is None:
raise LookupError('Unknown encoding label: %r' % encoding_or_label)
return encoding
class Encoding(object):
"""Reresents a character encoding such as UTF-8,
that can be used for decoding or encoding.
.. attribute:: name
Canonical name of the encoding
.. attribute:: codec_info
The actual implementation of the encoding,
a stdlib :class:`~codecs.CodecInfo` object.
See :func:`codecs.register`.
"""
def __init__(self, name, codec_info):
self.name = name
self.codec_info = codec_info
def __repr__(self):
return '<Encoding %s>' % self.name
#: The UTF-8 encoding. Should be used for new content and formats.
UTF8 = lookup('utf-8')
_UTF16LE = lookup('utf-16le')
_UTF16BE = lookup('utf-16be')
def decode(input, fallback_encoding, errors='replace'):
"""
Decode a single string.
:param input: A byte string
:param fallback_encoding:
An :class:`Encoding` object or a label string.
        The encoding to use if :obj:`input` does not have a BOM.
:param errors: Type of error handling. See :func:`codecs.register`.
:raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
:return:
        A ``(output, encoding)`` tuple of a Unicode string
and an :obj:`Encoding`.
"""
# Fail early if `encoding` is an invalid label.
fallback_encoding = _get_encoding(fallback_encoding)
bom_encoding, input = _detect_bom(input)
encoding = bom_encoding or fallback_encoding
return encoding.codec_info.decode(input, errors)[0], encoding
def _detect_bom(input):
"""Return (bom_encoding, input), with any BOM removed from the input."""
if input.startswith(b'\xFF\xFE'):
return _UTF16LE, input[2:]
if input.startswith(b'\xFE\xFF'):
return _UTF16BE, input[2:]
if input.startswith(b'\xEF\xBB\xBF'):
return UTF8, input[3:]
return None, input
def encode(input, encoding=UTF8, errors='strict'):
"""
Encode a single string.
    :param input: A Unicode string.
:param encoding: An :class:`Encoding` object or a label string.
:param errors: Type of error handling. See :func:`codecs.register`.
:raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
:return: A byte string.
"""
return _get_encoding(encoding).codec_info.encode(input, errors)[0]
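# Hedged round-trip sketch; note that per the WHATWG standard the 'latin1'
# label resolves to windows-1252, not to ISO 8859-1:
# >>> lookup('latin1').name
# 'windows-1252'
# >>> decode(b'\xc3\xa9', 'utf-8')
# (u'\xe9', <Encoding utf-8>)
# >>> encode(u'\xe9', 'utf-8')
# b'\xc3\xa9'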
def iter_decode(input, fallback_encoding, errors='replace'):
"""
"Pull"-based decoder.
:param input:
An iterable of byte strings.
The input is first consumed just enough to determine the encoding
        based on the presence of a BOM,
        then consumed on demand when the return value is.
    :param fallback_encoding:
        An :class:`Encoding` object or a label string.
        The encoding to use if :obj:`input` does not have a BOM.
:param errors: Type of error handling. See :func:`codecs.register`.
:raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
:returns:
An ``(output, encoding)`` tuple.
:obj:`output` is an iterable of Unicode strings,
:obj:`encoding` is the :obj:`Encoding` that is being used.
"""
decoder = IncrementalDecoder(fallback_encoding, errors)
generator = _iter_decode_generator(input, decoder)
encoding = next(generator)
return generator, encoding
def _iter_decode_generator(input, decoder):
"""Return a generator that first yields the :obj:`Encoding`,
    then yields output chunks as Unicode strings.
"""
decode = decoder.decode
input = iter(input)
    for chunk in input:
        output = decode(chunk)
if output:
assert decoder.encoding is not None
yield decoder.encoding
yield output
break
else:
# Input exhausted without determining the encoding
output = decode(b'', final=True)
assert decoder.encoding is not None
yield decoder.encoding
if output:
yield output
return
    for chunk in input:
        output = decode(chunk)
if output:
yield output
output = decode(b'', final=True)
if output:
yield output
def iter_encode(input, encoding=UTF8, errors='strict'):
"""
“Pull”-based encoder.
:param input: An iterable of Unicode strings.
:param encoding: An :class:`Encoding` object or a label string.
:param errors: Type of error handling. See :func:`codecs.register`.
:raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
:returns: An iterable of byte strings.
"""
# Fail early if `encoding` is an invalid label.
encode = IncrementalEncoder(encoding, errors).encode
return _iter_encode_generator(input, encode)
def _iter_encode_generator(input, encode):
    for chunk in input:
        output = encode(chunk)
if output:
yield output
output = encode('', final=True)
if output:
yield output
class IncrementalDecoder(object):
"""
“Push”-based decoder.
:param fallback_encoding:
An :class:`Encoding` object or a label string.
        The encoding to use if :obj:`input` does not have a BOM.
:param errors: Type of error handling. See :func:`codecs.register`.
:raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
"""
def __init__(self, fallback_encoding, errors='replace'):
# Fail early if `encoding` is an invalid label.
self._fallback_encoding = _get_encoding(fallback_encoding)
self._errors = errors
self._buffer = b''
self._decoder = None
#: The actual :class:`Encoding` that is being used,
#: or :obj:`None` if that is not determined yet.
        #: (I.e. if there is not enough input yet to determine
#: if there is a BOM.)
self.encoding = None # Not known yet.
def decode(self, input, final=False):
"""Decode one chunk of the input.
:param input: A byte string.
:param final:
Indicate that no more input is available.
Must be :obj:`True` if this is the last call.
        :returns: A Unicode string.
"""
decoder = self._decoder
if decoder is not None:
return decoder(input, final)
input = self._buffer + input
encoding, input = _detect_bom(input)
if encoding is None:
if len(input) < 3 and not final: # Not enough data yet.
self._buffer = input
return ''
else: # No BOM
encoding = self._fallback_encoding
decoder = encoding.codec_info.incrementaldecoder(self._errors).decode
self._decoder = decoder
self.encoding = encoding
return decoder(input, final)
class IncrementalEncoder(object):
"""
“Push”-based encoder.
:param encoding: An :class:`Encoding` object or a label string.
:param errors: Type of error handling. See :func:`codecs.register`.
:raises: :exc:`~exceptions.LookupError` for an unknown encoding label.
.. method:: encode(input, final=False)
        :param input: A Unicode string.
:param final:
Indicate that no more input is available.
Must be :obj:`True` if this is the last call.
:returns: A byte string.
"""
def __init__(self, encoding=UTF8, errors='strict'):
encoding = _get_encoding(encoding)
self.encode = encoding.codec_info.incrementalencoder(errors).encode
| Django-locallibrary/env/Lib/site-packages/pip/_vendor/webencodings/__init__.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/webencodings/__init__.py",
"repo_id": "Django-locallibrary",
"token_count": 4219
} | 18 |
# Copyright (C) 2005, 2006 Martin von Löwis
# Licensed to PSF under a Contributor Agreement.
# The bdist_wininst command proper
# based on bdist_wininst
"""
Implements the bdist_msi command.
"""
import os
import sys
import warnings
from distutils.core import Command
from distutils.dir_util import remove_tree
from distutils.sysconfig import get_python_version
from distutils.version import StrictVersion
from distutils.errors import DistutilsOptionError
from distutils.util import get_platform
from distutils import log
import msilib
from msilib import schema, sequence, text
from msilib import Directory, Feature, Dialog, add_data
class PyDialog(Dialog):
"""Dialog class with a fixed layout: controls at the top, then a ruler,
then a list of buttons: back, next, cancel. Optionally a bitmap at the
left."""
def __init__(self, *args, **kw):
"""Dialog(database, name, x, y, w, h, attributes, title, first,
default, cancel, bitmap=true)"""
Dialog.__init__(self, *args)
ruler = self.h - 36
bmwidth = 152*ruler/328
#if kw.get("bitmap", True):
# self.bitmap("Bitmap", 0, 0, bmwidth, ruler, "PythonWin")
self.line("BottomLine", 0, ruler, self.w, 0)
def title(self, title):
"Set the title text of the dialog at the top."
# name, x, y, w, h, flags=Visible|Enabled|Transparent|NoPrefix,
# text, in VerdanaBold10
self.text("Title", 15, 10, 320, 60, 0x30003,
r"{\VerdanaBold10}%s" % title)
def back(self, title, next, name = "Back", active = 1):
"""Add a back button with a given title, the tab-next button,
its name in the Control table, possibly initially disabled.
Return the button, so that events can be associated"""
if active:
flags = 3 # Visible|Enabled
else:
flags = 1 # Visible
        return self.pushbutton(name, 180, self.h-27, 56, 17, flags, title, next)
def cancel(self, title, next, name = "Cancel", active = 1):
"""Add a cancel button with a given title, the tab-next button,
its name in the Control table, possibly initially disabled.
Return the button, so that events can be associated"""
if active:
flags = 3 # Visible|Enabled
else:
flags = 1 # Visible
return self.pushbutton(name, 304, self.h-27, 56, 17, flags, title, next)
def next(self, title, next, name = "Next", active = 1):
"""Add a Next button with a given title, the tab-next button,
its name in the Control table, possibly initially disabled.
Return the button, so that events can be associated"""
if active:
flags = 3 # Visible|Enabled
else:
flags = 1 # Visible
return self.pushbutton(name, 236, self.h-27, 56, 17, flags, title, next)
def xbutton(self, name, title, next, xpos):
"""Add a button with a given title, the tab-next button,
its name in the Control table, giving its x position; the
y-position is aligned with the other buttons.
Return the button, so that events can be associated"""
return self.pushbutton(name, int(self.w*xpos - 28), self.h-27, 56, 17, 3, title, next)
class bdist_msi(Command):
description = "create a Microsoft Installer (.msi) binary distribution"
user_options = [('bdist-dir=', None,
"temporary directory for creating the distribution"),
('plat-name=', 'p',
"platform name to embed in generated filenames "
"(default: %s)" % get_platform()),
('keep-temp', 'k',
"keep the pseudo-installation tree around after " +
"creating the distribution archive"),
('target-version=', None,
"require a specific python version" +
" on the target system"),
('no-target-compile', 'c',
"do not compile .py to .pyc on the target system"),
('no-target-optimize', 'o',
"do not compile .py to .pyo (optimized) "
"on the target system"),
('dist-dir=', 'd',
"directory to put final built distributions in"),
('skip-build', None,
"skip rebuilding everything (for testing/debugging)"),
('install-script=', None,
"basename of installation script to be run after "
"installation or before deinstallation"),
('pre-install-script=', None,
"Fully qualified filename of a script to be run before "
"any files are installed. This script need not be in the "
"distribution"),
]
boolean_options = ['keep-temp', 'no-target-compile', 'no-target-optimize',
'skip-build']
all_versions = ['2.0', '2.1', '2.2', '2.3', '2.4',
'2.5', '2.6', '2.7', '2.8', '2.9',
'3.0', '3.1', '3.2', '3.3', '3.4',
'3.5', '3.6', '3.7', '3.8', '3.9']
other_version = 'X'
def __init__(self, *args, **kw):
super().__init__(*args, **kw)
warnings.warn("bdist_msi command is deprecated since Python 3.9, "
"use bdist_wheel (wheel packages) instead",
DeprecationWarning, 2)
def initialize_options(self):
self.bdist_dir = None
self.plat_name = None
self.keep_temp = 0
self.no_target_compile = 0
self.no_target_optimize = 0
self.target_version = None
self.dist_dir = None
self.skip_build = None
self.install_script = None
self.pre_install_script = None
self.versions = None
def finalize_options(self):
self.set_undefined_options('bdist', ('skip_build', 'skip_build'))
if self.bdist_dir is None:
bdist_base = self.get_finalized_command('bdist').bdist_base
self.bdist_dir = os.path.join(bdist_base, 'msi')
short_version = get_python_version()
if (not self.target_version) and self.distribution.has_ext_modules():
self.target_version = short_version
if self.target_version:
self.versions = [self.target_version]
if not self.skip_build and self.distribution.has_ext_modules()\
and self.target_version != short_version:
raise DistutilsOptionError(
"target version can only be %s, or the '--skip-build'"
" option must be specified" % (short_version,))
else:
self.versions = list(self.all_versions)
self.set_undefined_options('bdist',
('dist_dir', 'dist_dir'),
('plat_name', 'plat_name'),
)
if self.pre_install_script:
raise DistutilsOptionError(
"the pre-install-script feature is not yet implemented")
if self.install_script:
for script in self.distribution.scripts:
if self.install_script == os.path.basename(script):
break
else:
raise DistutilsOptionError(
"install_script '%s' not found in scripts"
% self.install_script)
self.install_script_key = None
def run(self):
if not self.skip_build:
self.run_command('build')
install = self.reinitialize_command('install', reinit_subcommands=1)
install.prefix = self.bdist_dir
install.skip_build = self.skip_build
install.warn_dir = 0
install_lib = self.reinitialize_command('install_lib')
# we do not want to include pyc or pyo files
install_lib.compile = 0
install_lib.optimize = 0
if self.distribution.has_ext_modules():
# If we are building an installer for a Python version other
# than the one we are currently running, then we need to ensure
# our build_lib reflects the other Python version rather than ours.
# Note that for target_version!=sys.version, we must have skipped the
# build step, so there is no issue with enforcing the build of this
# version.
target_version = self.target_version
if not target_version:
assert self.skip_build, "Should have already checked this"
target_version = '%d.%d' % sys.version_info[:2]
plat_specifier = ".%s-%s" % (self.plat_name, target_version)
build = self.get_finalized_command('build')
build.build_lib = os.path.join(build.build_base,
'lib' + plat_specifier)
log.info("installing to %s", self.bdist_dir)
install.ensure_finalized()
# avoid warning of 'install_lib' about installing
# into a directory not in sys.path
sys.path.insert(0, os.path.join(self.bdist_dir, 'PURELIB'))
install.run()
del sys.path[0]
self.mkpath(self.dist_dir)
fullname = self.distribution.get_fullname()
installer_name = self.get_installer_filename(fullname)
installer_name = os.path.abspath(installer_name)
if os.path.exists(installer_name): os.unlink(installer_name)
metadata = self.distribution.metadata
author = metadata.author
if not author:
author = metadata.maintainer
if not author:
author = "UNKNOWN"
version = metadata.get_version()
# ProductVersion must be strictly numeric
# XXX need to deal with prerelease versions
sversion = "%d.%d.%d" % StrictVersion(version).version
# Prefix ProductName with Python x.y, so that
# it sorts together with the other Python packages
# in Add/Remove Programs (ARP)
fullname = self.distribution.get_fullname()
if self.target_version:
product_name = "Python %s %s" % (self.target_version, fullname)
else:
product_name = "Python %s" % (fullname)
self.db = msilib.init_database(installer_name, schema,
product_name, msilib.gen_uuid(),
sversion, author)
msilib.add_tables(self.db, sequence)
props = [('DistVersion', version)]
email = metadata.author_email or metadata.maintainer_email
if email:
props.append(("ARPCONTACT", email))
if metadata.url:
props.append(("ARPURLINFOABOUT", metadata.url))
if props:
add_data(self.db, 'Property', props)
self.add_find_python()
self.add_files()
self.add_scripts()
self.add_ui()
self.db.Commit()
if hasattr(self.distribution, 'dist_files'):
tup = 'bdist_msi', self.target_version or 'any', fullname
self.distribution.dist_files.append(tup)
if not self.keep_temp:
remove_tree(self.bdist_dir, dry_run=self.dry_run)
def add_files(self):
db = self.db
cab = msilib.CAB("distfiles")
rootdir = os.path.abspath(self.bdist_dir)
root = Directory(db, cab, None, rootdir, "TARGETDIR", "SourceDir")
f = Feature(db, "Python", "Python", "Everything",
0, 1, directory="TARGETDIR")
items = [(f, root, '')]
for version in self.versions + [self.other_version]:
target = "TARGETDIR" + version
name = default = "Python" + version
desc = "Everything"
if version is self.other_version:
title = "Python from another location"
level = 2
else:
title = "Python %s from registry" % version
level = 1
f = Feature(db, name, title, desc, 1, level, directory=target)
dir = Directory(db, cab, root, rootdir, target, default)
items.append((f, dir, version))
db.Commit()
seen = {}
for feature, dir, version in items:
todo = [dir]
while todo:
dir = todo.pop()
for file in os.listdir(dir.absolute):
afile = os.path.join(dir.absolute, file)
if os.path.isdir(afile):
short = "%s|%s" % (dir.make_short(file), file)
default = file + version
newdir = Directory(db, cab, dir, file, default, short)
todo.append(newdir)
else:
if not dir.component:
dir.start_component(dir.logical, feature, 0)
if afile not in seen:
key = seen[afile] = dir.add_file(file)
if file==self.install_script:
if self.install_script_key:
raise DistutilsOptionError(
"Multiple files with name %s" % file)
self.install_script_key = '[#%s]' % key
else:
key = seen[afile]
add_data(self.db, "DuplicateFile",
[(key + version, dir.component, key, None, dir.logical)])
db.Commit()
cab.commit(db)
def add_find_python(self):
"""Adds code to the installer to compute the location of Python.
Properties PYTHON.MACHINE.X.Y and PYTHON.USER.X.Y will be set from the
registry for each version of Python.
Properties TARGETDIRX.Y will be set from PYTHON.USER.X.Y if defined,
else from PYTHON.MACHINE.X.Y.
Properties PYTHONX.Y will be set to TARGETDIRX.Y\\python.exe"""
start = 402
for ver in self.versions:
install_path = r"SOFTWARE\Python\PythonCore\%s\InstallPath" % ver
machine_reg = "python.machine." + ver
user_reg = "python.user." + ver
machine_prop = "PYTHON.MACHINE." + ver
user_prop = "PYTHON.USER." + ver
machine_action = "PythonFromMachine" + ver
user_action = "PythonFromUser" + ver
exe_action = "PythonExe" + ver
target_dir_prop = "TARGETDIR" + ver
exe_prop = "PYTHON" + ver
if msilib.Win64:
# type: msidbLocatorTypeRawValue + msidbLocatorType64bit
Type = 2+16
else:
Type = 2
add_data(self.db, "RegLocator",
[(machine_reg, 2, install_path, None, Type),
(user_reg, 1, install_path, None, Type)])
add_data(self.db, "AppSearch",
[(machine_prop, machine_reg),
(user_prop, user_reg)])
add_data(self.db, "CustomAction",
[(machine_action, 51+256, target_dir_prop, "[" + machine_prop + "]"),
(user_action, 51+256, target_dir_prop, "[" + user_prop + "]"),
(exe_action, 51+256, exe_prop, "[" + target_dir_prop + "]\\python.exe"),
])
add_data(self.db, "InstallExecuteSequence",
[(machine_action, machine_prop, start),
(user_action, user_prop, start + 1),
(exe_action, None, start + 2),
])
add_data(self.db, "InstallUISequence",
[(machine_action, machine_prop, start),
(user_action, user_prop, start + 1),
(exe_action, None, start + 2),
])
add_data(self.db, "Condition",
[("Python" + ver, 0, "NOT TARGETDIR" + ver)])
start += 4
assert start < 500
def add_scripts(self):
if self.install_script:
start = 6800
for ver in self.versions + [self.other_version]:
install_action = "install_script." + ver
exe_prop = "PYTHON" + ver
add_data(self.db, "CustomAction",
[(install_action, 50, exe_prop, self.install_script_key)])
add_data(self.db, "InstallExecuteSequence",
[(install_action, "&Python%s=3" % ver, start)])
start += 1
# XXX pre-install scripts are currently refused in finalize_options()
# but if this feature is completed, it will also need to add
# entries for each version as the above code does
if self.pre_install_script:
scriptfn = os.path.join(self.bdist_dir, "preinstall.bat")
with open(scriptfn, "w") as f:
# The batch file will be executed with [PYTHON], so that %1
# is the path to the Python interpreter; %0 will be the path
# of the batch file.
# rem ="""
# %1 %0
# exit
# """
# <actual script>
f.write('rem ="""\n%1 %0\nexit\n"""\n')
with open(self.pre_install_script) as fin:
f.write(fin.read())
add_data(self.db, "Binary",
[("PreInstall", msilib.Binary(scriptfn))
])
add_data(self.db, "CustomAction",
[("PreInstall", 2, "PreInstall", None)
])
add_data(self.db, "InstallExecuteSequence",
[("PreInstall", "NOT Installed", 450)])
def add_ui(self):
db = self.db
x = y = 50
w = 370
h = 300
title = "[ProductName] Setup"
# see "Dialog Style Bits"
modal = 3 # visible | modal
modeless = 1 # visible
track_disk_space = 32
# UI customization properties
add_data(db, "Property",
# See "DefaultUIFont Property"
[("DefaultUIFont", "DlgFont8"),
# See "ErrorDialog Style Bit"
("ErrorDialog", "ErrorDlg"),
("Progress1", "Install"), # modified in maintenance type dlg
("Progress2", "installs"),
("MaintenanceForm_Action", "Repair"),
# possible values: ALL, JUSTME
("WhichUsers", "ALL")
])
# Fonts, see "TextStyle Table"
add_data(db, "TextStyle",
[("DlgFont8", "Tahoma", 9, None, 0),
("DlgFontBold8", "Tahoma", 8, None, 1), #bold
("VerdanaBold10", "Verdana", 10, None, 1),
("VerdanaRed9", "Verdana", 9, 255, 0),
])
# UI Sequences, see "InstallUISequence Table", "Using a Sequence Table"
# Numbers indicate sequence; see sequence.py for how these actions integrate
add_data(db, "InstallUISequence",
[("PrepareDlg", "Not Privileged or Windows9x or Installed", 140),
("WhichUsersDlg", "Privileged and not Windows9x and not Installed", 141),
# In the user interface, assume all-users installation if privileged.
("SelectFeaturesDlg", "Not Installed", 1230),
# XXX no support for resume installations yet
#("ResumeDlg", "Installed AND (RESUME OR Preselected)", 1240),
("MaintenanceTypeDlg", "Installed AND NOT RESUME AND NOT Preselected", 1250),
("ProgressDlg", None, 1280)])
add_data(db, 'ActionText', text.ActionText)
add_data(db, 'UIText', text.UIText)
#####################################################################
# Standard dialogs: FatalError, UserExit, ExitDialog
fatal=PyDialog(db, "FatalError", x, y, w, h, modal, title,
"Finish", "Finish", "Finish")
fatal.title("[ProductName] Installer ended prematurely")
fatal.back("< Back", "Finish", active = 0)
fatal.cancel("Cancel", "Back", active = 0)
fatal.text("Description1", 15, 70, 320, 80, 0x30003,
"[ProductName] setup ended prematurely because of an error. Your system has not been modified. To install this program at a later time, please run the installation again.")
fatal.text("Description2", 15, 155, 320, 20, 0x30003,
"Click the Finish button to exit the Installer.")
c=fatal.next("Finish", "Cancel", name="Finish")
c.event("EndDialog", "Exit")
user_exit=PyDialog(db, "UserExit", x, y, w, h, modal, title,
"Finish", "Finish", "Finish")
user_exit.title("[ProductName] Installer was interrupted")
user_exit.back("< Back", "Finish", active = 0)
user_exit.cancel("Cancel", "Back", active = 0)
user_exit.text("Description1", 15, 70, 320, 80, 0x30003,
"[ProductName] setup was interrupted. Your system has not been modified. "
"To install this program at a later time, please run the installation again.")
user_exit.text("Description2", 15, 155, 320, 20, 0x30003,
"Click the Finish button to exit the Installer.")
c = user_exit.next("Finish", "Cancel", name="Finish")
c.event("EndDialog", "Exit")
exit_dialog = PyDialog(db, "ExitDialog", x, y, w, h, modal, title,
"Finish", "Finish", "Finish")
exit_dialog.title("Completing the [ProductName] Installer")
exit_dialog.back("< Back", "Finish", active = 0)
exit_dialog.cancel("Cancel", "Back", active = 0)
exit_dialog.text("Description", 15, 235, 320, 20, 0x30003,
"Click the Finish button to exit the Installer.")
c = exit_dialog.next("Finish", "Cancel", name="Finish")
c.event("EndDialog", "Return")
#####################################################################
# Required dialog: FilesInUse, ErrorDlg
inuse = PyDialog(db, "FilesInUse",
x, y, w, h,
19, # KeepModeless|Modal|Visible
title,
"Retry", "Retry", "Retry", bitmap=False)
inuse.text("Title", 15, 6, 200, 15, 0x30003,
r"{\DlgFontBold8}Files in Use")
inuse.text("Description", 20, 23, 280, 20, 0x30003,
"Some files that need to be updated are currently in use.")
inuse.text("Text", 20, 55, 330, 50, 3,
"The following applications are using files that need to be updated by this setup. Close these applications and then click Retry to continue the installation or Cancel to exit it.")
inuse.control("List", "ListBox", 20, 107, 330, 130, 7, "FileInUseProcess",
None, None, None)
c=inuse.back("Exit", "Ignore", name="Exit")
c.event("EndDialog", "Exit")
c=inuse.next("Ignore", "Retry", name="Ignore")
c.event("EndDialog", "Ignore")
c=inuse.cancel("Retry", "Exit", name="Retry")
c.event("EndDialog","Retry")
# See "Error Dialog". See "ICE20" for the required names of the controls.
error = Dialog(db, "ErrorDlg",
50, 10, 330, 101,
65543, # Error|Minimize|Modal|Visible
title,
"ErrorText", None, None)
error.text("ErrorText", 50,9,280,48,3, "")
#error.control("ErrorIcon", "Icon", 15, 9, 24, 24, 5242881, None, "py.ico", None, None)
error.pushbutton("N",120,72,81,21,3,"No",None).event("EndDialog","ErrorNo")
error.pushbutton("Y",240,72,81,21,3,"Yes",None).event("EndDialog","ErrorYes")
error.pushbutton("A",0,72,81,21,3,"Abort",None).event("EndDialog","ErrorAbort")
error.pushbutton("C",42,72,81,21,3,"Cancel",None).event("EndDialog","ErrorCancel")
error.pushbutton("I",81,72,81,21,3,"Ignore",None).event("EndDialog","ErrorIgnore")
error.pushbutton("O",159,72,81,21,3,"Ok",None).event("EndDialog","ErrorOk")
error.pushbutton("R",198,72,81,21,3,"Retry",None).event("EndDialog","ErrorRetry")
#####################################################################
# Global "Query Cancel" dialog
cancel = Dialog(db, "CancelDlg", 50, 10, 260, 85, 3, title,
"No", "No", "No")
cancel.text("Text", 48, 15, 194, 30, 3,
"Are you sure you want to cancel [ProductName] installation?")
#cancel.control("Icon", "Icon", 15, 15, 24, 24, 5242881, None,
# "py.ico", None, None)
c=cancel.pushbutton("Yes", 72, 57, 56, 17, 3, "Yes", "No")
c.event("EndDialog", "Exit")
c=cancel.pushbutton("No", 132, 57, 56, 17, 3, "No", "Yes")
c.event("EndDialog", "Return")
#####################################################################
# Global "Wait for costing" dialog
costing = Dialog(db, "WaitForCostingDlg", 50, 10, 260, 85, modal, title,
"Return", "Return", "Return")
costing.text("Text", 48, 15, 194, 30, 3,
"Please wait while the installer finishes determining your disk space requirements.")
c = costing.pushbutton("Return", 102, 57, 56, 17, 3, "Return", None)
c.event("EndDialog", "Exit")
#####################################################################
# Preparation dialog: no user input except cancellation
prep = PyDialog(db, "PrepareDlg", x, y, w, h, modeless, title,
"Cancel", "Cancel", "Cancel")
prep.text("Description", 15, 70, 320, 40, 0x30003,
"Please wait while the Installer prepares to guide you through the installation.")
prep.title("Welcome to the [ProductName] Installer")
c=prep.text("ActionText", 15, 110, 320, 20, 0x30003, "Pondering...")
c.mapping("ActionText", "Text")
c=prep.text("ActionData", 15, 135, 320, 30, 0x30003, None)
c.mapping("ActionData", "Text")
prep.back("Back", None, active=0)
prep.next("Next", None, active=0)
c=prep.cancel("Cancel", None)
c.event("SpawnDialog", "CancelDlg")
#####################################################################
# Feature (Python directory) selection
seldlg = PyDialog(db, "SelectFeaturesDlg", x, y, w, h, modal, title,
"Next", "Next", "Cancel")
seldlg.title("Select Python Installations")
seldlg.text("Hint", 15, 30, 300, 20, 3,
"Select the Python locations where %s should be installed."
% self.distribution.get_fullname())
seldlg.back("< Back", None, active=0)
c = seldlg.next("Next >", "Cancel")
order = 1
c.event("[TARGETDIR]", "[SourceDir]", ordering=order)
for version in self.versions + [self.other_version]:
order += 1
c.event("[TARGETDIR]", "[TARGETDIR%s]" % version,
"FEATURE_SELECTED AND &Python%s=3" % version,
ordering=order)
c.event("SpawnWaitDialog", "WaitForCostingDlg", ordering=order + 1)
c.event("EndDialog", "Return", ordering=order + 2)
c = seldlg.cancel("Cancel", "Features")
c.event("SpawnDialog", "CancelDlg")
c = seldlg.control("Features", "SelectionTree", 15, 60, 300, 120, 3,
"FEATURE", None, "PathEdit", None)
c.event("[FEATURE_SELECTED]", "1")
ver = self.other_version
install_other_cond = "FEATURE_SELECTED AND &Python%s=3" % ver
dont_install_other_cond = "FEATURE_SELECTED AND &Python%s<>3" % ver
c = seldlg.text("Other", 15, 200, 300, 15, 3,
"Provide an alternate Python location")
c.condition("Enable", install_other_cond)
c.condition("Show", install_other_cond)
c.condition("Disable", dont_install_other_cond)
c.condition("Hide", dont_install_other_cond)
c = seldlg.control("PathEdit", "PathEdit", 15, 215, 300, 16, 1,
"TARGETDIR" + ver, None, "Next", None)
c.condition("Enable", install_other_cond)
c.condition("Show", install_other_cond)
c.condition("Disable", dont_install_other_cond)
c.condition("Hide", dont_install_other_cond)
#####################################################################
# Disk cost
cost = PyDialog(db, "DiskCostDlg", x, y, w, h, modal, title,
"OK", "OK", "OK", bitmap=False)
cost.text("Title", 15, 6, 200, 15, 0x30003,
r"{\DlgFontBold8}Disk Space Requirements")
cost.text("Description", 20, 20, 280, 20, 0x30003,
"The disk space required for the installation of the selected features.")
cost.text("Text", 20, 53, 330, 60, 3,
"The highlighted volumes (if any) do not have enough disk space "
"available for the currently selected features. You can either "
"remove some files from the highlighted volumes, or choose to "
"install less features onto local drive(s), or select different "
"destination drive(s).")
cost.control("VolumeList", "VolumeCostList", 20, 100, 330, 150, 393223,
None, "{120}{70}{70}{70}{70}", None, None)
cost.xbutton("OK", "Ok", None, 0.5).event("EndDialog", "Return")
#####################################################################
# WhichUsers Dialog. Only available on NT, and for privileged users.
# This must be run before FindRelatedProducts, because that will
# take into account whether the previous installation was per-user
# or per-machine. We currently don't support going back to this
# dialog after "Next" was selected; to support this, we would need to
# find how to reset the ALLUSERS property, and how to re-run
# FindRelatedProducts.
# On Windows9x, the ALLUSERS property is ignored on the command line
# and in the Property table, but the installer fails, according to the
# documentation, if a dialog attempts to set ALLUSERS.
whichusers = PyDialog(db, "WhichUsersDlg", x, y, w, h, modal, title,
"AdminInstall", "Next", "Cancel")
whichusers.title("Select whether to install [ProductName] for all users of this computer.")
# A radio group with two options: allusers, justme
g = whichusers.radiogroup("AdminInstall", 15, 60, 260, 50, 3,
"WhichUsers", "", "Next")
g.add("ALL", 0, 5, 150, 20, "Install for all users")
g.add("JUSTME", 0, 25, 150, 20, "Install just for me")
whichusers.back("Back", None, active=0)
c = whichusers.next("Next >", "Cancel")
c.event("[ALLUSERS]", "1", 'WhichUsers="ALL"', 1)
c.event("EndDialog", "Return", ordering = 2)
c = whichusers.cancel("Cancel", "AdminInstall")
c.event("SpawnDialog", "CancelDlg")
#####################################################################
# Installation Progress dialog (modeless)
progress = PyDialog(db, "ProgressDlg", x, y, w, h, modeless, title,
"Cancel", "Cancel", "Cancel", bitmap=False)
progress.text("Title", 20, 15, 200, 15, 0x30003,
r"{\DlgFontBold8}[Progress1] [ProductName]")
progress.text("Text", 35, 65, 300, 30, 3,
"Please wait while the Installer [Progress2] [ProductName]. "
"This may take several minutes.")
progress.text("StatusLabel", 35, 100, 35, 20, 3, "Status:")
c=progress.text("ActionText", 70, 100, w-70, 20, 3, "Pondering...")
c.mapping("ActionText", "Text")
#c=progress.text("ActionData", 35, 140, 300, 20, 3, None)
#c.mapping("ActionData", "Text")
c=progress.control("ProgressBar", "ProgressBar", 35, 120, 300, 10, 65537,
None, "Progress done", None, None)
c.mapping("SetProgress", "Progress")
progress.back("< Back", "Next", active=False)
progress.next("Next >", "Cancel", active=False)
progress.cancel("Cancel", "Back").event("SpawnDialog", "CancelDlg")
###################################################################
# Maintenance type: repair/uninstall
maint = PyDialog(db, "MaintenanceTypeDlg", x, y, w, h, modal, title,
"Next", "Next", "Cancel")
maint.title("Welcome to the [ProductName] Setup Wizard")
maint.text("BodyText", 15, 63, 330, 42, 3,
"Select whether you want to repair or remove [ProductName].")
g=maint.radiogroup("RepairRadioGroup", 15, 108, 330, 60, 3,
"MaintenanceForm_Action", "", "Next")
#g.add("Change", 0, 0, 200, 17, "&Change [ProductName]")
g.add("Repair", 0, 18, 200, 17, "&Repair [ProductName]")
g.add("Remove", 0, 36, 200, 17, "Re&move [ProductName]")
maint.back("< Back", None, active=False)
c=maint.next("Finish", "Cancel")
# Change installation: Change progress dialog to "Change", then ask
# for feature selection
#c.event("[Progress1]", "Change", 'MaintenanceForm_Action="Change"', 1)
#c.event("[Progress2]", "changes", 'MaintenanceForm_Action="Change"', 2)
# Reinstall: Change progress dialog to "Repair", then invoke reinstall
# Also set list of reinstalled features to "ALL"
c.event("[REINSTALL]", "ALL", 'MaintenanceForm_Action="Repair"', 5)
c.event("[Progress1]", "Repairing", 'MaintenanceForm_Action="Repair"', 6)
c.event("[Progress2]", "repairs", 'MaintenanceForm_Action="Repair"', 7)
c.event("Reinstall", "ALL", 'MaintenanceForm_Action="Repair"', 8)
# Uninstall: Change progress to "Remove", then invoke uninstall
# Also set list of removed features to "ALL"
c.event("[REMOVE]", "ALL", 'MaintenanceForm_Action="Remove"', 11)
c.event("[Progress1]", "Removing", 'MaintenanceForm_Action="Remove"', 12)
c.event("[Progress2]", "removes", 'MaintenanceForm_Action="Remove"', 13)
c.event("Remove", "ALL", 'MaintenanceForm_Action="Remove"', 14)
# Close dialog when maintenance action scheduled
c.event("EndDialog", "Return", 'MaintenanceForm_Action<>"Change"', 20)
#c.event("NewDialog", "SelectFeaturesDlg", 'MaintenanceForm_Action="Change"', 21)
maint.cancel("Cancel", "RepairRadioGroup").event("SpawnDialog", "CancelDlg")
def get_installer_filename(self, fullname):
# Factored out to allow overriding in subclasses
if self.target_version:
base_name = "%s.%s-py%s.msi" % (fullname, self.plat_name,
self.target_version)
else:
base_name = "%s.%s.msi" % (fullname, self.plat_name)
installer_name = os.path.join(self.dist_dir, base_name)
return installer_name
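# --- Editor's note (hedged, illustrative values): with dist_dir='dist',
# plat_name='win-amd64' and fullname 'foo-1.0', get_installer_filename()
# above composes:
#
#   no target version:       dist/foo-1.0.win-amd64.msi
#   --target-version=3.8:    dist/foo-1.0.win-amd64-py3.8.msi
#
# (bdist_msi is deprecated since Python 3.9; bdist_wheel is the replacement.)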
| Django-locallibrary/env/Lib/site-packages/setuptools/_distutils/command/bdist_msi.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/setuptools/_distutils/command/bdist_msi.py",
"repo_id": "Django-locallibrary",
"token_count": 16611
} | 19 |
"""distutils.command.install_scripts
Implements the Distutils 'install_scripts' command, for installing
Python scripts."""
# contributed by Bastian Kleineidam
import os
from distutils.core import Command
from distutils import log
from stat import ST_MODE
class install_scripts(Command):
description = "install scripts (Python or otherwise)"
user_options = [
('install-dir=', 'd', "directory to install scripts to"),
('build-dir=','b', "build directory (where to install from)"),
('force', 'f', "force installation (overwrite existing files)"),
('skip-build', None, "skip the build steps"),
]
boolean_options = ['force', 'skip-build']
def initialize_options(self):
self.install_dir = None
self.force = 0
self.build_dir = None
self.skip_build = None
def finalize_options(self):
self.set_undefined_options('build', ('build_scripts', 'build_dir'))
self.set_undefined_options('install',
('install_scripts', 'install_dir'),
('force', 'force'),
('skip_build', 'skip_build'),
)
def run(self):
if not self.skip_build:
self.run_command('build_scripts')
self.outfiles = self.copy_tree(self.build_dir, self.install_dir)
if os.name == 'posix':
# Set the executable bits (owner, group, and world) on
# all the scripts we just installed.
for file in self.get_outputs():
if self.dry_run:
log.info("changing mode of %s", file)
else:
mode = ((os.stat(file)[ST_MODE]) | 0o555) & 0o7777
log.info("changing mode of %s to %o", file, mode)
os.chmod(file, mode)
def get_inputs(self):
return self.distribution.scripts or []
def get_outputs(self):
return self.outfiles or []
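# --- Editor's sketch (hedged): the chmod arithmetic used in run() above.
# OR-ing 0o555 adds read+execute for owner/group/other; the 0o7777 mask
# keeps only permission bits (including setuid/setgid/sticky), dropping
# any file-type bits.
if __name__ == '__main__':
    installed_mode = 0o644                        # rw-r--r--
    new_mode = (installed_mode | 0o555) & 0o7777
    assert new_mode == 0o755                      # rwxr-xr-x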
| Django-locallibrary/env/Lib/site-packages/setuptools/_distutils/command/install_scripts.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/setuptools/_distutils/command/install_scripts.py",
"repo_id": "Django-locallibrary",
"token_count": 913
} | 20 |
"""A simple log mechanism styled after PEP 282."""
# The class here is styled after PEP 282 so that it could later be
# replaced with a standard Python logging implementation.
DEBUG = 1
INFO = 2
WARN = 3
ERROR = 4
FATAL = 5
import sys
class Log:
def __init__(self, threshold=WARN):
self.threshold = threshold
def _log(self, level, msg, args):
if level not in (DEBUG, INFO, WARN, ERROR, FATAL):
raise ValueError('%s wrong log level' % str(level))
if level >= self.threshold:
if args:
msg = msg % args
if level in (WARN, ERROR, FATAL):
stream = sys.stderr
else:
stream = sys.stdout
try:
stream.write('%s\n' % msg)
except UnicodeEncodeError:
# emulate backslashreplace error handler
encoding = stream.encoding
msg = msg.encode(encoding, "backslashreplace").decode(encoding)
stream.write('%s\n' % msg)
stream.flush()
def log(self, level, msg, *args):
self._log(level, msg, args)
def debug(self, msg, *args):
self._log(DEBUG, msg, args)
def info(self, msg, *args):
self._log(INFO, msg, args)
def warn(self, msg, *args):
self._log(WARN, msg, args)
def error(self, msg, *args):
self._log(ERROR, msg, args)
def fatal(self, msg, *args):
self._log(FATAL, msg, args)
_global_log = Log()
log = _global_log.log
debug = _global_log.debug
info = _global_log.info
warn = _global_log.warn
error = _global_log.error
fatal = _global_log.fatal
def set_threshold(level):
# return the old threshold for use from tests
old = _global_log.threshold
_global_log.threshold = level
return old
def set_verbosity(v):
if v <= 0:
set_threshold(WARN)
elif v == 1:
set_threshold(INFO)
elif v >= 2:
set_threshold(DEBUG)
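# --- Editor's usage sketch (hedged): typical interplay of set_verbosity()
# and the module-level helpers defined above.
if __name__ == '__main__':
    set_verbosity(0)            # threshold = WARN
    debug("hidden")             # below threshold, not printed
    warn("shown on stderr")     # WARN and above go to sys.stderr
    old = set_threshold(DEBUG)
    debug("now visible on stdout")
    set_threshold(old)          # restore the previous threshold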
| Django-locallibrary/env/Lib/site-packages/setuptools/_distutils/log.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/setuptools/_distutils/log.py",
"repo_id": "Django-locallibrary",
"token_count": 891
} | 21 |
"""
An OrderedSet is a custom MutableSet that remembers its order, so that every
entry has an index that can be looked up.
Based on a recipe originally posted to ActiveState Recipes by Raymond Hettiger,
and released under the MIT license.
"""
import itertools as it
from collections import deque
try:
# Python 3
from collections.abc import MutableSet, Sequence
except ImportError:
# Python 2.7
from collections import MutableSet, Sequence
SLICE_ALL = slice(None)
__version__ = "3.1"
def is_iterable(obj):
"""
Are we being asked to look up a list of things, instead of a single thing?
We check for the `__iter__` attribute so that this can cover types that
don't have to be known by this module, such as NumPy arrays.
Strings, however, should be considered as atomic values to look up, not
iterables. The same goes for tuples, since they are immutable and therefore
valid entries.
We don't need to check for the Python 2 `unicode` type, because it doesn't
have an `__iter__` attribute anyway.
"""
return (
hasattr(obj, "__iter__")
and not isinstance(obj, str)
and not isinstance(obj, tuple)
)
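# Editor's illustration (hedged), in the doctest style this module uses:
#
#     >>> is_iterable([1, 2])
#     True
#     >>> is_iterable("abc")
#     False
#     >>> is_iterable((1, 2))
#     False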
class OrderedSet(MutableSet, Sequence):
"""
An OrderedSet is a custom MutableSet that remembers its order, so that
every entry has an index that can be looked up.
Example:
>>> OrderedSet([1, 1, 2, 3, 2])
OrderedSet([1, 2, 3])
"""
def __init__(self, iterable=None):
self.items = []
self.map = {}
if iterable is not None:
self |= iterable
def __len__(self):
"""
Returns the number of unique elements in the ordered set
Example:
>>> len(OrderedSet([]))
0
>>> len(OrderedSet([1, 2]))
2
"""
return len(self.items)
def __getitem__(self, index):
"""
Get the item at a given index.
If `index` is a slice, you will get back that slice of items, as a
new OrderedSet.
If `index` is a list or a similar iterable, you'll get a list of
items corresponding to those indices. This is similar to NumPy's
"fancy indexing". The result is not an OrderedSet because you may ask
for duplicate indices, and the number of elements returned should be
the number of elements asked for.
Example:
>>> oset = OrderedSet([1, 2, 3])
>>> oset[1]
2
"""
if isinstance(index, slice) and index == SLICE_ALL:
return self.copy()
elif is_iterable(index):
return [self.items[i] for i in index]
elif hasattr(index, "__index__") or isinstance(index, slice):
result = self.items[index]
if isinstance(result, list):
return self.__class__(result)
else:
return result
else:
raise TypeError("Don't know how to index an OrderedSet by %r" % index)
def copy(self):
"""
Return a shallow copy of this object.
Example:
>>> this = OrderedSet([1, 2, 3])
>>> other = this.copy()
>>> this == other
True
>>> this is other
False
"""
return self.__class__(self)
def __getstate__(self):
if len(self) == 0:
# The state can't be an empty list.
# We need to return a truthy value, or else __setstate__ won't be run.
#
# This could have been done more gracefully by always putting the state
# in a tuple, but this way is backwards- and forwards- compatible with
# previous versions of OrderedSet.
return (None,)
else:
return list(self)
def __setstate__(self, state):
if state == (None,):
self.__init__([])
else:
self.__init__(state)
def __contains__(self, key):
"""
Test if the item is in this ordered set
Example:
>>> 1 in OrderedSet([1, 3, 2])
True
>>> 5 in OrderedSet([1, 3, 2])
False
"""
return key in self.map
def add(self, key):
"""
Add `key` as an item to this OrderedSet, then return its index.
If `key` is already in the OrderedSet, return the index it already
had.
Example:
>>> oset = OrderedSet()
>>> oset.append(3)
0
>>> print(oset)
OrderedSet([3])
"""
if key not in self.map:
self.map[key] = len(self.items)
self.items.append(key)
return self.map[key]
append = add
def update(self, sequence):
"""
Update the set with the given iterable sequence, then return the index
of the last element inserted.
Example:
>>> oset = OrderedSet([1, 2, 3])
>>> oset.update([3, 1, 5, 1, 4])
4
>>> print(oset)
OrderedSet([1, 2, 3, 5, 4])
"""
item_index = None
try:
for item in sequence:
item_index = self.add(item)
except TypeError:
raise ValueError(
"Argument needs to be an iterable, got %s" % type(sequence)
)
return item_index
def index(self, key):
"""
Get the index of a given entry, raising an IndexError if it's not
present.
`key` can be an iterable of entries that is not a string, in which case
this returns a list of indices.
Example:
>>> oset = OrderedSet([1, 2, 3])
>>> oset.index(2)
1
"""
if is_iterable(key):
return [self.index(subkey) for subkey in key]
return self.map[key]
# Provide some compatibility with pd.Index
get_loc = index
get_indexer = index
def pop(self):
"""
Remove and return the last element from the set.
Raises KeyError if the set is empty.
Example:
>>> oset = OrderedSet([1, 2, 3])
>>> oset.pop()
3
"""
if not self.items:
raise KeyError("Set is empty")
elem = self.items[-1]
del self.items[-1]
del self.map[elem]
return elem
def discard(self, key):
"""
Remove an element. Do not raise an exception if absent.
The MutableSet mixin uses this to implement the .remove() method, which
*does* raise an error when asked to remove a non-existent item.
Example:
>>> oset = OrderedSet([1, 2, 3])
>>> oset.discard(2)
>>> print(oset)
OrderedSet([1, 3])
>>> oset.discard(2)
>>> print(oset)
OrderedSet([1, 3])
"""
if key in self:
i = self.map[key]
del self.items[i]
del self.map[key]
for k, v in self.map.items():
if v >= i:
self.map[k] = v - 1
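# (Editor's note: discard() is O(n) -- removing an item shifts every
# later index down, as the loop above shows; pop() of the last item
# avoids that reindexing.)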
def clear(self):
"""
Remove all items from this OrderedSet.
"""
del self.items[:]
self.map.clear()
def __iter__(self):
"""
Example:
>>> list(iter(OrderedSet([1, 2, 3])))
[1, 2, 3]
"""
return iter(self.items)
def __reversed__(self):
"""
Example:
>>> list(reversed(OrderedSet([1, 2, 3])))
[3, 2, 1]
"""
return reversed(self.items)
def __repr__(self):
if not self:
return "%s()" % (self.__class__.__name__,)
return "%s(%r)" % (self.__class__.__name__, list(self))
def __eq__(self, other):
"""
Returns true if the containers have the same items. If `other` is a
Sequence, then order is checked, otherwise it is ignored.
Example:
>>> oset = OrderedSet([1, 3, 2])
>>> oset == [1, 3, 2]
True
>>> oset == [1, 2, 3]
False
>>> oset == [2, 3]
False
>>> oset == OrderedSet([3, 2, 1])
False
"""
# In Python 2 deque is not a Sequence, so treat it as one for
# consistent behavior with Python 3.
if isinstance(other, (Sequence, deque)):
# Check that this OrderedSet contains the same elements, in the
# same order, as the other object.
return list(self) == list(other)
try:
other_as_set = set(other)
except TypeError:
# If `other` can't be converted into a set, it's not equal.
return False
else:
return set(self) == other_as_set
def union(self, *sets):
"""
Combines all unique items.
Each item's order is defined by its first appearance.
Example:
>>> oset = OrderedSet.union(OrderedSet([3, 1, 4, 1, 5]), [1, 3], [2, 0])
>>> print(oset)
OrderedSet([3, 1, 4, 5, 2, 0])
>>> oset.union([8, 9])
OrderedSet([3, 1, 4, 5, 2, 0, 8, 9])
>>> oset | {10}
OrderedSet([3, 1, 4, 5, 2, 0, 10])
"""
cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet
containers = map(list, it.chain([self], sets))
items = it.chain.from_iterable(containers)
return cls(items)
def __and__(self, other):
# the parent implementation of this is backwards
return self.intersection(other)
def intersection(self, *sets):
"""
Returns elements in common between all sets. Order is defined only
by the first set.
Example:
>>> oset = OrderedSet.intersection(OrderedSet([0, 1, 2, 3]), [1, 2, 3])
>>> print(oset)
OrderedSet([1, 2, 3])
>>> oset.intersection([2, 4, 5], [1, 2, 3, 4])
OrderedSet([2])
>>> oset.intersection()
OrderedSet([1, 2, 3])
"""
cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet
if sets:
common = set.intersection(*map(set, sets))
items = (item for item in self if item in common)
else:
items = self
return cls(items)
def difference(self, *sets):
"""
Returns all elements that are in this set but not the others.
Example:
>>> OrderedSet([1, 2, 3]).difference(OrderedSet([2]))
OrderedSet([1, 3])
>>> OrderedSet([1, 2, 3]).difference(OrderedSet([2]), OrderedSet([3]))
OrderedSet([1])
>>> OrderedSet([1, 2, 3]) - OrderedSet([2])
OrderedSet([1, 3])
>>> OrderedSet([1, 2, 3]).difference()
OrderedSet([1, 2, 3])
"""
cls = self.__class__
if sets:
other = set.union(*map(set, sets))
items = (item for item in self if item not in other)
else:
items = self
return cls(items)
def issubset(self, other):
"""
Report whether another set contains this set.
Example:
>>> OrderedSet([1, 2, 3]).issubset({1, 2})
False
>>> OrderedSet([1, 2, 3]).issubset({1, 2, 3, 4})
True
>>> OrderedSet([1, 2, 3]).issubset({1, 4, 3, 5})
False
"""
if len(self) > len(other): # Fast check for obvious cases
return False
return all(item in other for item in self)
def issuperset(self, other):
"""
Report whether this set contains another set.
Example:
>>> OrderedSet([1, 2]).issuperset([1, 2, 3])
False
>>> OrderedSet([1, 2, 3, 4]).issuperset({1, 2, 3})
True
>>> OrderedSet([1, 4, 3, 5]).issuperset({1, 2, 3})
False
"""
if len(self) < len(other): # Fast check for obvious cases
return False
return all(item in self for item in other)
def symmetric_difference(self, other):
"""
Return the symmetric difference of two OrderedSets as a new set.
That is, the new set will contain all elements that are in exactly
one of the sets.
Their order will be preserved, with elements from `self` preceding
elements from `other`.
Example:
>>> this = OrderedSet([1, 4, 3, 5, 7])
>>> other = OrderedSet([9, 7, 1, 3, 2])
>>> this.symmetric_difference(other)
OrderedSet([4, 5, 9, 2])
"""
cls = self.__class__ if isinstance(self, OrderedSet) else OrderedSet
diff1 = cls(self).difference(other)
diff2 = cls(other).difference(self)
return diff1.union(diff2)
def _update_items(self, items):
"""
Replace the 'items' list of this OrderedSet with a new one, updating
self.map accordingly.
"""
self.items = items
self.map = {item: idx for (idx, item) in enumerate(items)}
def difference_update(self, *sets):
"""
Update this OrderedSet to remove items from one or more other sets.
Example:
>>> this = OrderedSet([1, 2, 3])
>>> this.difference_update(OrderedSet([2, 4]))
>>> print(this)
OrderedSet([1, 3])
>>> this = OrderedSet([1, 2, 3, 4, 5])
>>> this.difference_update(OrderedSet([2, 4]), OrderedSet([1, 4, 6]))
>>> print(this)
OrderedSet([3, 5])
"""
items_to_remove = set()
for other in sets:
items_to_remove |= set(other)
self._update_items([item for item in self.items if item not in items_to_remove])
def intersection_update(self, other):
"""
Update this OrderedSet to keep only items in another set, preserving
their order in this set.
Example:
>>> this = OrderedSet([1, 4, 3, 5, 7])
>>> other = OrderedSet([9, 7, 1, 3, 2])
>>> this.intersection_update(other)
>>> print(this)
OrderedSet([1, 3, 7])
"""
other = set(other)
self._update_items([item for item in self.items if item in other])
def symmetric_difference_update(self, other):
"""
Update this OrderedSet to remove items from another set, then
add items from the other set that were not present in this set.
Example:
>>> this = OrderedSet([1, 4, 3, 5, 7])
>>> other = OrderedSet([9, 7, 1, 3, 2])
>>> this.symmetric_difference_update(other)
>>> print(this)
OrderedSet([4, 5, 9, 2])
"""
items_to_add = [item for item in other if item not in self]
items_to_remove = set(other)
self._update_items(
[item for item in self.items if item not in items_to_remove] + items_to_add
)
| Django-locallibrary/env/Lib/site-packages/setuptools/_vendor/ordered_set.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/setuptools/_vendor/ordered_set.py",
"repo_id": "Django-locallibrary",
"token_count": 7212
} | 22 |
"""setuptools.command.egg_info
Create a distribution's .egg-info directory and contents"""
from distutils.filelist import FileList as _FileList
from distutils.errors import DistutilsInternalError
from distutils.util import convert_path
from distutils import log
import distutils.errors
import distutils.filelist
import os
import re
import sys
import io
import warnings
import time
import collections
from setuptools.extern import six
from setuptools.extern.six.moves import map
from setuptools import Command
from setuptools.command.sdist import sdist
from setuptools.command.sdist import walk_revctrl
from setuptools.command.setopt import edit_config
from setuptools.command import bdist_egg
from pkg_resources import (
parse_requirements, safe_name, parse_version,
safe_version, yield_lines, EntryPoint, iter_entry_points, to_filename)
import setuptools.unicode_utils as unicode_utils
from setuptools.glob import glob
from setuptools.extern import packaging
from setuptools import SetuptoolsDeprecationWarning
def translate_pattern(glob):
"""
Translate a file path glob like '*.txt' in to a regular expression.
This differs from fnmatch.translate which allows wildcards to match
directory separators. It also knows about '**/' which matches any number of
directories.
"""
pat = ''
# This will split on '/' within [character classes]. This is deliberate.
chunks = glob.split(os.path.sep)
sep = re.escape(os.sep)
valid_char = '[^%s]' % (sep,)
for c, chunk in enumerate(chunks):
last_chunk = c == len(chunks) - 1
# Chunks that are a literal ** are globstars. They match anything.
if chunk == '**':
if last_chunk:
# Match anything if this is the last component
pat += '.*'
else:
# Match '(name/)*'
pat += '(?:%s+%s)*' % (valid_char, sep)
continue # Break here as the whole path component has been handled
# Find any special characters in the remainder
i = 0
chunk_len = len(chunk)
while i < chunk_len:
char = chunk[i]
if char == '*':
# Match any number of name characters
pat += valid_char + '*'
elif char == '?':
# Match a name character
pat += valid_char
elif char == '[':
# Character class
inner_i = i + 1
# Skip initial !/] chars
if inner_i < chunk_len and chunk[inner_i] == '!':
inner_i = inner_i + 1
if inner_i < chunk_len and chunk[inner_i] == ']':
inner_i = inner_i + 1
# Loop till the closing ] is found
while inner_i < chunk_len and chunk[inner_i] != ']':
inner_i = inner_i + 1
if inner_i >= chunk_len:
# Got to the end of the string without finding a closing ]
# Do not treat this as a matching group, but as a literal [
pat += re.escape(char)
else:
# Grab the insides of the [brackets]
inner = chunk[i + 1:inner_i]
char_class = ''
# Class negation
if inner[0] == '!':
char_class = '^'
inner = inner[1:]
char_class += re.escape(inner)
pat += '[%s]' % (char_class,)
# Skip to the end ]
i = inner_i
else:
pat += re.escape(char)
i += 1
# Join each chunk with the dir separator
if not last_chunk:
pat += sep
pat += r'\Z'
return re.compile(pat, flags=re.MULTILINE | re.DOTALL)
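# --- Editor's sketch (hedged): behavior of translate_pattern() above. The
# separator is escaped per-platform; the results below assume os.sep == '/'.
#
#     translate_pattern('docs/**/*.txt').match('docs/a/b/c.txt')  -> match
#     translate_pattern('*.txt').match('sub/file.txt')            -> None
#                                     ('*' never crosses a separator)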
class InfoCommon:
tag_build = None
tag_date = None
@property
def name(self):
return safe_name(self.distribution.get_name())
def tagged_version(self):
version = self.distribution.get_version()
# egg_info may be called more than once for a distribution,
# in which case the version string already contains all tags.
if self.vtags and version.endswith(self.vtags):
return safe_version(version)
return safe_version(version + self.vtags)
def tags(self):
version = ''
if self.tag_build:
version += self.tag_build
if self.tag_date:
version += time.strftime("-%Y%m%d")
return version
vtags = property(tags)
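# --- Editor's illustration (hedged, illustrative values): with
# tag_build='.dev' and tag_date=True, tags() returns something like
# '.dev-20240101' (the date part comes from time.strftime above), and
# tagged_version() appends that suffix to the distribution version
# before safe_version() normalizes the result.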
class egg_info(InfoCommon, Command):
description = "create a distribution's .egg-info directory"
user_options = [
('egg-base=', 'e', "directory containing .egg-info directories"
" (default: top of the source tree)"),
('tag-date', 'd', "Add date stamp (e.g. 20050528) to version number"),
('tag-build=', 'b', "Specify explicit tag to add to version number"),
('no-date', 'D', "Don't include date stamp [default]"),
]
boolean_options = ['tag-date']
negative_opt = {
'no-date': 'tag-date',
}
def initialize_options(self):
self.egg_base = None
self.egg_name = None
self.egg_info = None
self.egg_version = None
self.broken_egg_info = False
####################################
# allow the 'tag_svn_revision' to be detected and
# set, supporting sdists built on older Setuptools.
@property
def tag_svn_revision(self):
pass
@tag_svn_revision.setter
def tag_svn_revision(self, value):
pass
####################################
def save_version_info(self, filename):
"""
Materialize the value of date into the
build tag. Install build keys in a deterministic order
to avoid arbitrary reordering on subsequent builds.
"""
egg_info = collections.OrderedDict()
# follow the order these keys would have been added
# when PYTHONHASHSEED=0
egg_info['tag_build'] = self.tags()
egg_info['tag_date'] = 0
edit_config(filename, dict(egg_info=egg_info))
def finalize_options(self):
# Note: we need to capture the current value returned
# by `self.tagged_version()`, so we can later update
# `self.distribution.metadata.version` without
# repercussions.
self.egg_name = self.name
self.egg_version = self.tagged_version()
parsed_version = parse_version(self.egg_version)
try:
is_version = isinstance(parsed_version, packaging.version.Version)
spec = (
"%s==%s" if is_version else "%s===%s"
)
list(
parse_requirements(spec % (self.egg_name, self.egg_version))
)
except ValueError as e:
raise distutils.errors.DistutilsOptionError(
"Invalid distribution name or version syntax: %s-%s" %
(self.egg_name, self.egg_version)
) from e
if self.egg_base is None:
dirs = self.distribution.package_dir
self.egg_base = (dirs or {}).get('', os.curdir)
self.ensure_dirname('egg_base')
self.egg_info = to_filename(self.egg_name) + '.egg-info'
if self.egg_base != os.curdir:
self.egg_info = os.path.join(self.egg_base, self.egg_info)
if '-' in self.egg_name:
self.check_broken_egg_info()
# Set package version for the benefit of dumber commands
# (e.g. sdist, bdist_wininst, etc.)
#
self.distribution.metadata.version = self.egg_version
# If we bootstrapped around the lack of a PKG-INFO, as might be the
# case in a fresh checkout, make sure that any special tags get added
# to the version info
#
pd = self.distribution._patched_dist
if pd is not None and pd.key == self.egg_name.lower():
pd._version = self.egg_version
pd._parsed_version = parse_version(self.egg_version)
self.distribution._patched_dist = None
def write_or_delete_file(self, what, filename, data, force=False):
"""Write `data` to `filename` or delete if empty
If `data` is non-empty, this routine is the same as ``write_file()``.
If `data` is empty but not ``None``, this is the same as calling
``delete_file(filename)``. If `data` is ``None``, then this is a no-op
unless `filename` exists, in which case a warning is issued about the
orphaned file (if `force` is false), or deleted (if `force` is true).
"""
if data:
self.write_file(what, filename, data)
elif os.path.exists(filename):
if data is None and not force:
log.warn(
"%s not set in setup(), but %s exists", what, filename
)
return
else:
self.delete_file(filename)
def write_file(self, what, filename, data):
"""Write `data` to `filename` (if not a dry run) after announcing it
`what` is used in a log message to identify what is being written
to the file.
"""
log.info("writing %s to %s", what, filename)
if not six.PY2:
data = data.encode("utf-8")
if not self.dry_run:
f = open(filename, 'wb')
f.write(data)
f.close()
def delete_file(self, filename):
"""Delete `filename` (if not a dry run) after announcing it"""
log.info("deleting %s", filename)
if not self.dry_run:
os.unlink(filename)
def run(self):
self.mkpath(self.egg_info)
os.utime(self.egg_info, None)
installer = self.distribution.fetch_build_egg
for ep in iter_entry_points('egg_info.writers'):
ep.require(installer=installer)
writer = ep.resolve()
writer(self, ep.name, os.path.join(self.egg_info, ep.name))
# Get rid of native_libs.txt if it was put there by older bdist_egg
nl = os.path.join(self.egg_info, "native_libs.txt")
if os.path.exists(nl):
self.delete_file(nl)
self.find_sources()
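# --- Editor's note (hedged): the 'egg_info.writers' entry points iterated
# in run() resolve to callables taking (cmd, basename, filename) -- e.g.
# write_pkg_info, write_requirements and write_toplevel_names defined
# later in this module.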
def find_sources(self):
"""Generate SOURCES.txt manifest file"""
manifest_filename = os.path.join(self.egg_info, "SOURCES.txt")
mm = manifest_maker(self.distribution)
mm.manifest = manifest_filename
mm.run()
self.filelist = mm.filelist
def check_broken_egg_info(self):
bei = self.egg_name + '.egg-info'
if self.egg_base != os.curdir:
bei = os.path.join(self.egg_base, bei)
if os.path.exists(bei):
log.warn(
"-" * 78 + '\n'
"Note: Your current .egg-info directory has a '-' in its name;"
'\nthis will not work correctly with "setup.py develop".\n\n'
'Please rename %s to %s to correct this problem.\n' + '-' * 78,
bei, self.egg_info
)
self.broken_egg_info = self.egg_info
self.egg_info = bei # make it work for now
class FileList(_FileList):
# Implementations of the various MANIFEST.in commands
def process_template_line(self, line):
# Parse the line: split it up, make sure the right number of words
# is there, and return the relevant words. 'action' is always
# defined: it's the first word of the line. Which of the other
# three are defined depends on the action; it'll be either
# patterns, (dir and patterns), or (dir_pattern).
(action, patterns, dir, dir_pattern) = self._parse_template_line(line)
# OK, now we know that the action is valid and we have the
# right number of words on the line for that action -- so we
# can proceed with minimal error-checking.
if action == 'include':
self.debug_print("include " + ' '.join(patterns))
for pattern in patterns:
if not self.include(pattern):
log.warn("warning: no files found matching '%s'", pattern)
elif action == 'exclude':
self.debug_print("exclude " + ' '.join(patterns))
for pattern in patterns:
if not self.exclude(pattern):
log.warn(("warning: no previously-included files "
"found matching '%s'"), pattern)
elif action == 'global-include':
self.debug_print("global-include " + ' '.join(patterns))
for pattern in patterns:
if not self.global_include(pattern):
log.warn(("warning: no files found matching '%s' "
"anywhere in distribution"), pattern)
elif action == 'global-exclude':
self.debug_print("global-exclude " + ' '.join(patterns))
for pattern in patterns:
if not self.global_exclude(pattern):
log.warn(("warning: no previously-included files matching "
"'%s' found anywhere in distribution"),
pattern)
elif action == 'recursive-include':
self.debug_print("recursive-include %s %s" %
(dir, ' '.join(patterns)))
for pattern in patterns:
if not self.recursive_include(dir, pattern):
log.warn(("warning: no files found matching '%s' "
"under directory '%s'"),
pattern, dir)
elif action == 'recursive-exclude':
self.debug_print("recursive-exclude %s %s" %
(dir, ' '.join(patterns)))
for pattern in patterns:
if not self.recursive_exclude(dir, pattern):
log.warn(("warning: no previously-included files matching "
"'%s' found under directory '%s'"),
pattern, dir)
elif action == 'graft':
self.debug_print("graft " + dir_pattern)
if not self.graft(dir_pattern):
log.warn("warning: no directories found matching '%s'",
dir_pattern)
elif action == 'prune':
self.debug_print("prune " + dir_pattern)
if not self.prune(dir_pattern):
log.warn(("no previously-included directories found "
"matching '%s'"), dir_pattern)
else:
raise DistutilsInternalError(
"this cannot happen: invalid action '%s'" % action)
def _remove_files(self, predicate):
"""
Remove all files from the file list that match the predicate.
Return True if any matching files were removed
"""
found = False
for i in range(len(self.files) - 1, -1, -1):
if predicate(self.files[i]):
self.debug_print(" removing " + self.files[i])
del self.files[i]
found = True
return found
def include(self, pattern):
"""Include files that match 'pattern'."""
found = [f for f in glob(pattern) if not os.path.isdir(f)]
self.extend(found)
return bool(found)
def exclude(self, pattern):
"""Exclude files that match 'pattern'."""
match = translate_pattern(pattern)
return self._remove_files(match.match)
def recursive_include(self, dir, pattern):
"""
Include all files anywhere in 'dir/' that match the pattern.
"""
full_pattern = os.path.join(dir, '**', pattern)
found = [f for f in glob(full_pattern, recursive=True)
if not os.path.isdir(f)]
self.extend(found)
return bool(found)
def recursive_exclude(self, dir, pattern):
"""
Exclude any file anywhere in 'dir/' that match the pattern.
"""
match = translate_pattern(os.path.join(dir, '**', pattern))
return self._remove_files(match.match)
def graft(self, dir):
"""Include all files from 'dir/'."""
found = [
item
for match_dir in glob(dir)
for item in distutils.filelist.findall(match_dir)
]
self.extend(found)
return bool(found)
def prune(self, dir):
"""Filter out files from 'dir/'."""
match = translate_pattern(os.path.join(dir, '**'))
return self._remove_files(match.match)
def global_include(self, pattern):
"""
Include all files anywhere in the current directory that match the
pattern. This is very inefficient on large file trees.
"""
if self.allfiles is None:
self.findall()
match = translate_pattern(os.path.join('**', pattern))
found = [f for f in self.allfiles if match.match(f)]
self.extend(found)
return bool(found)
def global_exclude(self, pattern):
"""
Exclude all files anywhere that match the pattern.
"""
match = translate_pattern(os.path.join('**', pattern))
return self._remove_files(match.match)
def append(self, item):
if item.endswith('\r'): # Fix older sdists built on Windows
item = item[:-1]
path = convert_path(item)
if self._safe_path(path):
self.files.append(path)
def extend(self, paths):
self.files.extend(filter(self._safe_path, paths))
def _repair(self):
"""
Replace self.files with only safe paths
Because some owners of FileList manipulate the underlying
``files`` attribute directly, this method must be called to
repair those paths.
"""
self.files = list(filter(self._safe_path, self.files))
def _safe_path(self, path):
enc_warn = "'%s' not %s encodable -- skipping"
# To avoid accidental transcoding errors, decode to unicode first
u_path = unicode_utils.filesys_decode(path)
if u_path is None:
log.warn("'%s' in unexpected encoding -- skipping" % path)
return False
# Must ensure utf-8 encodability
utf8_path = unicode_utils.try_encode(u_path, "utf-8")
if utf8_path is None:
log.warn(enc_warn, path, 'utf-8')
return False
try:
# accept if either way checks out
if os.path.exists(u_path) or os.path.exists(utf8_path):
return True
# this will catch any encode errors decoding u_path
except UnicodeEncodeError:
log.warn(enc_warn, path, sys.getfilesystemencoding())
class manifest_maker(sdist):
template = "MANIFEST.in"
def initialize_options(self):
self.use_defaults = 1
self.prune = 1
self.manifest_only = 1
self.force_manifest = 1
def finalize_options(self):
pass
def run(self):
self.filelist = FileList()
if not os.path.exists(self.manifest):
self.write_manifest() # it must exist so it'll get in the list
self.add_defaults()
if os.path.exists(self.template):
self.read_template()
self.prune_file_list()
self.filelist.sort()
self.filelist.remove_duplicates()
self.write_manifest()
def _manifest_normalize(self, path):
path = unicode_utils.filesys_decode(path)
return path.replace(os.sep, '/')
def write_manifest(self):
"""
Write the file list in 'self.filelist' to the manifest file
named by 'self.manifest'.
"""
self.filelist._repair()
# _repair() ensures encodability, but not unicode; normalize paths now
files = [self._manifest_normalize(f) for f in self.filelist.files]
msg = "writing manifest file '%s'" % self.manifest
self.execute(write_file, (self.manifest, files), msg)
def warn(self, msg):
if not self._should_suppress_warning(msg):
sdist.warn(self, msg)
@staticmethod
def _should_suppress_warning(msg):
"""
suppress missing-file warnings from sdist
"""
return re.match(r"standard file .*not found", msg)
def add_defaults(self):
sdist.add_defaults(self)
self.check_license()
self.filelist.append(self.template)
self.filelist.append(self.manifest)
rcfiles = list(walk_revctrl())
if rcfiles:
self.filelist.extend(rcfiles)
elif os.path.exists(self.manifest):
self.read_manifest()
if os.path.exists("setup.py"):
# setup.py should be included by default, even if it's not
# the script called to create the sdist
self.filelist.append("setup.py")
ei_cmd = self.get_finalized_command('egg_info')
self.filelist.graft(ei_cmd.egg_info)
def prune_file_list(self):
build = self.get_finalized_command('build')
base_dir = self.distribution.get_fullname()
self.filelist.prune(build.build_base)
self.filelist.prune(base_dir)
sep = re.escape(os.sep)
self.filelist.exclude_pattern(r'(^|' + sep + r')(RCS|CVS|\.svn)' + sep,
is_regex=1)
def write_file(filename, contents):
"""Create a file with the specified name and write 'contents' (a
sequence of strings without line terminators) to it.
"""
contents = "\n".join(contents)
# assuming the contents have been vetted for utf-8 encoding
contents = contents.encode("utf-8")
with open(filename, "wb") as f: # always write POSIX-style manifest
f.write(contents)
def write_pkg_info(cmd, basename, filename):
log.info("writing %s", filename)
if not cmd.dry_run:
metadata = cmd.distribution.metadata
metadata.version, oldver = cmd.egg_version, metadata.version
metadata.name, oldname = cmd.egg_name, metadata.name
try:
# write unescaped data to PKG-INFO, so older pkg_resources
# can still parse it
metadata.write_pkg_info(cmd.egg_info)
finally:
metadata.name, metadata.version = oldname, oldver
safe = getattr(cmd.distribution, 'zip_safe', None)
bdist_egg.write_safety_flag(cmd.egg_info, safe)
def warn_depends_obsolete(cmd, basename, filename):
if os.path.exists(filename):
log.warn(
"WARNING: 'depends.txt' is not used by setuptools 0.6!\n"
"Use the install_requires/extras_require setup() args instead."
)
def _write_requirements(stream, reqs):
lines = yield_lines(reqs or ())
def append_cr(line):
return line + '\n'
lines = map(append_cr, lines)
stream.writelines(lines)
def write_requirements(cmd, basename, filename):
dist = cmd.distribution
data = six.StringIO()
_write_requirements(data, dist.install_requires)
extras_require = dist.extras_require or {}
for extra in sorted(extras_require):
data.write('\n[{extra}]\n'.format(**vars()))
_write_requirements(data, extras_require[extra])
cmd.write_or_delete_file("requirements", filename, data.getvalue())
def write_setup_requirements(cmd, basename, filename):
data = io.StringIO()
_write_requirements(data, cmd.distribution.setup_requires)
cmd.write_or_delete_file("setup-requirements", filename, data.getvalue())
def write_toplevel_names(cmd, basename, filename):
pkgs = dict.fromkeys(
[
k.split('.', 1)[0]
for k in cmd.distribution.iter_distribution_names()
]
)
cmd.write_file("top-level names", filename, '\n'.join(sorted(pkgs)) + '\n')
def overwrite_arg(cmd, basename, filename):
write_arg(cmd, basename, filename, True)
def write_arg(cmd, basename, filename, force=False):
argname = os.path.splitext(basename)[0]
value = getattr(cmd.distribution, argname, None)
if value is not None:
value = '\n'.join(value) + '\n'
cmd.write_or_delete_file(argname, filename, value, force)
def write_entries(cmd, basename, filename):
ep = cmd.distribution.entry_points
if isinstance(ep, six.string_types) or ep is None:
data = ep
elif ep is not None:
data = []
for section, contents in sorted(ep.items()):
if not isinstance(contents, six.string_types):
contents = EntryPoint.parse_group(section, contents)
contents = '\n'.join(sorted(map(str, contents.values())))
data.append('[%s]\n%s\n\n' % (section, contents))
data = ''.join(data)
cmd.write_or_delete_file('entry points', filename, data, True)
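# For illustration only (hypothetical entry points, not part of setuptools):
# a declaration such as {'console_scripts': ['tool = pkg.cli:main']} is
# serialized to entry_points.txt as:
#
#   [console_scripts]
#   tool = pkg.cli:main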
def get_pkg_info_revision():
"""
Get a -r### off of PKG-INFO Version in case this is an sdist of
a subversion revision.
"""
warnings.warn(
"get_pkg_info_revision is deprecated.", EggInfoDeprecationWarning)
if os.path.exists('PKG-INFO'):
with io.open('PKG-INFO') as f:
for line in f:
match = re.match(r"Version:.*-r(\d+)\s*$", line)
if match:
return int(match.group(1))
return 0
class EggInfoDeprecationWarning(SetuptoolsDeprecationWarning):
"""Deprecated behavior warning for EggInfo, bypassing suppression."""
| Django-locallibrary/env/Lib/site-packages/setuptools/command/egg_info.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/setuptools/command/egg_info.py",
"repo_id": "Django-locallibrary",
"token_count": 11485
} | 23 |
from distutils.dep_util import newer_group
# yes, this was almost entirely copy-pasted from
# 'newer_pairwise()'; it is just another convenience
# function.
def newer_pairwise_group(sources_groups, targets):
"""Walk both arguments in parallel, testing if each source group is newer
than its corresponding target. Returns a pair of lists (sources_groups,
targets) in which each source group is newer than its target, according to the semantics
of 'newer_group()'.
"""
if len(sources_groups) != len(targets):
raise ValueError(
"'sources_group' and 'targets' must be the same length")
# build a pair of lists (sources_groups, targets) where source is newer
n_sources = []
n_targets = []
for i in range(len(sources_groups)):
if newer_group(sources_groups[i], targets[i]):
n_sources.append(sources_groups[i])
n_targets.append(targets[i])
return n_sources, n_targets
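# Illustrative call (hypothetical file names): given groups [['a.c', 'a.h'], ['b.c']]
# and targets ['a.o', 'b.o'], only the pairs whose target is older than some file in
# its source group are returned, e.g. ([['b.c']], ['b.o']) when just b.o is stale.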
| Django-locallibrary/env/Lib/site-packages/setuptools/dep_util.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/setuptools/dep_util.py",
"repo_id": "Django-locallibrary",
"token_count": 347
} | 24 |
"""
Improved support for Microsoft Visual C++ compilers.
Known supported compilers:
--------------------------
Microsoft Visual C++ 9.0:
Microsoft Visual C++ Compiler for Python 2.7 (x86, amd64)
Microsoft Windows SDK 6.1 (x86, x64, ia64)
Microsoft Windows SDK 7.0 (x86, x64, ia64)
Microsoft Visual C++ 10.0:
Microsoft Windows SDK 7.1 (x86, x64, ia64)
Microsoft Visual C++ 14.X:
Microsoft Visual C++ Build Tools 2015 (x86, x64, arm)
Microsoft Visual Studio Build Tools 2017 (x86, x64, arm, arm64)
Microsoft Visual Studio Build Tools 2019 (x86, x64, arm, arm64)
This may also support compilers shipped with compatible Visual Studio versions.
"""
import json
from io import open
from os import listdir, pathsep
from os.path import join, isfile, isdir, dirname
import sys
import platform
import itertools
import subprocess
import distutils.errors
from setuptools.extern.packaging.version import LegacyVersion
from setuptools.extern.six.moves import filterfalse
from .monkey import get_unpatched
if platform.system() == 'Windows':
from setuptools.extern.six.moves import winreg
from os import environ
else:
# Mock winreg and environ so the module can be imported on this platform.
class winreg:
HKEY_USERS = None
HKEY_CURRENT_USER = None
HKEY_LOCAL_MACHINE = None
HKEY_CLASSES_ROOT = None
environ = dict()
_msvc9_suppress_errors = (
# msvc9compiler isn't available on some platforms
ImportError,
# msvc9compiler raises DistutilsPlatformError in some
# environments. See #1118.
distutils.errors.DistutilsPlatformError,
)
try:
from distutils.msvc9compiler import Reg
except _msvc9_suppress_errors:
pass
def msvc9_find_vcvarsall(version):
"""
Patched "distutils.msvc9compiler.find_vcvarsall" to use the standalone
compiler build for Python
(VCForPython / Microsoft Visual C++ Compiler for Python 2.7).
Fall back to original behavior when the standalone compiler is not
available.
Redirect the path of "vcvarsall.bat".
Parameters
----------
version: float
Required Microsoft Visual C++ version.
Return
------
str
vcvarsall.bat path
"""
vc_base = r'Software\%sMicrosoft\DevDiv\VCForPython\%0.1f'
key = vc_base % ('', version)
try:
# Per-user installs register the compiler path here
productdir = Reg.get_value(key, "installdir")
except KeyError:
try:
# All-user installs on a 64-bit system register here
key = vc_base % ('Wow6432Node\\', version)
productdir = Reg.get_value(key, "installdir")
except KeyError:
productdir = None
if productdir:
vcvarsall = join(productdir, "vcvarsall.bat")
if isfile(vcvarsall):
return vcvarsall
return get_unpatched(msvc9_find_vcvarsall)(version)
def msvc9_query_vcvarsall(ver, arch='x86', *args, **kwargs):
"""
Patched "distutils.msvc9compiler.query_vcvarsall" for support extra
Microsoft Visual C++ 9.0 and 10.0 compilers.
Set environment without use of "vcvarsall.bat".
Parameters
----------
ver: float
Required Microsoft Visual C++ version.
arch: str
Target architecture.
Return
------
dict
environment
"""
# Try to get environment from vcvarsall.bat (Classical way)
try:
orig = get_unpatched(msvc9_query_vcvarsall)
return orig(ver, arch, *args, **kwargs)
except distutils.errors.DistutilsPlatformError:
# Pass error if Vcvarsall.bat is missing
pass
except ValueError:
# Pass error if environment not set after executing vcvarsall.bat
pass
# If error, try to set environment directly
try:
return EnvironmentInfo(arch, ver).return_env()
except distutils.errors.DistutilsPlatformError as exc:
_augment_exception(exc, ver, arch)
raise
def _msvc14_find_vc2015():
"""Python 3.8 "distutils/_msvccompiler.py" backport"""
try:
key = winreg.OpenKey(
winreg.HKEY_LOCAL_MACHINE,
r"Software\Microsoft\VisualStudio\SxS\VC7",
0,
winreg.KEY_READ | winreg.KEY_WOW64_32KEY
)
except OSError:
return None, None
best_version = 0
best_dir = None
with key:
for i in itertools.count():
try:
v, vc_dir, vt = winreg.EnumValue(key, i)
except OSError:
break
if v and vt == winreg.REG_SZ and isdir(vc_dir):
try:
version = int(float(v))
except (ValueError, TypeError):
continue
if version >= 14 and version > best_version:
best_version, best_dir = version, vc_dir
return best_version, best_dir
def _msvc14_find_vc2017():
"""Python 3.8 "distutils/_msvccompiler.py" backport
Returns "15, path" based on the result of invoking vswhere.exe
If no install is found, returns "None, None"
The version is returned to avoid unnecessarily changing the function
result. It may be ignored when the path is not None.
If vswhere.exe is not available, by definition, VS 2017 is not
installed.
"""
root = environ.get("ProgramFiles(x86)") or environ.get("ProgramFiles")
if not root:
return None, None
try:
path = subprocess.check_output([
join(root, "Microsoft Visual Studio", "Installer", "vswhere.exe"),
"-latest",
"-prerelease",
"-requires", "Microsoft.VisualStudio.Component.VC.Tools.x86.x64",
"-property", "installationPath",
"-products", "*",
]).decode(encoding="mbcs", errors="strict").strip()
except (subprocess.CalledProcessError, OSError, UnicodeDecodeError):
return None, None
path = join(path, "VC", "Auxiliary", "Build")
if isdir(path):
return 15, path
return None, None
PLAT_SPEC_TO_RUNTIME = {
'x86': 'x86',
'x86_amd64': 'x64',
'x86_arm': 'arm',
'x86_arm64': 'arm64'
}
def _msvc14_find_vcvarsall(plat_spec):
"""Python 3.8 "distutils/_msvccompiler.py" backport"""
_, best_dir = _msvc14_find_vc2017()
vcruntime = None
if plat_spec in PLAT_SPEC_TO_RUNTIME:
vcruntime_plat = PLAT_SPEC_TO_RUNTIME[plat_spec]
else:
vcruntime_plat = 'x64' if 'amd64' in plat_spec else 'x86'
if best_dir:
vcredist = join(best_dir, "..", "..", "redist", "MSVC", "**",
vcruntime_plat, "Microsoft.VC14*.CRT",
"vcruntime140.dll")
try:
import glob
vcruntime = glob.glob(vcredist, recursive=True)[-1]
except (ImportError, OSError, LookupError):
vcruntime = None
if not best_dir:
best_version, best_dir = _msvc14_find_vc2015()
if best_version:
vcruntime = join(best_dir, 'redist', vcruntime_plat,
"Microsoft.VC140.CRT", "vcruntime140.dll")
if not best_dir:
return None, None
vcvarsall = join(best_dir, "vcvarsall.bat")
if not isfile(vcvarsall):
return None, None
if not vcruntime or not isfile(vcruntime):
vcruntime = None
return vcvarsall, vcruntime
def _msvc14_get_vc_env(plat_spec):
"""Python 3.8 "distutils/_msvccompiler.py" backport"""
if "DISTUTILS_USE_SDK" in environ:
return {
key.lower(): value
for key, value in environ.items()
}
vcvarsall, vcruntime = _msvc14_find_vcvarsall(plat_spec)
if not vcvarsall:
raise distutils.errors.DistutilsPlatformError(
"Unable to find vcvarsall.bat"
)
try:
out = subprocess.check_output(
'cmd /u /c "{}" {} && set'.format(vcvarsall, plat_spec),
stderr=subprocess.STDOUT,
).decode('utf-16le', errors='replace')
except subprocess.CalledProcessError as exc:
raise distutils.errors.DistutilsPlatformError(
"Error executing {}".format(exc.cmd)
) from exc
env = {
key.lower(): value
for key, _, value in
(line.partition('=') for line in out.splitlines())
if key and value
}
if vcruntime:
env['py_vcruntime_redist'] = vcruntime
return env
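# Illustrative (truncated) result of _msvc14_get_vc_env('x86_amd64'); actual
# values depend entirely on the local Visual Studio installation:
#   {'include': r'C:\...\include;...', 'lib': '...', 'path': '...',
#    'py_vcruntime_redist': r'C:\...\vcruntime140.dll'}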
def msvc14_get_vc_env(plat_spec):
"""
Patched "distutils._msvccompiler._get_vc_env" for support extra
Microsoft Visual C++ 14.X compilers.
Set environment without use of "vcvarsall.bat".
Parameters
----------
plat_spec: str
Target architecture.
Return
------
dict
environment
"""
# Always use backport from CPython 3.8
try:
return _msvc14_get_vc_env(plat_spec)
except distutils.errors.DistutilsPlatformError as exc:
_augment_exception(exc, 14.0)
raise
def msvc14_gen_lib_options(*args, **kwargs):
"""
Patched "distutils._msvccompiler.gen_lib_options" for fix
compatibility between "numpy.distutils" and "distutils._msvccompiler"
(for Numpy < 1.11.2)
"""
if "numpy.distutils" in sys.modules:
import numpy as np
if LegacyVersion(np.__version__) < LegacyVersion('1.11.2'):
return np.distutils.ccompiler.gen_lib_options(*args, **kwargs)
return get_unpatched(msvc14_gen_lib_options)(*args, **kwargs)
def _augment_exception(exc, version, arch=''):
"""
Add details to the exception message to help guide the user
as to what action will resolve it.
"""
# Error if MSVC++ directory not found or environment not set
message = exc.args[0]
if "vcvarsall" in message.lower() or "visual c" in message.lower():
# Special error message if MSVC++ not installed
tmpl = 'Microsoft Visual C++ {version:0.1f} is required.'
message = tmpl.format(**locals())
msdownload = 'www.microsoft.com/download/details.aspx?id=%d'
if version == 9.0:
if arch.lower().find('ia64') > -1:
# For VC++ 9.0, if IA64 support is needed, redirect user
# to Windows SDK 7.0.
# Note: No download link available from Microsoft.
message += ' Get it with "Microsoft Windows SDK 7.0"'
else:
# For VC++ 9.0 redirect user to Vc++ for Python 2.7 :
# This redirection link is maintained by Microsoft.
# Contact [email protected] if it needs updating.
message += ' Get it from http://aka.ms/vcpython27'
elif version == 10.0:
# For VC++ 10.0 Redirect user to Windows SDK 7.1
message += ' Get it with "Microsoft Windows SDK 7.1": '
message += msdownload % 8279
elif version >= 14.0:
# For VC++ 14.X Redirect user to latest Visual C++ Build Tools
message += (' Get it with "Build Tools for Visual Studio": '
r'https://visualstudio.microsoft.com/downloads/')
exc.args = (message, )
class PlatformInfo:
"""
Current and Target Architectures information.
Parameters
----------
arch: str
Target architecture.
"""
current_cpu = environ.get('processor_architecture', '').lower()
def __init__(self, arch):
self.arch = arch.lower().replace('x64', 'amd64')
@property
def target_cpu(self):
"""
Return Target CPU architecture.
Return
------
str
Target CPU
"""
return self.arch[self.arch.find('_') + 1:]
def target_is_x86(self):
"""
Return True if target CPU is x86 32-bit.
Return
------
bool
CPU is x86 32 bits
"""
return self.target_cpu == 'x86'
def current_is_x86(self):
"""
Return True if current CPU is x86 32-bit.
Return
------
bool
CPU is x86 32 bits
"""
return self.current_cpu == 'x86'
def current_dir(self, hidex86=False, x64=False):
"""
Current platform specific subfolder.
Parameters
----------
hidex86: bool
return '' and not '\x86' if architecture is x86.
x64: bool
return '\x64' and not '\amd64' if architecture is amd64.
Return
------
str
subfolder: '\current', or '' (see hidex86 parameter)
"""
return (
'' if (self.current_cpu == 'x86' and hidex86) else
r'\x64' if (self.current_cpu == 'amd64' and x64) else
r'\%s' % self.current_cpu
)
def target_dir(self, hidex86=False, x64=False):
r"""
Target platform specific subfolder.
Parameters
----------
hidex86: bool
return '' and not '\x86' if architecture is x86.
x64: bool
return '\x64' and not '\amd64' if architecture is amd64.
Return
------
str
subfolder: '\target', or '' (see hidex86 parameter)
"""
return (
'' if (self.target_cpu == 'x86' and hidex86) else
r'\x64' if (self.target_cpu == 'amd64' and x64) else
r'\%s' % self.target_cpu
)
def cross_dir(self, forcex86=False):
r"""
Cross platform specific subfolder.
Parameters
----------
forcex86: bool
Use 'x86' as current architecture even if current architecture is
not x86.
Return
------
str
subfolder: '' if target architecture is current architecture,
'\current_target' if not.
"""
current = 'x86' if forcex86 else self.current_cpu
return (
'' if self.target_cpu == current else
self.target_dir().replace('\\', '\\%s_' % current)
)
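# Illustrative values (hypothetical host/target): with current CPU 'x86' and
# arch 'x86_amd64', target_dir() yields r'\amd64' and cross_dir() yields
# r'\x86_amd64'; when host and target architectures match, cross_dir() is ''.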
class RegistryInfo:
"""
Microsoft Visual Studio related registry information.
Parameters
----------
platform_info: PlatformInfo
"PlatformInfo" instance.
"""
HKEYS = (winreg.HKEY_USERS,
winreg.HKEY_CURRENT_USER,
winreg.HKEY_LOCAL_MACHINE,
winreg.HKEY_CLASSES_ROOT)
def __init__(self, platform_info):
self.pi = platform_info
@property
def visualstudio(self):
"""
Microsoft Visual Studio root registry key.
Return
------
str
Registry key
"""
return 'VisualStudio'
@property
def sxs(self):
"""
Microsoft Visual Studio SxS registry key.
Return
------
str
Registry key
"""
return join(self.visualstudio, 'SxS')
@property
def vc(self):
"""
Microsoft Visual C++ VC7 registry key.
Return
------
str
Registry key
"""
return join(self.sxs, 'VC7')
@property
def vs(self):
"""
Microsoft Visual Studio VS7 registry key.
Return
------
str
Registry key
"""
return join(self.sxs, 'VS7')
@property
def vc_for_python(self):
"""
Microsoft Visual C++ for Python registry key.
Return
------
str
Registry key
"""
return r'DevDiv\VCForPython'
@property
def microsoft_sdk(self):
"""
Microsoft SDK registry key.
Return
------
str
Registry key
"""
return 'Microsoft SDKs'
@property
def windows_sdk(self):
"""
Microsoft Windows/Platform SDK registry key.
Return
------
str
Registry key
"""
return join(self.microsoft_sdk, 'Windows')
@property
def netfx_sdk(self):
"""
Microsoft .NET Framework SDK registry key.
Return
------
str
Registry key
"""
return join(self.microsoft_sdk, 'NETFXSDK')
@property
def windows_kits_roots(self):
"""
Microsoft Windows Kits Roots registry key.
Return
------
str
Registry key
"""
return r'Windows Kits\Installed Roots'
def microsoft(self, key, x86=False):
"""
Return key in Microsoft software registry.
Parameters
----------
key: str
Registry key path where look.
x86: str
Force x86 software registry.
Return
------
str
Registry key
"""
node64 = '' if self.pi.current_is_x86() or x86 else 'Wow6432Node'
return join('Software', node64, 'Microsoft', key)
def lookup(self, key, name):
"""
Look for values in registry in Microsoft software registry.
Parameters
----------
key: str
Registry key path where look.
name: str
Value name to find.
Return
------
str
value
"""
key_read = winreg.KEY_READ
openkey = winreg.OpenKey
closekey = winreg.CloseKey
ms = self.microsoft
for hkey in self.HKEYS:
bkey = None
try:
bkey = openkey(hkey, ms(key), 0, key_read)
except (OSError, IOError):
if not self.pi.current_is_x86():
try:
bkey = openkey(hkey, ms(key, True), 0, key_read)
except (OSError, IOError):
continue
else:
continue
try:
return winreg.QueryValueEx(bkey, name)[0]
except (OSError, IOError):
pass
finally:
if bkey:
closekey(bkey)
class SystemInfo:
"""
Microsoft Windows and Visual Studio related system information.
Parameters
----------
registry_info: RegistryInfo
"RegistryInfo" instance.
vc_ver: float
Required Microsoft Visual C++ version.
"""
# Variables and properties in this class use the original CamelCase variable
# names from Microsoft source files for easier comparison.
WinDir = environ.get('WinDir', '')
ProgramFiles = environ.get('ProgramFiles', '')
ProgramFilesx86 = environ.get('ProgramFiles(x86)', ProgramFiles)
def __init__(self, registry_info, vc_ver=None):
self.ri = registry_info
self.pi = self.ri.pi
self.known_vs_paths = self.find_programdata_vs_vers()
# Except for VS15+, VC version is aligned with VS version
self.vs_ver = self.vc_ver = (
vc_ver or self._find_latest_available_vs_ver())
def _find_latest_available_vs_ver(self):
"""
Find the latest VC version
Return
------
float
version
"""
reg_vc_vers = self.find_reg_vs_vers()
if not (reg_vc_vers or self.known_vs_paths):
raise distutils.errors.DistutilsPlatformError(
'No Microsoft Visual C++ version found')
vc_vers = set(reg_vc_vers)
vc_vers.update(self.known_vs_paths)
return sorted(vc_vers)[-1]
def find_reg_vs_vers(self):
"""
Find Microsoft Visual Studio versions available in registry.
Return
------
list of float
Versions
"""
ms = self.ri.microsoft
vckeys = (self.ri.vc, self.ri.vc_for_python, self.ri.vs)
vs_vers = []
for hkey in self.ri.HKEYS:
for key in vckeys:
try:
bkey = winreg.OpenKey(hkey, ms(key), 0, winreg.KEY_READ)
except (OSError, IOError):
continue
with bkey:
subkeys, values, _ = winreg.QueryInfoKey(bkey)
for i in range(values):
try:
ver = float(winreg.EnumValue(bkey, i)[0])
if ver not in vs_vers:
vs_vers.append(ver)
except ValueError:
pass
for i in range(subkeys):
try:
ver = float(winreg.EnumKey(bkey, i))
if ver not in vs_vers:
vs_vers.append(ver)
except ValueError:
pass
return sorted(vs_vers)
def find_programdata_vs_vers(self):
r"""
Find Visual Studio 2017+ versions from information in
"C:\ProgramData\Microsoft\VisualStudio\Packages\_Instances".
Return
------
dict
float version as key, path as value.
"""
vs_versions = {}
instances_dir = \
r'C:\ProgramData\Microsoft\VisualStudio\Packages\_Instances'
try:
hashed_names = listdir(instances_dir)
except (OSError, IOError):
# Directory does not exist for all Visual Studio versions
return vs_versions
for name in hashed_names:
try:
# Get VS installation path from "state.json" file
state_path = join(instances_dir, name, 'state.json')
with open(state_path, 'rt', encoding='utf-8') as state_file:
state = json.load(state_file)
vs_path = state['installationPath']
# Raises OSError if this VS installation does not contain VC
listdir(join(vs_path, r'VC\Tools\MSVC'))
# Store version and path
vs_versions[self._as_float_version(
state['installationVersion'])] = vs_path
except (OSError, IOError, KeyError):
# Skip if "state.json" file is missing or badly formatted
continue
return vs_versions
@staticmethod
def _as_float_version(version):
"""
Return a string version as a simplified float version (major.minor)
Parameters
----------
version: str
Version.
Return
------
float
version
"""
return float('.'.join(version.split('.')[:2]))
@property
def VSInstallDir(self):
"""
Microsoft Visual Studio directory.
Return
------
str
path
"""
# Default path
default = join(self.ProgramFilesx86,
'Microsoft Visual Studio %0.1f' % self.vs_ver)
# Try to get path from registry, if fail use default path
return self.ri.lookup(self.ri.vs, '%0.1f' % self.vs_ver) or default
@property
def VCInstallDir(self):
"""
Microsoft Visual C++ directory.
Return
------
str
path
"""
path = self._guess_vc() or self._guess_vc_legacy()
if not isdir(path):
msg = 'Microsoft Visual C++ directory not found'
raise distutils.errors.DistutilsPlatformError(msg)
return path
def _guess_vc(self):
"""
Locate Visual C++ for VS2017+.
Return
------
str
path
"""
if self.vs_ver <= 14.0:
return ''
try:
# First search in known VS paths
vs_dir = self.known_vs_paths[self.vs_ver]
except KeyError:
# Else, search with path from registry
vs_dir = self.VSInstallDir
guess_vc = join(vs_dir, r'VC\Tools\MSVC')
# Subdir with VC exact version as name
try:
# Update the VC version with real one instead of VS version
vc_ver = listdir(guess_vc)[-1]
self.vc_ver = self._as_float_version(vc_ver)
return join(guess_vc, vc_ver)
except (OSError, IOError, IndexError):
return ''
def _guess_vc_legacy(self):
"""
Locate Visual C++ for versions prior to 2017.
Return
------
str
path
"""
default = join(self.ProgramFilesx86,
r'Microsoft Visual Studio %0.1f\VC' % self.vs_ver)
# Try to get "VC++ for Python" path from registry as default path
reg_path = join(self.ri.vc_for_python, '%0.1f' % self.vs_ver)
python_vc = self.ri.lookup(reg_path, 'installdir')
default_vc = join(python_vc, 'VC') if python_vc else default
# Try to get path from registry, if fail use default path
return self.ri.lookup(self.ri.vc, '%0.1f' % self.vs_ver) or default_vc
@property
def WindowsSdkVersion(self):
"""
Microsoft Windows SDK versions for specified MSVC++ version.
Return
------
tuple of str
versions
"""
if self.vs_ver <= 9.0:
return '7.0', '6.1', '6.0a'
elif self.vs_ver == 10.0:
return '7.1', '7.0a'
elif self.vs_ver == 11.0:
return '8.0', '8.0a'
elif self.vs_ver == 12.0:
return '8.1', '8.1a'
elif self.vs_ver >= 14.0:
return '10.0', '8.1'
@property
def WindowsSdkLastVersion(self):
"""
Microsoft Windows SDK last version.
Return
------
str
version
"""
return self._use_last_dir_name(join(self.WindowsSdkDir, 'lib'))
@property
def WindowsSdkDir(self):
"""
Microsoft Windows SDK directory.
Return
------
str
path
"""
sdkdir = ''
for ver in self.WindowsSdkVersion:
# Try to get it from registry
loc = join(self.ri.windows_sdk, 'v%s' % ver)
sdkdir = self.ri.lookup(loc, 'installationfolder')
if sdkdir:
break
if not sdkdir or not isdir(sdkdir):
# Try to get "VC++ for Python" version from registry
path = join(self.ri.vc_for_python, '%0.1f' % self.vc_ver)
install_base = self.ri.lookup(path, 'installdir')
if install_base:
sdkdir = join(install_base, 'WinSDK')
if not sdkdir or not isdir(sdkdir):
# If fail, use default new path
for ver in self.WindowsSdkVersion:
intver = ver[:ver.rfind('.')]
path = r'Microsoft SDKs\Windows Kits\%s' % intver
d = join(self.ProgramFiles, path)
if isdir(d):
sdkdir = d
if not sdkdir or not isdir(sdkdir):
# If fail, use default old path
for ver in self.WindowsSdkVersion:
path = r'Microsoft SDKs\Windows\v%s' % ver
d = join(self.ProgramFiles, path)
if isdir(d):
sdkdir = d
if not sdkdir:
# If fail, use Platform SDK
sdkdir = join(self.VCInstallDir, 'PlatformSDK')
return sdkdir
@property
def WindowsSDKExecutablePath(self):
"""
Microsoft Windows SDK executable directory.
Return
------
str
path
"""
# Find WinSDK NetFx Tools registry dir name
if self.vs_ver <= 11.0:
netfxver = 35
arch = ''
else:
netfxver = 40
hidex86 = True if self.vs_ver <= 12.0 else False
arch = self.pi.current_dir(x64=True, hidex86=hidex86)
fx = 'WinSDK-NetFx%dTools%s' % (netfxver, arch.replace('\\', '-'))
# list all possibles registry paths
regpaths = []
if self.vs_ver >= 14.0:
for ver in self.NetFxSdkVersion:
regpaths += [join(self.ri.netfx_sdk, ver, fx)]
for ver in self.WindowsSdkVersion:
regpaths += [join(self.ri.windows_sdk, 'v%sA' % ver, fx)]
# Return installation folder from the most recent path
for path in regpaths:
execpath = self.ri.lookup(path, 'installationfolder')
if execpath:
return execpath
@property
def FSharpInstallDir(self):
"""
Microsoft Visual F# directory.
Return
------
str
path
"""
path = join(self.ri.visualstudio, r'%0.1f\Setup\F#' % self.vs_ver)
return self.ri.lookup(path, 'productdir') or ''
@property
def UniversalCRTSdkDir(self):
"""
Microsoft Universal CRT SDK directory.
Return
------
str
path
"""
# Set Kit Roots versions for specified MSVC++ version
vers = ('10', '81') if self.vs_ver >= 14.0 else ()
# Find path of the more recent Kit
for ver in vers:
sdkdir = self.ri.lookup(self.ri.windows_kits_roots,
'kitsroot%s' % ver)
if sdkdir:
return sdkdir or ''
@property
def UniversalCRTSdkLastVersion(self):
"""
Microsoft Universal C Runtime SDK last version.
Return
------
str
version
"""
return self._use_last_dir_name(join(self.UniversalCRTSdkDir, 'lib'))
@property
def NetFxSdkVersion(self):
"""
Microsoft .NET Framework SDK versions.
Return
------
tuple of str
versions
"""
# Set FxSdk versions for specified VS version
return (('4.7.2', '4.7.1', '4.7',
'4.6.2', '4.6.1', '4.6',
'4.5.2', '4.5.1', '4.5')
if self.vs_ver >= 14.0 else ())
@property
def NetFxSdkDir(self):
"""
Microsoft .NET Framework SDK directory.
Return
------
str
path
"""
sdkdir = ''
for ver in self.NetFxSdkVersion:
loc = join(self.ri.netfx_sdk, ver)
sdkdir = self.ri.lookup(loc, 'kitsinstallationfolder')
if sdkdir:
break
return sdkdir
@property
def FrameworkDir32(self):
"""
Microsoft .NET Framework 32bit directory.
Return
------
str
path
"""
# Default path
guess_fw = join(self.WinDir, r'Microsoft.NET\Framework')
# Try to get path from registry, if fail use default path
return self.ri.lookup(self.ri.vc, 'frameworkdir32') or guess_fw
@property
def FrameworkDir64(self):
"""
Microsoft .NET Framework 64bit directory.
Return
------
str
path
"""
# Default path
guess_fw = join(self.WinDir, r'Microsoft.NET\Framework64')
# Try to get path from registry, if fail use default path
return self.ri.lookup(self.ri.vc, 'frameworkdir64') or guess_fw
@property
def FrameworkVersion32(self):
"""
Microsoft .NET Framework 32bit versions.
Return
------
tuple of str
versions
"""
return self._find_dot_net_versions(32)
@property
def FrameworkVersion64(self):
"""
Microsoft .NET Framework 64bit versions.
Return
------
tuple of str
versions
"""
return self._find_dot_net_versions(64)
def _find_dot_net_versions(self, bits):
"""
Find Microsoft .NET Framework versions.
Parameters
----------
bits: int
Platform number of bits: 32 or 64.
Return
------
tuple of str
versions
"""
# Find actual .NET version in registry
reg_ver = self.ri.lookup(self.ri.vc, 'frameworkver%d' % bits)
dot_net_dir = getattr(self, 'FrameworkDir%d' % bits)
ver = reg_ver or self._use_last_dir_name(dot_net_dir, 'v') or ''
# Set .NET versions for specified MSVC++ version
if self.vs_ver >= 12.0:
return ver, 'v4.0'
elif self.vs_ver >= 10.0:
return 'v4.0.30319' if ver.lower()[:2] != 'v4' else ver, 'v3.5'
elif self.vs_ver == 9.0:
return 'v3.5', 'v2.0.50727'
elif self.vs_ver == 8.0:
return 'v3.0', 'v2.0.50727'
@staticmethod
def _use_last_dir_name(path, prefix=''):
"""
Return name of the last dir in path or '' if no dir found.
Parameters
----------
path: str
Use dirs in this path
prefix: str
Use only dirs starting by this prefix
Return
------
str
name
"""
matching_dirs = (
dir_name
for dir_name in reversed(listdir(path))
if isdir(join(path, dir_name)) and
dir_name.startswith(prefix)
)
return next(matching_dirs, None) or ''
class EnvironmentInfo:
"""
Return environment variables for specified Microsoft Visual C++ version
and platform : Lib, Include, Path and libpath.
This function is compatible with Microsoft Visual C++ 9.0 to 14.X.
Script created by analysing Microsoft environment configuration files like
"vcvars[...].bat", "SetEnv.Cmd", "vcbuildtools.bat", ...
Parameters
----------
arch: str
Target architecture.
vc_ver: float
Required Microsoft Visual C++ version. If not set, autodetect the last
version.
vc_min_ver: float
Minimum Microsoft Visual C++ version.
"""
# Variables and properties in this class use the original CamelCase variable
# names from Microsoft source files for easier comparison.
def __init__(self, arch, vc_ver=None, vc_min_ver=0):
self.pi = PlatformInfo(arch)
self.ri = RegistryInfo(self.pi)
self.si = SystemInfo(self.ri, vc_ver)
if self.vc_ver < vc_min_ver:
err = 'No suitable Microsoft Visual C++ version found'
raise distutils.errors.DistutilsPlatformError(err)
@property
def vs_ver(self):
"""
Microsoft Visual Studio.
Return
------
float
version
"""
return self.si.vs_ver
@property
def vc_ver(self):
"""
Microsoft Visual C++ version.
Return
------
float
version
"""
return self.si.vc_ver
@property
def VSTools(self):
"""
Microsoft Visual Studio Tools.
Return
------
list of str
paths
"""
paths = [r'Common7\IDE', r'Common7\Tools']
if self.vs_ver >= 14.0:
arch_subdir = self.pi.current_dir(hidex86=True, x64=True)
paths += [r'Common7\IDE\CommonExtensions\Microsoft\TestWindow']
paths += [r'Team Tools\Performance Tools']
paths += [r'Team Tools\Performance Tools%s' % arch_subdir]
return [join(self.si.VSInstallDir, path) for path in paths]
@property
def VCIncludes(self):
"""
Microsoft Visual C++ & Microsoft Foundation Class Includes.
Return
------
list of str
paths
"""
return [join(self.si.VCInstallDir, 'Include'),
join(self.si.VCInstallDir, r'ATLMFC\Include')]
@property
def VCLibraries(self):
"""
Microsoft Visual C++ & Microsoft Foundation Class Libraries.
Return
------
list of str
paths
"""
if self.vs_ver >= 15.0:
arch_subdir = self.pi.target_dir(x64=True)
else:
arch_subdir = self.pi.target_dir(hidex86=True)
paths = ['Lib%s' % arch_subdir, r'ATLMFC\Lib%s' % arch_subdir]
if self.vs_ver >= 14.0:
paths += [r'Lib\store%s' % arch_subdir]
return [join(self.si.VCInstallDir, path) for path in paths]
@property
def VCStoreRefs(self):
"""
Microsoft Visual C++ store references Libraries.
Return
------
list of str
paths
"""
if self.vs_ver < 14.0:
return []
return [join(self.si.VCInstallDir, r'Lib\store\references')]
@property
def VCTools(self):
"""
Microsoft Visual C++ Tools.
Return
------
list of str
paths
"""
si = self.si
tools = [join(si.VCInstallDir, 'VCPackages')]
forcex86 = True if self.vs_ver <= 10.0 else False
arch_subdir = self.pi.cross_dir(forcex86)
if arch_subdir:
tools += [join(si.VCInstallDir, 'Bin%s' % arch_subdir)]
if self.vs_ver == 14.0:
path = 'Bin%s' % self.pi.current_dir(hidex86=True)
tools += [join(si.VCInstallDir, path)]
elif self.vs_ver >= 15.0:
host_dir = (r'bin\HostX86%s' if self.pi.current_is_x86() else
r'bin\HostX64%s')
tools += [join(
si.VCInstallDir, host_dir % self.pi.target_dir(x64=True))]
if self.pi.current_cpu != self.pi.target_cpu:
tools += [join(
si.VCInstallDir, host_dir % self.pi.current_dir(x64=True))]
else:
tools += [join(si.VCInstallDir, 'Bin')]
return tools
@property
def OSLibraries(self):
"""
Microsoft Windows SDK Libraries.
Return
------
list of str
paths
"""
if self.vs_ver <= 10.0:
arch_subdir = self.pi.target_dir(hidex86=True, x64=True)
return [join(self.si.WindowsSdkDir, 'Lib%s' % arch_subdir)]
else:
arch_subdir = self.pi.target_dir(x64=True)
lib = join(self.si.WindowsSdkDir, 'lib')
libver = self._sdk_subdir
return [join(lib, '%sum%s' % (libver, arch_subdir))]
@property
def OSIncludes(self):
"""
Microsoft Windows SDK Include.
Return
------
list of str
paths
"""
include = join(self.si.WindowsSdkDir, 'include')
if self.vs_ver <= 10.0:
return [include, join(include, 'gl')]
else:
if self.vs_ver >= 14.0:
sdkver = self._sdk_subdir
else:
sdkver = ''
return [join(include, '%sshared' % sdkver),
join(include, '%sum' % sdkver),
join(include, '%swinrt' % sdkver)]
@property
def OSLibpath(self):
"""
Microsoft Windows SDK Libraries Paths.
Return
------
list of str
paths
"""
ref = join(self.si.WindowsSdkDir, 'References')
libpath = []
if self.vs_ver <= 9.0:
libpath += self.OSLibraries
if self.vs_ver >= 11.0:
libpath += [join(ref, r'CommonConfiguration\Neutral')]
if self.vs_ver >= 14.0:
libpath += [
ref,
join(self.si.WindowsSdkDir, 'UnionMetadata'),
join(
ref, 'Windows.Foundation.UniversalApiContract', '1.0.0.0'),
join(ref, 'Windows.Foundation.FoundationContract', '1.0.0.0'),
join(
ref, 'Windows.Networking.Connectivity.WwanContract',
'1.0.0.0'),
join(
self.si.WindowsSdkDir, 'ExtensionSDKs', 'Microsoft.VCLibs',
'%0.1f' % self.vs_ver, 'References', 'CommonConfiguration',
'neutral'),
]
return libpath
@property
def SdkTools(self):
"""
Microsoft Windows SDK Tools.
Return
------
list of str
paths
"""
return list(self._sdk_tools())
def _sdk_tools(self):
"""
Microsoft Windows SDK Tools paths generator.
Return
------
generator of str
paths
"""
if self.vs_ver < 15.0:
bin_dir = 'Bin' if self.vs_ver <= 11.0 else r'Bin\x86'
yield join(self.si.WindowsSdkDir, bin_dir)
if not self.pi.current_is_x86():
arch_subdir = self.pi.current_dir(x64=True)
path = 'Bin%s' % arch_subdir
yield join(self.si.WindowsSdkDir, path)
if self.vs_ver in (10.0, 11.0):
if self.pi.target_is_x86():
arch_subdir = ''
else:
arch_subdir = self.pi.current_dir(hidex86=True, x64=True)
path = r'Bin\NETFX 4.0 Tools%s' % arch_subdir
yield join(self.si.WindowsSdkDir, path)
elif self.vs_ver >= 15.0:
path = join(self.si.WindowsSdkDir, 'Bin')
arch_subdir = self.pi.current_dir(x64=True)
sdkver = self.si.WindowsSdkLastVersion
yield join(path, '%s%s' % (sdkver, arch_subdir))
if self.si.WindowsSDKExecutablePath:
yield self.si.WindowsSDKExecutablePath
@property
def _sdk_subdir(self):
"""
Microsoft Windows SDK version subdir.
Return
------
str
subdir
"""
ucrtver = self.si.WindowsSdkLastVersion
return ('%s\\' % ucrtver) if ucrtver else ''
@property
def SdkSetup(self):
"""
Microsoft Windows SDK Setup.
Return
------
list of str
paths
"""
if self.vs_ver > 9.0:
return []
return [join(self.si.WindowsSdkDir, 'Setup')]
@property
def FxTools(self):
"""
Microsoft .NET Framework Tools.
Return
------
list of str
paths
"""
pi = self.pi
si = self.si
if self.vs_ver <= 10.0:
include32 = True
include64 = not pi.target_is_x86() and not pi.current_is_x86()
else:
include32 = pi.target_is_x86() or pi.current_is_x86()
include64 = pi.current_cpu == 'amd64' or pi.target_cpu == 'amd64'
tools = []
if include32:
tools += [join(si.FrameworkDir32, ver)
for ver in si.FrameworkVersion32]
if include64:
tools += [join(si.FrameworkDir64, ver)
for ver in si.FrameworkVersion64]
return tools
@property
def NetFxSDKLibraries(self):
"""
Microsoft .Net Framework SDK Libraries.
Return
------
list of str
paths
"""
if self.vs_ver < 14.0 or not self.si.NetFxSdkDir:
return []
arch_subdir = self.pi.target_dir(x64=True)
return [join(self.si.NetFxSdkDir, r'lib\um%s' % arch_subdir)]
@property
def NetFxSDKIncludes(self):
"""
Microsoft .Net Framework SDK Includes.
Return
------
list of str
paths
"""
if self.vs_ver < 14.0 or not self.si.NetFxSdkDir:
return []
return [join(self.si.NetFxSdkDir, r'include\um')]
@property
def VsTDb(self):
"""
Microsoft Visual Studio Team System Database.
Return
------
list of str
paths
"""
return [join(self.si.VSInstallDir, r'VSTSDB\Deploy')]
@property
def MSBuild(self):
"""
Microsoft Build Engine.
Return
------
list of str
paths
"""
if self.vs_ver < 12.0:
return []
elif self.vs_ver < 15.0:
base_path = self.si.ProgramFilesx86
arch_subdir = self.pi.current_dir(hidex86=True)
else:
base_path = self.si.VSInstallDir
arch_subdir = ''
path = r'MSBuild\%0.1f\bin%s' % (self.vs_ver, arch_subdir)
build = [join(base_path, path)]
if self.vs_ver >= 15.0:
# Add Roslyn C# & Visual Basic Compiler
build += [join(base_path, path, 'Roslyn')]
return build
@property
def HTMLHelpWorkshop(self):
"""
Microsoft HTML Help Workshop.
Return
------
list of str
paths
"""
if self.vs_ver < 11.0:
return []
return [join(self.si.ProgramFilesx86, 'HTML Help Workshop')]
@property
def UCRTLibraries(self):
"""
Microsoft Universal C Runtime SDK Libraries.
Return
------
list of str
paths
"""
if self.vs_ver < 14.0:
return []
arch_subdir = self.pi.target_dir(x64=True)
lib = join(self.si.UniversalCRTSdkDir, 'lib')
ucrtver = self._ucrt_subdir
return [join(lib, '%sucrt%s' % (ucrtver, arch_subdir))]
@property
def UCRTIncludes(self):
"""
Microsoft Universal C Runtime SDK Include.
Return
------
list of str
paths
"""
if self.vs_ver < 14.0:
return []
include = join(self.si.UniversalCRTSdkDir, 'include')
return [join(include, '%sucrt' % self._ucrt_subdir)]
@property
def _ucrt_subdir(self):
"""
Microsoft Universal C Runtime SDK version subdir.
Return
------
str
subdir
"""
ucrtver = self.si.UniversalCRTSdkLastVersion
return ('%s\\' % ucrtver) if ucrtver else ''
@property
def FSharp(self):
"""
Microsoft Visual F#.
Return
------
list of str
paths
"""
if not 11.0 <= self.vs_ver <= 12.0:  # original chained test "11.0 > vs_ver > 12.0" was always false
return []
return [self.si.FSharpInstallDir]
@property
def VCRuntimeRedist(self):
"""
Microsoft Visual C++ runtime redistributable dll.
Return
------
str
path
"""
vcruntime = 'vcruntime%d0.dll' % self.vc_ver
arch_subdir = self.pi.target_dir(x64=True).strip('\\')
# Installation prefixes candidates
prefixes = []
tools_path = self.si.VCInstallDir
redist_path = dirname(tools_path.replace(r'\Tools', r'\Redist'))
if isdir(redist_path):
# Redist version may not be exactly the same as tools
redist_path = join(redist_path, listdir(redist_path)[-1])
prefixes += [redist_path, join(redist_path, 'onecore')]
prefixes += [join(tools_path, 'redist')] # VS14 legacy path
# CRT directory
crt_dirs = ('Microsoft.VC%d.CRT' % (self.vc_ver * 10),
# Sometimes stored in a directory named after the VS version instead of VC
'Microsoft.VC%d.CRT' % (int(self.vs_ver) * 10))
# vcruntime path
for prefix, crt_dir in itertools.product(prefixes, crt_dirs):
path = join(prefix, arch_subdir, crt_dir, vcruntime)
if isfile(path):
return path
def return_env(self, exists=True):
"""
Return environment dict.
Parameters
----------
exists: bool
If True, only return existing paths.
Return
------
dict
environment
"""
env = dict(
include=self._build_paths('include',
[self.VCIncludes,
self.OSIncludes,
self.UCRTIncludes,
self.NetFxSDKIncludes],
exists),
lib=self._build_paths('lib',
[self.VCLibraries,
self.OSLibraries,
self.FxTools,
self.UCRTLibraries,
self.NetFxSDKLibraries],
exists),
libpath=self._build_paths('libpath',
[self.VCLibraries,
self.FxTools,
self.VCStoreRefs,
self.OSLibpath],
exists),
path=self._build_paths('path',
[self.VCTools,
self.VSTools,
self.VsTDb,
self.SdkTools,
self.SdkSetup,
self.FxTools,
self.MSBuild,
self.HTMLHelpWorkshop,
self.FSharp],
exists),
)
if self.vs_ver >= 14 and self.VCRuntimeRedist and isfile(self.VCRuntimeRedist):
env['py_vcruntime_redist'] = self.VCRuntimeRedist
return env
def _build_paths(self, name, spec_path_lists, exists):
"""
Given an environment variable name and specified paths,
return a pathsep-separated string of paths containing
unique, extant, directories from those paths and from
the environment variable. Raise an error if no paths
are resolved.
Parameters
----------
name: str
Environment variable name
spec_path_lists: list of str
Paths
exists: bool
If True, only return existing paths.
Return
------
str
Pathsep-separated paths
"""
# flatten spec_path_lists
spec_paths = itertools.chain.from_iterable(spec_path_lists)
env_paths = environ.get(name, '').split(pathsep)
paths = itertools.chain(spec_paths, env_paths)
extant_paths = list(filter(isdir, paths)) if exists else paths
if not extant_paths:
msg = "%s environment variable is empty" % name.upper()
raise distutils.errors.DistutilsPlatformError(msg)
unique_paths = self._unique_everseen(extant_paths)
return pathsep.join(unique_paths)
# from Python docs
@staticmethod
def _unique_everseen(iterable, key=None):
"""
List unique elements, preserving order.
Remember all elements ever seen.
_unique_everseen('AAAABBBCCDAABBB') --> A B C D
_unique_everseen('ABBCcAD', str.lower) --> A B C D
"""
seen = set()
seen_add = seen.add
if key is None:
for element in filterfalse(seen.__contains__, iterable):
seen_add(element)
yield element
else:
for element in iterable:
k = key(element)
if k not in seen:
seen_add(k)
yield element
| Django-locallibrary/env/Lib/site-packages/setuptools/msvc.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/setuptools/msvc.py",
"repo_id": "Django-locallibrary",
"token_count": 25479
} | 25 |
# This file must be used with "source bin/activate" *from bash*
# you cannot run it directly
deactivate () {
# reset old environment variables
if [ -n "${_OLD_VIRTUAL_PATH:-}" ] ; then
PATH="${_OLD_VIRTUAL_PATH:-}"
export PATH
unset _OLD_VIRTUAL_PATH
fi
if [ -n "${_OLD_VIRTUAL_PYTHONHOME:-}" ] ; then
PYTHONHOME="${_OLD_VIRTUAL_PYTHONHOME:-}"
export PYTHONHOME
unset _OLD_VIRTUAL_PYTHONHOME
fi
# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
hash -r 2> /dev/null
fi
if [ -n "${_OLD_VIRTUAL_PS1:-}" ] ; then
PS1="${_OLD_VIRTUAL_PS1:-}"
export PS1
unset _OLD_VIRTUAL_PS1
fi
unset VIRTUAL_ENV
if [ ! "${1:-}" = "nondestructive" ] ; then
# Self destruct!
unset -f deactivate
fi
}
# unset irrelevant variables
deactivate nondestructive
VIRTUAL_ENV="D:\Django-Apps\Local Library2\env"
export VIRTUAL_ENV
_OLD_VIRTUAL_PATH="$PATH"
PATH="$VIRTUAL_ENV/Scripts:$PATH"
export PATH
# unset PYTHONHOME if set
# this will fail if PYTHONHOME is set to the empty string (which is bad anyway)
# could use `if (set -u; : $PYTHONHOME) ;` in bash
if [ -n "${PYTHONHOME:-}" ] ; then
_OLD_VIRTUAL_PYTHONHOME="${PYTHONHOME:-}"
unset PYTHONHOME
fi
if [ -z "${VIRTUAL_ENV_DISABLE_PROMPT:-}" ] ; then
_OLD_VIRTUAL_PS1="${PS1:-}"
PS1="(env) ${PS1:-}"
export PS1
fi
# This should detect bash and zsh, which have a hash command that must
# be called to get it to forget past commands. Without forgetting
# past commands the $PATH changes we made may not be respected
if [ -n "${BASH:-}" -o -n "${ZSH_VERSION:-}" ] ; then
hash -r 2> /dev/null
fi
| Django-locallibrary/env/Scripts/activate/0 | {
"file_path": "Django-locallibrary/env/Scripts/activate",
"repo_id": "Django-locallibrary",
"token_count": 815
} | 26 |
//+------------------------------------------------------------------+
//| TestScript.mq5 |
//|                                     Copyright 2021, Omega Joctan |
//| https://www.mql5.com/en/users/omegajoctan |
//+------------------------------------------------------------------+
//hire me on your next big Machine Learning Project on this link > https://www.mql5.com/en/job/new?prefered=omegajoctan
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
#property copyright "Copyright 2021, Omega Joctan"
#property link "https://www.mql5.com/en/users/omegajoctan"
#property version "1.00"
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
#include "LinearRegressionLib.mqh";
//#include "C:\Users\Omega Joctan\AppData\Roaming\MetaQuotes\Terminal\892B47EBC091D6EF95E3961284A76097\MQL5\Experts\DataScience\LogisticRegression\LogisticRegressionLib.mqh";
//CLogisticRegression Logreg;
#include "LinearRegressionLib.mqh";
CMatrixRegression *m_lr;
//+------------------------------------------------------------------+
//| Script program start function |
//+------------------------------------------------------------------+
void OnStart()
{
//---
string file_name = "Apple Dataset.csv",delimiter =",";
//---
/*
m_lr = new CMatrixRegression;
Print("Matrix simple regression");
m_lr.Init(2,"1",file_name,delimiter);
m_lr.MultipleMatLinearRegMain();
delete m_lr;
*/
//---
m_lr = new CMatrixRegression;
Print("Matrix multiple regression");
m_lr.LrInit(8,"2,3,4,5,6,7",file_name,",",0.7);
m_lr.corrcoeff();
m_lr.MultipleMatLinearRegMain();
delete m_lr;
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+ | LogisticRegression-MQL5-and-python/LinearRegression mql5/TestScript.mq5/0 | {
"file_path": "LogisticRegression-MQL5-and-python/LinearRegression mql5/TestScript.mq5",
"repo_id": "LogisticRegression-MQL5-and-python",
"token_count": 947
} | 27 |
//+------------------------------------------------------------------+
//| pca.mqh |
//| Copyright 2022, Fxalgebra.com |
//| https://www.mql5.com/en/users/omegajoctan |
//+------------------------------------------------------------------+
#property copyright "Copyright 2022, Fxalgebra.com"
#property link "https://www.mql5.com/en/users/omegajoctan"
//+------------------------------------------------------------------+
//| Principal Component Analysis Library                             |
//+------------------------------------------------------------------+
#include "base.mqh"
#include <MALE5\MqPlotLib\plots.mqh>
//+------------------------------------------------------------------+
//| Principal Component Analysis Class |
//+------------------------------------------------------------------+
class CPCA
{
enum criterion
{
CRITERION_VARIANCE,
CRITERION_KAISER,
CRITERION_SCREE_PLOT
};
CPlots plt;
protected:
uint m_components;
criterion m_criterion;
matrix components_matrix;
vector mean;
uint n_features;
uint extract_components(vector &eigen_values, double threshold=0.95);
public:
CPCA(int k=0, criterion CRITERION_=CRITERION_SCREE_PLOT);
~CPCA(void);
matrix fit_transform(matrix &X);
matrix transform(matrix &X);
vector transform(vector &X);
bool save(string dir);
bool load(string dir);
};
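//--- Usage sketch (hypothetical data, not part of the library itself):
//    CPCA pca(2);                            //keep 2 principal components
//    matrix scores = pca.fit_transform(X);   //X: rows = samples, cols = features
//    vector point = pca.transform(sample);   //project a new sample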
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
CPCA::CPCA(int k=0, criterion CRITERION_=CRITERION_SCREE_PLOT)
:m_components(k),
m_criterion(CRITERION_)
{
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
CPCA::~CPCA(void)
{
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
matrix CPCA::fit_transform(matrix &X)
{
n_features = (uint)X.Cols();
if (m_components>n_features)
{
printf("%s Number of dimensions K[%d] is supposed to be <= number of features %d",__FUNCTION__,m_components,n_features);
this.m_components = (int)n_features;
Print(__LINE__);
}
//---
this.mean = X.Mean(0);
matrix X_centered = BaseDimRed::subtract(X, this.mean);
BaseDimRed::ReplaceNaN(X_centered);
matrix cov_matrix = cova(X_centered, false);
matrix eigen_vectors;
vector eigen_values;
BaseDimRed::ReplaceNaN(cov_matrix);
if (!cov_matrix.Eig(eigen_vectors, eigen_values))
printf("Failed to caculate Eigen matrix and vectors Err=%d",GetLastError());
//--- Sort eigenvectors by decreasing eigenvalues
vector args = MatrixExtend::ArgSort(eigen_values); MatrixExtend::Reverse(args);
eigen_values = BaseDimRed::Sort(eigen_values, args);
eigen_vectors = BaseDimRed::Sort(eigen_vectors, args);
//---
if (MQLInfoInteger(MQL_DEBUG))
Print("Eigen values: ",eigen_values);
if (m_components==0)
m_components = this.extract_components(eigen_values);
else
this.extract_components(eigen_values);
if (MQLInfoInteger(MQL_DEBUG))
printf("%s Selected components %d",__FUNCTION__,m_components);
this.components_matrix = BaseDimRed::Slice(eigen_vectors, m_components, 1); //Get the components matrix
//MatrixExtend::NormalizeDouble_(this.components_matrix, 5);
//this.components_matrix = scaler.fit_transform(this.components_matrix.Transpose()); //Normalize components matrix
this.components_matrix = this.components_matrix.Transpose();
if (MQLInfoInteger(MQL_DEBUG))
Print("components_matrix\n",components_matrix);
//---
return X_centered.MatMul(components_matrix.Transpose()); //return the pca scores
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
matrix CPCA::transform(matrix &X)
{
if (X.Cols()!=this.n_features)
{
printf("%s Inconsistent input X matrix size, It is supposed to be of size %d same as the matrix used under fit_transform",__FUNCTION__,n_features);
this.m_components = n_features;
}
matrix X_centered = BaseDimRed::subtract(X, this.mean);
return X_centered.MatMul(this.components_matrix.Transpose()); //return the pca scores
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
vector CPCA::transform(vector &X)
{
matrix INPUT_MAT = MatrixExtend::VectorToMatrix(X, X.Size());
matrix OUTPUT_MAT = transform(INPUT_MAT);
return MatrixExtend::MatrixToVector(OUTPUT_MAT);
}
//+------------------------------------------------------------------+
//| Select the number of components based on some criterion |
//+------------------------------------------------------------------+
uint CPCA::extract_components(vector &eigen_values, double threshold=0.95)
{
uint k = 0;
vector eigen_pow = MathPow(eigen_values, 2);
vector cum_sum = eigen_pow.CumSum();
double sum = eigen_pow.Sum();
switch(m_criterion)
{
case CRITERION_VARIANCE:
{
vector cumulative_variance = cum_sum / sum;
if (MQLInfoInteger(MQL_DEBUG))
Print("Cummulative variance: ",cumulative_variance);
vector v(cumulative_variance.Size()); v.Fill(0.0);
for (ulong i=0; i<v.Size(); i++)
v[i] = (cumulative_variance[i] >= threshold);
k = (uint)v.ArgMax() + 1;
}
break;
case CRITERION_KAISER:
{
vector v(eigen_values.Size()); v.Fill(0.0);
for (ulong i=0; i<eigen_values.Size(); i++)
v[i] = (eigen_values[i] >= 1);
k = uint(v.Sum());
}
break;
case CRITERION_SCREE_PLOT:
{
vector v_cols(eigen_values.Size());
for (ulong i=0; i<v_cols.Size(); i++)
v_cols[i] = (int)i+1;
vector vars = eigen_values;
//matrix_utils.Sort(vars); //Make sure they are in ascending first order
//matrix_utils.Reverse(vars); //Set them to descending order
plt.Plot("Scree plot",v_cols,vars,"EigenValue","Principal Components","EigenValue",CURVE_POINTS_AND_LINES);
//---
string warn = "\n<<<< WARNING >>>>\nThe scree plot doesn't return the chosen number of k components\nThe Kaiser criterion will be used to pick the number of k components instead\nThe k returned might be different from what you see on the scree plot";
warn += "\nTo apply the number of k components you read off the scree plot\ncall the PCA model again with that value\n";
Print(warn);
//--- Kaiser
vector v(eigen_values.Size()); v.Fill(0.0);
for (ulong i=0; i<eigen_values.Size(); i++)
v[i] = (eigen_values[i] >= 1);
k = uint(v.Sum());
}
break;
}
return (k);
}
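//--- Worked example (hypothetical eigenvalues {4, 2, 1, 0.5}): squared values are
//    {16, 4, 1, 0.25}, cumulative variance ~ {0.75, 0.94, 0.99, 1.00}, so the
//    variance criterion with threshold 0.95 picks k=3; Kaiser (eigenvalue >= 1)
//    also picks k=3 here.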
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
bool CPCA::save(string dir)
{
matrix m = MatrixExtend::VectorToMatrix(this.mean, this.mean.Size());
if (!MatrixExtend::WriteCsv(dir+"\\PCA-Mean.csv",m,NULL,false,8))
{
Print("Failed to Save PCA-Mean information to ",dir);
return false;
}
//---
if (!MatrixExtend::WriteCsv(dir+"\\PCA-ComponentsMatrix.csv",this.components_matrix,NULL,false,8))
{
Print("Failed to Save PCA-ComponentsMatrix information to ",dir);
return false;
}
return true;
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
bool CPCA::load(string dir)
{
string header;
matrix m = MatrixExtend::ReadCsv(dir+"\\PCA-Mean.csv",header);
if (m.Rows()==0)
return false;
this.mean = MatrixExtend::MatrixToVector(m);
this.n_features = (uint)this.mean.Size();
//---
this.components_matrix = MatrixExtend::ReadCsv(dir+"\\PCA-ComponentsMatrix.csv",header);
//printf("Components Matrix[%dx%d]",components_matrix.Rows(),components_matrix.Cols());
if (components_matrix.Rows()==0)
return false;
return true;
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
matrix cova(matrix &data, bool row_var=true)
{
if (row_var)
data = data.Transpose(); // Transpose if each row represents a data point
// Step 1: Center the data
matrix centered_data = BaseDimRed::subtract(data, data.Mean(0));
// Step 2: Calculate the covariance matrix
matrix covariance_matrix = centered_data.Transpose().MatMul(centered_data) / (data.Rows() - 1);
return covariance_matrix;
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+ | MALE5/Dimensionality Reduction/PCA.mqh/0 | {
"file_path": "MALE5/Dimensionality Reduction/PCA.mqh",
"repo_id": "MALE5",
"token_count": 4617
} | 28 |
## Naive Bayes Classifier
This documentation explains the `CNaiveBayes` class in MQL5, which implements a **Naive Bayes classifier** for classification tasks.
**I. Naive Bayes Theory:**
Naive Bayes is a probabilistic classifier based on **Bayes' theorem**. It assumes that the features used for classification are **independent** of each other given the class label. This simplifies the calculations involved in making predictions.
**II. CNaiveBayes Class:**
The `CNaiveBayes` class provides functionality for training and using a Naive Bayes classifier in MQL5:
**Public Functions:**
* **CNaiveBayes(void):** Constructor.
* **~CNaiveBayes(void):** Destructor.
* **void fit(matrix &x, vector &y):** Trains the model on the provided data (`x` - features, `y` - target labels).
* **int predict(vector &x):** Predicts the class label for a single input vector.
* **vector predict(matrix &x):** Predicts class labels for all rows in the input matrix.
**Internal Variables:**
* `n_features`: Number of features in the data.
* `y_target`: Vector of target labels used during training.
* `classes`: Vector containing the available class labels.
* `class_proba`: Vector storing the prior probability of each class.
* `features_proba`: Matrix storing the conditional probability of each feature value given each class.
* `c_prior_proba`: Vector storing the calculated prior probability of each class after training.
* `c_evidence`: Vector storing the calculated class evidence for a new data point.
* `calcProba(vector &v_features)`: Internal function (not directly accessible) that likely calculates the class probabilities for a given feature vector.
**III. Class Functionality:**
1. **Training:**
* The `fit` function takes the input data (features and labels) and performs the following:
* Calculates the prior probability of each class (number of samples belonging to each class divided by the total number of samples).
* Estimates the conditional probability of each feature value given each class (using techniques like Laplace smoothing to handle unseen features).
        * These probabilities are stored in the internal variables for later use in prediction (a minimal sketch of this step follows below).
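As an illustration of the training step, here is a minimal Python sketch of how priors and Laplace-smoothed conditional probabilities can be estimated for discrete features. It mirrors the idea only; the names (`fit_naive_bayes`, `alpha`) are hypothetical, and this is not the MQL5 implementation itself:

```python
import numpy as np

def fit_naive_bayes(X, y, alpha=1.0):
    """Estimate P(C) and Laplace-smoothed P(x_i = v | C) for discrete features."""
    classes = np.unique(y)
    priors = {c: float(np.mean(y == c)) for c in classes}   # P(C)
    likelihoods = {}    # (feature index, value, class) -> P(x_i = v | C)
    for c in classes:
        Xc = X[y == c]                                       # samples of class c
        for i in range(X.shape[1]):
            n_values = len(np.unique(X[:, i]))               # distinct values of feature i
            values, counts = np.unique(Xc[:, i], return_counts=True)
            for v, n in zip(values, counts):
                # alpha > 0 keeps unseen feature values from getting zero probability
                likelihoods[(i, v, c)] = (n + alpha) / (len(Xc) + alpha * n_values)
    return classes, priors, likelihoods
```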
2. **Prediction:**
* The `predict` functions take a new data point (feature vector) and:
* Calculate the class evidence for each class using Bayes' theorem, considering the prior probabilities and conditional probabilities of the features.
        * The class with the **highest class evidence** is predicted as the most likely class for the new data point (see the prediction sketch below).
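Continuing the sketch above (same hypothetical names, not the MQL5 API), prediction picks the class maximizing $P(C)\prod_i P(x_i \mid C)$; working in log-space avoids floating-point underflow when many feature probabilities are multiplied:

```python
import math

def predict_naive_bayes(x, classes, priors, likelihoods, eps=1e-9):
    """Return the class with the highest log evidence for feature vector x."""
    best_class, best_score = None, float("-inf")
    for c in classes:
        score = math.log(priors[c])                      # log P(C)
        for i, v in enumerate(x):
            # unseen (feature, value, class) combinations fall back to a tiny probability
            score += math.log(likelihoods.get((i, v, c), eps))
        if score > best_score:
            best_class, best_score = c, score
    return best_class
```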
**IV. Additional Notes:**
* The class assumes the data is already preprocessed and ready for use.
**Reference**
* [Data Science and Machine Learning (Part 11): Naïve Bayes, Probability theory in Trading](https://www.mql5.com/en/articles/12184) | MALE5/Naive Bayes/README.md/0 | {
"file_path": "MALE5/Naive Bayes/README.md",
"repo_id": "MALE5",
"token_count": 739
} | 29 |
//+------------------------------------------------------------------+
//| linalg.mqh |
//| Copyright 2023, Omega Joctan |
//| https://www.mql5.com/en/users/omegajoctan |
//+------------------------------------------------------------------+
#property copyright "Copyright 2023, Omega Joctan"
#property link "https://www.mql5.com/en/users/omegajoctan"
#include "MatrixExtend.mqh"
//+------------------------------------------------------------------+
//| implementations of standard linear algebra algorithms |
//+------------------------------------------------------------------+
class LinAlg
{
public:
LinAlg(void);
~LinAlg(void);
template<typename T>
static matrix<T> dot(matrix<T> &A, matrix<T> &B);
template<typename T>
static matrix<T> norm(const matrix<T> &A, const matrix<T> &B);
template<typename T>
static double norm(const vector<T> &v1, const vector<T> &v2);
template<typename T>
static matrix<T> outer(const matrix<T> &A, const matrix<T> &B);
static bool svd(matrix &mat, matrix &U, matrix &V, vector &singular_value);
};
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
LinAlg::LinAlg(void)
{
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
LinAlg::~LinAlg(void)
{
}
//+------------------------------------------------------------------+
//| Dot product of two matrices | Flexible function - numpy like   |
//+------------------------------------------------------------------+
template<typename T>
matrix<T> LinAlg::dot(matrix<T> &A, matrix<T> &B)
{
matrix Z={};
if (A.Cols() == B.Rows()) //Valid Normal matrix multiplication
{
Z = A.MatMul(B);
return Z;
}
else
{
//---Check for one dimensional matrices | Scalar
if ((A.Rows()==1 && A.Cols()==1))
{
Z = B * A[0][0];
return Z;
}
if (B.Rows()==1 && B.Cols()==1)
{
Z = B[0][0] * A;
return Z;
}
//-- Element wise multiplication
if (A.Rows()==B.Rows() && A.Cols()==B.Cols())
{
Z = A * B;
return Z;
}
}
return Z;
}
//+------------------------------------------------------------------+
//| Matrix or vector<T> norm. | Finds the Euclidean distance of the |
//| two matrices |
//+------------------------------------------------------------------+
template<typename T>
matrix<T> LinAlg::norm(const matrix<T> &A, const matrix<T> &B)
{
matrix<T> ret = {};
if (B.Cols() != A.Cols())
{
Print(__FUNCTION__," Dimensions Error");
return ret;
}
if (A.Rows()==1 || B.Rows()==1)
{
matrix<T> A_temp = A, B_temp = B;
vector<T> A_vector, B_vector;
A_vector.Swap(A_temp);
B_vector.Swap(B_temp);
ulong size = 0;
if (A_vector.Size() >= B_vector.Size())
{
size = A_vector.Size();
B_vector.Resize(size);
}
else
{
size = B_vector.Size();
A_vector.Resize(size);
}
ret.Resize(1,1);
ret[0][0] = MathSqrt( MathPow(A_vector - B_vector, 2).Sum() ) ;
return (ret);
}
ulong size = A.Rows() > B.Rows() ? A.Rows() : B.Rows();
vector<T> euc(size);
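   // note: the inner loop overwrites euc[i], so each entry ends up holding the distance from A.Row(i) to the last row of B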
for (ulong i=0; i<A.Rows(); i++)
for (ulong j=0; j<B.Rows(); j++)
euc[i] = MathSqrt( MathPow(A.Row(i) - B.Row(j), 2).Sum() );
euc.Swap(ret);
return ret;
}
//+------------------------------------------------------------------+
//| Euclidean Distance of two vectors |
//+------------------------------------------------------------------+
template<typename T>
double LinAlg::norm(const vector<T> &v1, const vector<T> &v2)
{
double dist = 0;
if(v1.Size() != v2.Size())
Print(__FUNCTION__, " v1 and v2 not matching in size");
else
{
double c = 0;
for(ulong i=0; i<v1.Size(); i++)
c += MathPow(v1[i] - v2[i], 2);
dist = MathSqrt(c);
}
return(dist);
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
template<typename T>
matrix<T> LinAlg::outer(const matrix<T> &A,const matrix<T> &B)
{
return A.Outer(B);
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
bool LinAlg::svd(matrix &mat, matrix &U,matrix &V,vector &singular_value)
{
return mat.SVD(U,V,singular_value);
}
//+------------------------------------------------------------------+
//| |
//+------------------------------------------------------------------+
| MALE5/linalg.mqh/0 | {
"file_path": "MALE5/linalg.mqh",
"repo_id": "MALE5",
"token_count": 2723
} | 30 |
#include <stdio.h>
#include <conio.h>
#include <math.h>
#include <stdlib.h> /* for exit() */
#define SIZE 10
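/* Inverts an n x n matrix: the input is augmented with the identity
   matrix, reduced by Gauss-Jordan elimination, and the right half of
   the augmented matrix is printed as the inverse. */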
int main()
{
float a[SIZE][SIZE], x[SIZE], ratio;
int i,j,k,n;
// clrscr();
/* Inputs */
/* 1. Reading order of matrix */
printf("Enter order of matrix: ");
scanf("%d", &n);
/* 2. Reading Matrix */
printf("Enter coefficients of Matrix:\n");
for(i=1;i<=n;i++)
{
for(j=1;j<=n;j++)
{
printf("a[%d][%d] = ",i,j);
scanf("%f", &a[i][j]);
}
}
/* Augmenting Identity Matrix of Order n */
for(i=1;i<=n;i++)
{
for(j=1;j<=n;j++)
{
if(i==j)
{
a[i][j+n] = 1;
}
else
{
a[i][j+n] = 0;
}
}
}
/*
printf("\n before gauss jordan elimination");
for (i=1; i<=n; i++)
for (k=1; k<=n*2; k++)
{
printf("\n a[%d][%d] = %.4f ",i,k, a[i][k]);
}
*/
/* Applying Gauss Jordan Elimination */
for(i=1;i<=n;i++)
{
if(a[i][i] == 0.0)
{
printf("Mathematical Error!");
exit(0);
}
for(j=1;j<=n;j++)
{
if(i!=j)
{
ratio = a[j][i]/a[i][i];
for(k=1;k<=2*n;k++)
{
// printf("\n a[%d][%d] val =%.2f a[%d][%d] val =%.2f ",j,k, a[j][k], i,k ,a[i][k]);
// printf("\n jk val =%.4f, ratio = %.4f , ik val =%.4f ",a[j][k],ratio,a[i][k]);
a[j][k] = a[j][k] - ratio*a[i][k];
// printf(" \n a[%d][%d] = %0.5f ",j,k, a[j][k]);
}
}
}
}
/*
printf("\n after gauss jordan elimination");
for (i=1; i<=n; i++)
for (k=1; k<=n*2; k++)
{
printf("\n a[%d][%d] = %.4f ",i,k, a[i][k]);
}
*/
//Row Operation to Make Principal Diagonal to 1
for(i=1;i<=n;i++)
{
for(j=n+1;j<=2*n;j++)
{
// printf("\n a[%d][%d] = %.2f a[%d][%d] = %.2f ",i,j, a[i][j], i,i, a[i][i]);
a[i][j] = a[i][j]/a[i][i];
// printf("\n mathematical operation = %.4f",a[i][j]);
}
}
//Displaying Inverse Matrix
printf("\nInverse Matrix is:\n");
for(i=1;i<=n;i++)
{
for(j=n+1;j<=2*n;j++)
{
printf("%0.3f\t",a[i][j]);
}
printf("\n");
}
getch();
return(0);
} | MatrixRegressionMQL5/Gauss-Jordan c++/gauss-jordan.cpp/0 | {
"file_path": "MatrixRegressionMQL5/Gauss-Jordan c++/gauss-jordan.cpp",
"repo_id": "MatrixRegressionMQL5",
"token_count": 1350
} | 31 |
{% extends "base_template.html" %}
{% block content %}
<form action="" method="post">
{% csrf_token %}
        <table> {{ form.as_table }} </table>
        <input type="submit" value="Submit">
</form>
{% endblock %} | Django-locallibrary/LocalLibrary/catalog/Templates/Author_update.html/0 | {
"file_path": "Django-locallibrary/LocalLibrary/catalog/Templates/Author_update.html",
"repo_id": "Django-locallibrary",
"token_count": 103
} | 0 |
{% extends "base_template.html" %}
{% block content %}
<p>Successfully logged out</p>
<a href="{% url 'login' %}">Login again</a>
{% endblock %} | Django-locallibrary/LocalLibrary/templates/registration/logged_out.html/0 | {
"file_path": "Django-locallibrary/LocalLibrary/templates/registration/logged_out.html",
"repo_id": "Django-locallibrary",
"token_count": 76
} | 1 |
"""Build Environment used for isolation during sdist building
"""
import logging
import os
import sys
import textwrap
from collections import OrderedDict
from distutils.sysconfig import get_python_lib
from sysconfig import get_paths
from pip._vendor.pkg_resources import Requirement, VersionConflict, WorkingSet
from pip import __file__ as pip_location
from pip._internal.cli.spinners import open_spinner
from pip._internal.utils.subprocess import call_subprocess
from pip._internal.utils.temp_dir import TempDirectory, tempdir_kinds
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from types import TracebackType
from typing import Tuple, Set, Iterable, Optional, List, Type
from pip._internal.index.package_finder import PackageFinder
logger = logging.getLogger(__name__)
class _Prefix:
def __init__(self, path):
# type: (str) -> None
self.path = path
self.setup = False
self.bin_dir = get_paths(
'nt' if os.name == 'nt' else 'posix_prefix',
vars={'base': path, 'platbase': path}
)['scripts']
# Note: prefer distutils' sysconfig to get the
# library paths so PyPy is correctly supported.
purelib = get_python_lib(plat_specific=False, prefix=path)
platlib = get_python_lib(plat_specific=True, prefix=path)
if purelib == platlib:
self.lib_dirs = [purelib]
else:
self.lib_dirs = [purelib, platlib]
class BuildEnvironment(object):
"""Creates and manages an isolated environment to install build deps
"""
def __init__(self):
# type: () -> None
temp_dir = TempDirectory(
kind=tempdir_kinds.BUILD_ENV, globally_managed=True
)
self._prefixes = OrderedDict((
(name, _Prefix(os.path.join(temp_dir.path, name)))
for name in ('normal', 'overlay')
))
self._bin_dirs = [] # type: List[str]
self._lib_dirs = [] # type: List[str]
for prefix in reversed(list(self._prefixes.values())):
self._bin_dirs.append(prefix.bin_dir)
self._lib_dirs.extend(prefix.lib_dirs)
# Customize site to:
# - ensure .pth files are honored
# - prevent access to system site packages
system_sites = {
os.path.normcase(site) for site in (
get_python_lib(plat_specific=False),
get_python_lib(plat_specific=True),
)
}
self._site_dir = os.path.join(temp_dir.path, 'site')
if not os.path.exists(self._site_dir):
os.mkdir(self._site_dir)
with open(os.path.join(self._site_dir, 'sitecustomize.py'), 'w') as fp:
fp.write(textwrap.dedent(
'''
import os, site, sys
# First, drop system-sites related paths.
original_sys_path = sys.path[:]
known_paths = set()
for path in {system_sites!r}:
site.addsitedir(path, known_paths=known_paths)
system_paths = set(
os.path.normcase(path)
for path in sys.path[len(original_sys_path):]
)
original_sys_path = [
path for path in original_sys_path
if os.path.normcase(path) not in system_paths
]
sys.path = original_sys_path
# Second, add lib directories.
# ensuring .pth file are processed.
for path in {lib_dirs!r}:
assert not path in sys.path
site.addsitedir(path)
'''
).format(system_sites=system_sites, lib_dirs=self._lib_dirs))
def __enter__(self):
# type: () -> None
self._save_env = {
name: os.environ.get(name, None)
for name in ('PATH', 'PYTHONNOUSERSITE', 'PYTHONPATH')
}
path = self._bin_dirs[:]
old_path = self._save_env['PATH']
if old_path:
path.extend(old_path.split(os.pathsep))
pythonpath = [self._site_dir]
os.environ.update({
'PATH': os.pathsep.join(path),
'PYTHONNOUSERSITE': '1',
'PYTHONPATH': os.pathsep.join(pythonpath),
})
def __exit__(
self,
exc_type, # type: Optional[Type[BaseException]]
exc_val, # type: Optional[BaseException]
exc_tb # type: Optional[TracebackType]
):
# type: (...) -> None
for varname, old_value in self._save_env.items():
if old_value is None:
os.environ.pop(varname, None)
else:
os.environ[varname] = old_value
def check_requirements(self, reqs):
# type: (Iterable[str]) -> Tuple[Set[Tuple[str, str]], Set[str]]
"""Return 2 sets:
- conflicting requirements: set of (installed, wanted) reqs tuples
- missing requirements: set of reqs
"""
missing = set()
conflicting = set()
if reqs:
ws = WorkingSet(self._lib_dirs)
for req in reqs:
try:
if ws.find(Requirement.parse(req)) is None:
missing.add(req)
except VersionConflict as e:
conflicting.add((str(e.args[0].as_requirement()),
str(e.args[1])))
return conflicting, missing
def install_requirements(
self,
finder, # type: PackageFinder
requirements, # type: Iterable[str]
prefix_as_string, # type: str
message # type: str
):
# type: (...) -> None
prefix = self._prefixes[prefix_as_string]
assert not prefix.setup
prefix.setup = True
if not requirements:
return
args = [
sys.executable, os.path.dirname(pip_location), 'install',
'--ignore-installed', '--no-user', '--prefix', prefix.path,
'--no-warn-script-location',
] # type: List[str]
if logger.getEffectiveLevel() <= logging.DEBUG:
args.append('-v')
for format_control in ('no_binary', 'only_binary'):
formats = getattr(finder.format_control, format_control)
args.extend(('--' + format_control.replace('_', '-'),
','.join(sorted(formats or {':none:'}))))
index_urls = finder.index_urls
if index_urls:
args.extend(['-i', index_urls[0]])
for extra_index in index_urls[1:]:
args.extend(['--extra-index-url', extra_index])
else:
args.append('--no-index')
for link in finder.find_links:
args.extend(['--find-links', link])
for host in finder.trusted_hosts:
args.extend(['--trusted-host', host])
if finder.allow_all_prereleases:
args.append('--pre')
if finder.prefer_binary:
args.append('--prefer-binary')
args.append('--')
args.extend(requirements)
with open_spinner(message) as spinner:
call_subprocess(args, spinner=spinner)
class NoOpBuildEnvironment(BuildEnvironment):
"""A no-op drop-in replacement for BuildEnvironment
"""
def __init__(self):
# type: () -> None
pass
def __enter__(self):
# type: () -> None
pass
def __exit__(
self,
exc_type, # type: Optional[Type[BaseException]]
exc_val, # type: Optional[BaseException]
exc_tb # type: Optional[TracebackType]
):
# type: (...) -> None
pass
def cleanup(self):
# type: () -> None
pass
def install_requirements(
self,
finder, # type: PackageFinder
requirements, # type: Iterable[str]
prefix_as_string, # type: str
message # type: str
):
# type: (...) -> None
raise NotImplementedError()
| Django-locallibrary/env/Lib/site-packages/pip/_internal/build_env.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_internal/build_env.py",
"repo_id": "Django-locallibrary",
"token_count": 3932
} | 2 |
"""Base Command class, and related routines"""
from __future__ import absolute_import, print_function
import logging
import logging.config
import optparse
import os
import platform
import sys
import traceback
from pip._internal.cli import cmdoptions
from pip._internal.cli.command_context import CommandContextMixIn
from pip._internal.cli.parser import (
ConfigOptionParser,
UpdatingDefaultsHelpFormatter,
)
from pip._internal.cli.status_codes import (
ERROR,
PREVIOUS_BUILD_DIR_ERROR,
UNKNOWN_ERROR,
VIRTUALENV_NOT_FOUND,
)
from pip._internal.exceptions import (
BadCommand,
CommandError,
InstallationError,
NetworkConnectionError,
PreviousBuildDirError,
SubProcessError,
UninstallationError,
)
from pip._internal.utils.deprecation import deprecated
from pip._internal.utils.filesystem import check_path_owner
from pip._internal.utils.logging import BrokenStdoutLoggingError, setup_logging
from pip._internal.utils.misc import get_prog, normalize_path
from pip._internal.utils.temp_dir import (
global_tempdir_manager,
tempdir_registry,
)
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.virtualenv import running_under_virtualenv
if MYPY_CHECK_RUNNING:
from typing import List, Optional, Tuple, Any
from optparse import Values
from pip._internal.utils.temp_dir import (
TempDirectoryTypeRegistry as TempDirRegistry
)
__all__ = ['Command']
logger = logging.getLogger(__name__)
class Command(CommandContextMixIn):
usage = None # type: str
ignore_require_venv = False # type: bool
def __init__(self, name, summary, isolated=False):
# type: (str, str, bool) -> None
super(Command, self).__init__()
parser_kw = {
'usage': self.usage,
'prog': '{} {}'.format(get_prog(), name),
'formatter': UpdatingDefaultsHelpFormatter(),
'add_help_option': False,
'name': name,
'description': self.__doc__,
'isolated': isolated,
}
self.name = name
self.summary = summary
self.parser = ConfigOptionParser(**parser_kw)
self.tempdir_registry = None # type: Optional[TempDirRegistry]
# Commands should add options to this option group
optgroup_name = '{} Options'.format(self.name.capitalize())
self.cmd_opts = optparse.OptionGroup(self.parser, optgroup_name)
# Add the general options
gen_opts = cmdoptions.make_option_group(
cmdoptions.general_group,
self.parser,
)
self.parser.add_option_group(gen_opts)
self.add_options()
def add_options(self):
# type: () -> None
pass
def handle_pip_version_check(self, options):
# type: (Values) -> None
"""
This is a no-op so that commands by default do not do the pip version
check.
"""
# Make sure we do the pip version check if the index_group options
# are present.
assert not hasattr(options, 'no_index')
def run(self, options, args):
# type: (Values, List[Any]) -> int
raise NotImplementedError
def parse_args(self, args):
# type: (List[str]) -> Tuple[Any, Any]
# factored out for testability
return self.parser.parse_args(args)
def main(self, args):
# type: (List[str]) -> int
try:
with self.main_context():
return self._main(args)
finally:
logging.shutdown()
def _main(self, args):
# type: (List[str]) -> int
# We must initialize this before the tempdir manager, otherwise the
# configuration would not be accessible by the time we clean up the
# tempdir manager.
self.tempdir_registry = self.enter_context(tempdir_registry())
# Intentionally set as early as possible so globally-managed temporary
# directories are available to the rest of the code.
self.enter_context(global_tempdir_manager())
options, args = self.parse_args(args)
# Set verbosity so that it can be used elsewhere.
self.verbosity = options.verbose - options.quiet
level_number = setup_logging(
verbosity=self.verbosity,
no_color=options.no_color,
user_log_file=options.log,
)
if (
sys.version_info[:2] == (2, 7) and
not options.no_python_version_warning
):
message = (
"pip 21.0 will drop support for Python 2.7 in January 2021. "
"More details about Python 2 support in pip can be found at "
"https://pip.pypa.io/en/latest/development/release-process/#python-2-support" # noqa
)
if platform.python_implementation() == "CPython":
message = (
"Python 2.7 reached the end of its life on January "
"1st, 2020. Please upgrade your Python as Python 2.7 "
"is no longer maintained. "
) + message
deprecated(message, replacement=None, gone_in="21.0")
if (
sys.version_info[:2] == (3, 5) and
not options.no_python_version_warning
):
message = (
"Python 3.5 reached the end of its life on September "
"13th, 2020. Please upgrade your Python as Python 3.5 "
"is no longer maintained. pip 21.0 will drop support "
"for Python 3.5 in January 2021."
)
deprecated(message, replacement=None, gone_in="21.0")
# TODO: Try to get these passing down from the command?
# without resorting to os.environ to hold these.
# This also affects isolated builds and it should.
if options.no_input:
os.environ['PIP_NO_INPUT'] = '1'
if options.exists_action:
os.environ['PIP_EXISTS_ACTION'] = ' '.join(options.exists_action)
if options.require_venv and not self.ignore_require_venv:
# If a venv is required check if it can really be found
if not running_under_virtualenv():
logger.critical(
'Could not find an activated virtualenv (required).'
)
sys.exit(VIRTUALENV_NOT_FOUND)
if options.cache_dir:
options.cache_dir = normalize_path(options.cache_dir)
if not check_path_owner(options.cache_dir):
logger.warning(
"The directory '%s' or its parent directory is not owned "
"or is not writable by the current user. The cache "
"has been disabled. Check the permissions and owner of "
"that directory. If executing pip with sudo, you may want "
"sudo's -H flag.",
options.cache_dir,
)
options.cache_dir = None
if getattr(options, "build_dir", None):
deprecated(
reason=(
"The -b/--build/--build-dir/--build-directory "
"option is deprecated."
),
replacement=(
"use the TMPDIR/TEMP/TMP environment variable, "
"possibly combined with --no-clean"
),
gone_in="20.3",
issue=8333,
)
if 'resolver' in options.unstable_features:
logger.critical(
"--unstable-feature=resolver is no longer supported, and "
"has been replaced with --use-feature=2020-resolver instead."
)
sys.exit(ERROR)
try:
status = self.run(options, args)
assert isinstance(status, int)
return status
except PreviousBuildDirError as exc:
logger.critical(str(exc))
logger.debug('Exception information:', exc_info=True)
return PREVIOUS_BUILD_DIR_ERROR
except (InstallationError, UninstallationError, BadCommand,
SubProcessError, NetworkConnectionError) as exc:
logger.critical(str(exc))
logger.debug('Exception information:', exc_info=True)
return ERROR
except CommandError as exc:
logger.critical('%s', exc)
logger.debug('Exception information:', exc_info=True)
return ERROR
except BrokenStdoutLoggingError:
# Bypass our logger and write any remaining messages to stderr
# because stdout no longer works.
print('ERROR: Pipe to stdout was broken', file=sys.stderr)
if level_number <= logging.DEBUG:
traceback.print_exc(file=sys.stderr)
return ERROR
except KeyboardInterrupt:
logger.critical('Operation cancelled by user')
logger.debug('Exception information:', exc_info=True)
return ERROR
except BaseException:
logger.critical('Exception:', exc_info=True)
return UNKNOWN_ERROR
finally:
self.handle_pip_version_check(options)
| Django-locallibrary/env/Lib/site-packages/pip/_internal/cli/base_command.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_internal/cli/base_command.py",
"repo_id": "Django-locallibrary",
"token_count": 4125
} | 3 |
from __future__ import absolute_import
import logging
import os
from pip._internal.cli import cmdoptions
from pip._internal.cli.cmdoptions import make_target_python
from pip._internal.cli.req_command import RequirementCommand, with_cleanup
from pip._internal.cli.status_codes import SUCCESS
from pip._internal.req.req_tracker import get_requirement_tracker
from pip._internal.utils.misc import ensure_dir, normalize_path, write_output
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from optparse import Values
from typing import List
logger = logging.getLogger(__name__)
class DownloadCommand(RequirementCommand):
"""
Download packages from:
- PyPI (and other indexes) using requirement specifiers.
- VCS project urls.
- Local project directories.
- Local or remote source archives.
pip also supports downloading from "requirements files", which provide
an easy way to specify a whole environment to be downloaded.
"""
usage = """
%prog [options] <requirement specifier> [package-index-options] ...
%prog [options] -r <requirements file> [package-index-options] ...
%prog [options] <vcs project url> ...
%prog [options] <local project path> ...
%prog [options] <archive url/path> ..."""
def add_options(self):
# type: () -> None
self.cmd_opts.add_option(cmdoptions.constraints())
self.cmd_opts.add_option(cmdoptions.requirements())
self.cmd_opts.add_option(cmdoptions.build_dir())
self.cmd_opts.add_option(cmdoptions.no_deps())
self.cmd_opts.add_option(cmdoptions.global_options())
self.cmd_opts.add_option(cmdoptions.no_binary())
self.cmd_opts.add_option(cmdoptions.only_binary())
self.cmd_opts.add_option(cmdoptions.prefer_binary())
self.cmd_opts.add_option(cmdoptions.src())
self.cmd_opts.add_option(cmdoptions.pre())
self.cmd_opts.add_option(cmdoptions.require_hashes())
self.cmd_opts.add_option(cmdoptions.progress_bar())
self.cmd_opts.add_option(cmdoptions.no_build_isolation())
self.cmd_opts.add_option(cmdoptions.use_pep517())
self.cmd_opts.add_option(cmdoptions.no_use_pep517())
self.cmd_opts.add_option(
'-d', '--dest', '--destination-dir', '--destination-directory',
dest='download_dir',
metavar='dir',
default=os.curdir,
help=("Download packages into <dir>."),
)
cmdoptions.add_target_python_options(self.cmd_opts)
index_opts = cmdoptions.make_option_group(
cmdoptions.index_group,
self.parser,
)
self.parser.insert_option_group(0, index_opts)
self.parser.insert_option_group(0, self.cmd_opts)
@with_cleanup
def run(self, options, args):
# type: (Values, List[str]) -> int
options.ignore_installed = True
# editable doesn't really make sense for `pip download`, but the bowels
# of the RequirementSet code require that property.
options.editables = []
cmdoptions.check_dist_restriction(options)
options.download_dir = normalize_path(options.download_dir)
ensure_dir(options.download_dir)
session = self.get_default_session(options)
target_python = make_target_python(options)
finder = self._build_package_finder(
options=options,
session=session,
target_python=target_python,
)
build_delete = (not (options.no_clean or options.build_dir))
req_tracker = self.enter_context(get_requirement_tracker())
directory = TempDirectory(
options.build_dir,
delete=build_delete,
kind="download",
globally_managed=True,
)
reqs = self.get_requirements(args, options, finder, session)
preparer = self.make_requirement_preparer(
temp_build_dir=directory,
options=options,
req_tracker=req_tracker,
session=session,
finder=finder,
download_dir=options.download_dir,
use_user_site=False,
)
resolver = self.make_resolver(
preparer=preparer,
finder=finder,
options=options,
py_version_info=options.python_version,
)
self.trace_basic_info(finder)
requirement_set = resolver.resolve(
reqs, check_supported_wheels=True
)
downloaded = ' '.join([req.name # type: ignore
for req in requirement_set.requirements.values()
if req.successfully_downloaded])
if downloaded:
write_output('Successfully downloaded %s', downloaded)
return SUCCESS
| Django-locallibrary/env/Lib/site-packages/pip/_internal/commands/download.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_internal/commands/download.py",
"repo_id": "Django-locallibrary",
"token_count": 2087
} | 4 |
import itertools
import logging
import os
import posixpath
from pip._vendor.packaging.utils import canonicalize_name
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._internal.models.index import PyPI
from pip._internal.utils.compat import has_tls
from pip._internal.utils.misc import normalize_path, redact_auth_from_url
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import List
logger = logging.getLogger(__name__)
class SearchScope(object):
"""
Encapsulates the locations that pip is configured to search.
"""
__slots__ = ["find_links", "index_urls"]
@classmethod
def create(
cls,
find_links, # type: List[str]
index_urls, # type: List[str]
):
# type: (...) -> SearchScope
"""
Create a SearchScope object after normalizing the `find_links`.
"""
# Build find_links. If an argument starts with ~, it may be
# a local file relative to a home directory. So try normalizing
# it and if it exists, use the normalized version.
# This is deliberately conservative - it might be fine just to
# blindly normalize anything starting with a ~...
built_find_links = [] # type: List[str]
for link in find_links:
if link.startswith('~'):
new_link = normalize_path(link)
if os.path.exists(new_link):
link = new_link
built_find_links.append(link)
# If we don't have TLS enabled, then WARN if anyplace we're looking
# relies on TLS.
if not has_tls():
for link in itertools.chain(index_urls, built_find_links):
parsed = urllib_parse.urlparse(link)
if parsed.scheme == 'https':
logger.warning(
'pip is configured with locations that require '
'TLS/SSL, however the ssl module in Python is not '
'available.'
)
break
return cls(
find_links=built_find_links,
index_urls=index_urls,
)
def __init__(
self,
find_links, # type: List[str]
index_urls, # type: List[str]
):
# type: (...) -> None
self.find_links = find_links
self.index_urls = index_urls
def get_formatted_locations(self):
# type: () -> str
lines = []
redacted_index_urls = []
if self.index_urls and self.index_urls != [PyPI.simple_url]:
for url in self.index_urls:
redacted_index_url = redact_auth_from_url(url)
# Parse the URL
purl = urllib_parse.urlsplit(redacted_index_url)
# URL is generally invalid if scheme and netloc is missing
# there are issues with Python and URL parsing, so this test
# is a bit crude. See bpo-20271, bpo-23505. Python doesn't
# always parse invalid URLs correctly - it should raise
# exceptions for malformed URLs
if not purl.scheme and not purl.netloc:
logger.warning(
'The index url "%s" seems invalid, '
'please provide a scheme.', redacted_index_url)
redacted_index_urls.append(redacted_index_url)
lines.append('Looking in indexes: {}'.format(
', '.join(redacted_index_urls)))
if self.find_links:
lines.append(
'Looking in links: {}'.format(', '.join(
redact_auth_from_url(url) for url in self.find_links))
)
return '\n'.join(lines)
def get_index_urls_locations(self, project_name):
# type: (str) -> List[str]
"""Returns the locations found via self.index_urls
Checks the url_name on the main (first in the list) index and
use this url_name to produce all locations
"""
def mkurl_pypi_url(url):
# type: (str) -> str
loc = posixpath.join(
url,
urllib_parse.quote(canonicalize_name(project_name)))
# For maximum compatibility with easy_install, ensure the path
# ends in a trailing slash. Although this isn't in the spec
# (and PyPI can handle it without the slash) some other index
# implementations might break if they relied on easy_install's
# behavior.
if not loc.endswith('/'):
loc = loc + '/'
return loc
return [mkurl_pypi_url(url) for url in self.index_urls]
| Django-locallibrary/env/Lib/site-packages/pip/_internal/models/search_scope.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_internal/models/search_scope.py",
"repo_id": "Django-locallibrary",
"token_count": 2208
} | 5 |
"""Lazy ZIP over HTTP"""
__all__ = ['HTTPRangeRequestUnsupported', 'dist_from_wheel_url']
from bisect import bisect_left, bisect_right
from contextlib import contextmanager
from tempfile import NamedTemporaryFile
from zipfile import BadZipfile, ZipFile
from pip._vendor.requests.models import CONTENT_CHUNK_SIZE
from pip._vendor.six.moves import range
from pip._internal.network.utils import (
HEADERS,
raise_for_status,
response_chunks,
)
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
from pip._internal.utils.wheel import pkg_resources_distribution_for_wheel
if MYPY_CHECK_RUNNING:
from typing import Any, Dict, Iterator, List, Optional, Tuple
from pip._vendor.pkg_resources import Distribution
from pip._vendor.requests.models import Response
from pip._internal.network.session import PipSession
class HTTPRangeRequestUnsupported(Exception):
pass
def dist_from_wheel_url(name, url, session):
# type: (str, str, PipSession) -> Distribution
"""Return a pkg_resources.Distribution from the given wheel URL.
    This uses HTTP range requests to only fetch the portion of the wheel
containing metadata, just enough for the object to be constructed.
If such requests are not supported, HTTPRangeRequestUnsupported
is raised.
"""
with LazyZipOverHTTP(url, session) as wheel:
# For read-only ZIP files, ZipFile only needs methods read,
# seek, seekable and tell, not the whole IO protocol.
zip_file = ZipFile(wheel) # type: ignore
# After context manager exit, wheel.name
# is an invalid file by intention.
return pkg_resources_distribution_for_wheel(zip_file, name, wheel.name)
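# Illustrative usage sketch (the wheel URL is hypothetical; the session must
# be a configured PipSession from pip._internal.network.session):
#
#     session = PipSession()
#     dist = dist_from_wheel_url(
#         "pip", "https://example.com/pip-20.2-py2.py3-none-any.whl", session,
#     )
#     print(dist.version)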
class LazyZipOverHTTP(object):
"""File-like object mapped to a ZIP file over HTTP.
This uses HTTP range requests to lazily fetch the file's content,
which is supposed to be fed to ZipFile. If such requests are not
supported by the server, raise HTTPRangeRequestUnsupported
during initialization.
"""
def __init__(self, url, session, chunk_size=CONTENT_CHUNK_SIZE):
# type: (str, PipSession, int) -> None
head = session.head(url, headers=HEADERS)
raise_for_status(head)
assert head.status_code == 200
self._session, self._url, self._chunk_size = session, url, chunk_size
self._length = int(head.headers['Content-Length'])
self._file = NamedTemporaryFile()
self.truncate(self._length)
self._left = [] # type: List[int]
self._right = [] # type: List[int]
if 'bytes' not in head.headers.get('Accept-Ranges', 'none'):
raise HTTPRangeRequestUnsupported('range request is not supported')
self._check_zip()
@property
def mode(self):
# type: () -> str
"""Opening mode, which is always rb."""
return 'rb'
@property
def name(self):
# type: () -> str
"""Path to the underlying file."""
return self._file.name
def seekable(self):
# type: () -> bool
"""Return whether random access is supported, which is True."""
return True
def close(self):
# type: () -> None
"""Close the file."""
self._file.close()
@property
def closed(self):
# type: () -> bool
"""Whether the file is closed."""
return self._file.closed
def read(self, size=-1):
# type: (int) -> bytes
"""Read up to size bytes from the object and return them.
As a convenience, if size is unspecified or -1,
all bytes until EOF are returned. Fewer than
size bytes may be returned if EOF is reached.
"""
download_size = max(size, self._chunk_size)
start, length = self.tell(), self._length
stop = length if size < 0 else min(start+download_size, length)
start = max(0, stop-download_size)
self._download(start, stop-1)
return self._file.read(size)
def readable(self):
# type: () -> bool
"""Return whether the file is readable, which is True."""
return True
def seek(self, offset, whence=0):
# type: (int, int) -> int
"""Change stream position and return the new absolute position.
Seek to offset relative position indicated by whence:
* 0: Start of stream (the default). pos should be >= 0;
* 1: Current position - pos may be negative;
* 2: End of stream - pos usually negative.
"""
return self._file.seek(offset, whence)
def tell(self):
# type: () -> int
"""Return the current possition."""
return self._file.tell()
def truncate(self, size=None):
# type: (Optional[int]) -> int
"""Resize the stream to the given size in bytes.
If size is unspecified resize to the current position.
The current stream position isn't changed.
Return the new file size.
"""
return self._file.truncate(size)
def writable(self):
# type: () -> bool
"""Return False."""
return False
def __enter__(self):
# type: () -> LazyZipOverHTTP
self._file.__enter__()
return self
def __exit__(self, *exc):
# type: (*Any) -> Optional[bool]
return self._file.__exit__(*exc)
@contextmanager
def _stay(self):
# type: ()-> Iterator[None]
"""Return a context manager keeping the position.
At the end of the block, seek back to original position.
"""
pos = self.tell()
try:
yield
finally:
self.seek(pos)
def _check_zip(self):
# type: () -> None
"""Check and download until the file is a valid ZIP."""
end = self._length - 1
for start in reversed(range(0, end, self._chunk_size)):
self._download(start, end)
with self._stay():
try:
# For read-only ZIP files, ZipFile only needs
# methods read, seek, seekable and tell.
ZipFile(self) # type: ignore
except BadZipfile:
pass
else:
break
def _stream_response(self, start, end, base_headers=HEADERS):
# type: (int, int, Dict[str, str]) -> Response
"""Return HTTP response to a range request from start to end."""
headers = base_headers.copy()
headers['Range'] = 'bytes={}-{}'.format(start, end)
# TODO: Get range requests to be correctly cached
headers['Cache-Control'] = 'no-cache'
return self._session.get(self._url, headers=headers, stream=True)
def _merge(self, start, end, left, right):
# type: (int, int, int, int) -> Iterator[Tuple[int, int]]
"""Return an iterator of intervals to be fetched.
Args:
start (int): Start of needed interval
end (int): End of needed interval
left (int): Index of first overlapping downloaded data
right (int): Index after last overlapping downloaded data
"""
lslice, rslice = self._left[left:right], self._right[left:right]
i = start = min([start]+lslice[:1])
end = max([end]+rslice[-1:])
for j, k in zip(lslice, rslice):
if j > i:
yield i, j-1
i = k + 1
if i <= end:
yield i, end
self._left[left:right], self._right[left:right] = [start], [end]
def _download(self, start, end):
# type: (int, int) -> None
"""Download bytes from start to end inclusively."""
with self._stay():
left = bisect_left(self._right, start)
right = bisect_right(self._left, end)
for start, end in self._merge(start, end, left, right):
response = self._stream_response(start, end)
response.raise_for_status()
self.seek(start)
for chunk in response_chunks(response, self._chunk_size):
self._file.write(chunk)
| Django-locallibrary/env/Lib/site-packages/pip/_internal/network/lazy_wheel.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_internal/network/lazy_wheel.py",
"repo_id": "Django-locallibrary",
"token_count": 3333
} | 6 |
"""Metadata generation logic for legacy source distributions.
"""
import logging
import os
from pip._internal.exceptions import InstallationError
from pip._internal.utils.setuptools_build import make_setuptools_egg_info_args
from pip._internal.utils.subprocess import call_subprocess
from pip._internal.utils.temp_dir import TempDirectory
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from pip._internal.build_env import BuildEnvironment
logger = logging.getLogger(__name__)
def _find_egg_info(directory):
# type: (str) -> str
"""Find an .egg-info subdirectory in `directory`.
"""
filenames = [
f for f in os.listdir(directory) if f.endswith(".egg-info")
]
if not filenames:
raise InstallationError(
"No .egg-info directory found in {}".format(directory)
)
if len(filenames) > 1:
raise InstallationError(
"More than one .egg-info directory found in {}".format(
directory
)
)
return os.path.join(directory, filenames[0])
def generate_metadata(
build_env, # type: BuildEnvironment
setup_py_path, # type: str
source_dir, # type: str
isolated, # type: bool
details, # type: str
):
# type: (...) -> str
"""Generate metadata using setup.py-based defacto mechanisms.
Returns the generated metadata directory.
"""
logger.debug(
'Running setup.py (path:%s) egg_info for package %s',
setup_py_path, details,
)
egg_info_dir = TempDirectory(
kind="pip-egg-info", globally_managed=True
).path
args = make_setuptools_egg_info_args(
setup_py_path,
egg_info_dir=egg_info_dir,
no_user_config=isolated,
)
with build_env:
call_subprocess(
args,
cwd=source_dir,
command_desc='python setup.py egg_info',
)
# Return the .egg-info directory.
return _find_egg_info(egg_info_dir)
| Django-locallibrary/env/Lib/site-packages/pip/_internal/operations/build/metadata_legacy.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_internal/operations/build/metadata_legacy.py",
"repo_id": "Django-locallibrary",
"token_count": 817
} | 7 |
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import Callable, List
from pip._internal.req.req_install import InstallRequirement
from pip._internal.req.req_set import RequirementSet
InstallRequirementProvider = Callable[
[str, InstallRequirement], InstallRequirement
]
class BaseResolver(object):
def resolve(self, root_reqs, check_supported_wheels):
# type: (List[InstallRequirement], bool) -> RequirementSet
raise NotImplementedError()
def get_installation_order(self, req_set):
# type: (RequirementSet) -> List[InstallRequirement]
raise NotImplementedError()
| Django-locallibrary/env/Lib/site-packages/pip/_internal/resolution/base.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_internal/resolution/base.py",
"repo_id": "Django-locallibrary",
"token_count": 240
} | 8 |
from pip._vendor.packaging.specifiers import SpecifierSet
from pip._vendor.resolvelib.providers import AbstractProvider
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import (
Any,
Dict,
Iterable,
Optional,
Sequence,
Set,
Tuple,
Union,
)
from .base import Requirement, Candidate
from .factory import Factory
# Notes on the relationship between the provider, the factory, and the
# candidate and requirement classes.
#
# The provider is a direct implementation of the resolvelib class. Its role
# is to deliver the API that resolvelib expects.
#
# Rather than work with completely abstract "requirement" and "candidate"
# concepts as resolvelib does, pip has concrete classes implementing these two
# ideas. The API of Requirement and Candidate objects are defined in the base
# classes, but essentially map fairly directly to the equivalent provider
# methods. In particular, `find_matches` and `is_satisfied_by` are
# requirement methods, and `get_dependencies` is a candidate method.
#
# The factory is the interface to pip's internal mechanisms. It is stateless,
# and is created by the resolver and held as a property of the provider. It is
# responsible for creating Requirement and Candidate objects, and provides
# services to those objects (access to pip's finder and preparer).
class PipProvider(AbstractProvider):
def __init__(
self,
factory, # type: Factory
constraints, # type: Dict[str, SpecifierSet]
ignore_dependencies, # type: bool
upgrade_strategy, # type: str
user_requested, # type: Set[str]
):
# type: (...) -> None
self._factory = factory
self._constraints = constraints
self._ignore_dependencies = ignore_dependencies
self._upgrade_strategy = upgrade_strategy
self.user_requested = user_requested
def _sort_matches(self, matches):
# type: (Iterable[Candidate]) -> Sequence[Candidate]
# The requirement is responsible for returning a sequence of potential
# candidates, one per version. The provider handles the logic of
# deciding the order in which these candidates should be passed to
# the resolver.
# The `matches` argument is a sequence of candidates, one per version,
# which are potential options to be installed. The requirement will
# have already sorted out whether to give us an already-installed
# candidate or a version from PyPI (i.e., it will deal with options
# like --force-reinstall and --ignore-installed).
# We now work out the correct order.
#
# 1. If no other considerations apply, later versions take priority.
# 2. An already installed distribution is preferred over any other,
# unless the user has requested an upgrade.
# Upgrades are allowed when:
# * The --upgrade flag is set, and
# - The project was specified on the command line, or
# - The project is a dependency and the "eager" upgrade strategy
# was requested.
def _eligible_for_upgrade(name):
# type: (str) -> bool
"""Are upgrades allowed for this project?
This checks the upgrade strategy, and whether the project was one
that the user specified in the command line, in order to decide
whether we should upgrade if there's a newer version available.
(Note that we don't need access to the `--upgrade` flag, because
an upgrade strategy of "to-satisfy-only" means that `--upgrade`
was not specified).
"""
if self._upgrade_strategy == "eager":
return True
elif self._upgrade_strategy == "only-if-needed":
return (name in self.user_requested)
return False
def sort_key(c):
# type: (Candidate) -> int
"""Return a sort key for the matches.
The highest priority should be given to installed candidates that
are not eligible for upgrade. We use the integer value in the first
part of the key to sort these before other candidates.
We only pull the installed candidate to the bottom (i.e. most
preferred), but otherwise keep the ordering returned by the
requirement. The requirement is responsible for returning a list
otherwise sorted for the resolver, taking account for versions
and binary preferences as specified by the user.
"""
if c.is_installed and not _eligible_for_upgrade(c.name):
return 1
return 0
return sorted(matches, key=sort_key)
def identify(self, dependency):
# type: (Union[Requirement, Candidate]) -> str
return dependency.name
def get_preference(
self,
resolution, # type: Optional[Candidate]
candidates, # type: Sequence[Candidate]
information # type: Sequence[Tuple[Requirement, Candidate]]
):
# type: (...) -> Any
# Use the "usual" value for now
return len(candidates)
def find_matches(self, requirements):
# type: (Sequence[Requirement]) -> Iterable[Candidate]
if not requirements:
return []
constraint = self._constraints.get(
requirements[0].name, SpecifierSet(),
)
candidates = self._factory.find_candidates(requirements, constraint)
return reversed(self._sort_matches(candidates))
def is_satisfied_by(self, requirement, candidate):
# type: (Requirement, Candidate) -> bool
return requirement.is_satisfied_by(candidate)
def get_dependencies(self, candidate):
# type: (Candidate) -> Sequence[Requirement]
with_requires = not self._ignore_dependencies
return [
r
for r in candidate.iter_dependencies(with_requires)
if r is not None
]
| Django-locallibrary/env/Lib/site-packages/pip/_internal/resolution/resolvelib/provider.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_internal/resolution/resolvelib/provider.py",
"repo_id": "Django-locallibrary",
"token_count": 2276
} | 9 |
# The following comment should be removed at some point in the future.
# mypy: disallow-untyped-defs=False
from __future__ import absolute_import
import contextlib
import errno
import logging
import logging.handlers
import os
import sys
from logging import Filter, getLogger
from pip._vendor.six import PY2
from pip._internal.utils.compat import WINDOWS
from pip._internal.utils.deprecation import DEPRECATION_MSG_PREFIX
from pip._internal.utils.misc import ensure_dir
try:
import threading
except ImportError:
import dummy_threading as threading # type: ignore
try:
# Use "import as" and set colorama in the else clause to avoid mypy
# errors and get the following correct revealed type for colorama:
# `Union[_importlib_modulespec.ModuleType, None]`
# Otherwise, we get an error like the following in the except block:
# > Incompatible types in assignment (expression has type "None",
# variable has type Module)
# TODO: eliminate the need to use "import as" once mypy addresses some
# of its issues with conditional imports. Here is an umbrella issue:
# https://github.com/python/mypy/issues/1297
from pip._vendor import colorama as _colorama
# Lots of different errors can come from this, including SystemError and
# ImportError.
except Exception:
colorama = None
else:
# Import Fore explicitly rather than accessing below as colorama.Fore
# to avoid the following error running mypy:
# > Module has no attribute "Fore"
# TODO: eliminate the need to import Fore once mypy addresses some of its
# issues with conditional imports. This particular case could be an
# instance of the following issue (but also see the umbrella issue above):
# https://github.com/python/mypy/issues/3500
from pip._vendor.colorama import Fore
colorama = _colorama
_log_state = threading.local()
subprocess_logger = getLogger('pip.subprocessor')
class BrokenStdoutLoggingError(Exception):
"""
Raised if BrokenPipeError occurs for the stdout stream while logging.
"""
pass
# BrokenPipeError does not exist in Python 2 and, in addition, manifests
# differently in Windows and non-Windows.
if WINDOWS:
# In Windows, a broken pipe can show up as EINVAL rather than EPIPE:
# https://bugs.python.org/issue19612
# https://bugs.python.org/issue30418
if PY2:
def _is_broken_pipe_error(exc_class, exc):
"""See the docstring for non-Windows Python 3 below."""
return (exc_class is IOError and
exc.errno in (errno.EINVAL, errno.EPIPE))
else:
# In Windows, a broken pipe IOError became OSError in Python 3.
def _is_broken_pipe_error(exc_class, exc):
"""See the docstring for non-Windows Python 3 below."""
return ((exc_class is BrokenPipeError) or # noqa: F821
(exc_class is OSError and
exc.errno in (errno.EINVAL, errno.EPIPE)))
elif PY2:
def _is_broken_pipe_error(exc_class, exc):
"""See the docstring for non-Windows Python 3 below."""
return (exc_class is IOError and exc.errno == errno.EPIPE)
else:
# Then we are in the non-Windows Python 3 case.
def _is_broken_pipe_error(exc_class, exc):
"""
Return whether an exception is a broken pipe error.
Args:
exc_class: an exception class.
exc: an exception instance.
"""
return (exc_class is BrokenPipeError) # noqa: F821
@contextlib.contextmanager
def indent_log(num=2):
"""
A context manager which will cause the log output to be indented for any
log messages emitted inside it.
"""
# For thread-safety
_log_state.indentation = get_indentation()
_log_state.indentation += num
try:
yield
finally:
_log_state.indentation -= num
def get_indentation():
return getattr(_log_state, 'indentation', 0)
class IndentingFormatter(logging.Formatter):
def __init__(self, *args, **kwargs):
"""
A logging.Formatter that obeys the indent_log() context manager.
:param add_timestamp: A bool indicating output lines should be prefixed
with their record's timestamp.
"""
self.add_timestamp = kwargs.pop("add_timestamp", False)
super(IndentingFormatter, self).__init__(*args, **kwargs)
def get_message_start(self, formatted, levelno):
"""
Return the start of the formatted log message (not counting the
prefix to add to each line).
"""
if levelno < logging.WARNING:
return ''
if formatted.startswith(DEPRECATION_MSG_PREFIX):
# Then the message already has a prefix. We don't want it to
# look like "WARNING: DEPRECATION: ...."
return ''
if levelno < logging.ERROR:
return 'WARNING: '
return 'ERROR: '
def format(self, record):
"""
Calls the standard formatter, but will indent all of the log message
lines by our current indentation level.
"""
formatted = super(IndentingFormatter, self).format(record)
message_start = self.get_message_start(formatted, record.levelno)
formatted = message_start + formatted
prefix = ''
if self.add_timestamp:
# TODO: Use Formatter.default_time_format after dropping PY2.
t = self.formatTime(record, "%Y-%m-%dT%H:%M:%S")
prefix = '{t},{record.msecs:03.0f} '.format(**locals())
prefix += " " * get_indentation()
formatted = "".join([
prefix + line
for line in formatted.splitlines(True)
])
return formatted
def _color_wrap(*colors):
def wrapped(inp):
return "".join(list(colors) + [inp, colorama.Style.RESET_ALL])
return wrapped
class ColorizedStreamHandler(logging.StreamHandler):
# Don't build up a list of colors if we don't have colorama
if colorama:
COLORS = [
# This needs to be in order from highest logging level to lowest.
(logging.ERROR, _color_wrap(Fore.RED)),
(logging.WARNING, _color_wrap(Fore.YELLOW)),
]
else:
COLORS = []
def __init__(self, stream=None, no_color=None):
logging.StreamHandler.__init__(self, stream)
self._no_color = no_color
if WINDOWS and colorama:
self.stream = colorama.AnsiToWin32(self.stream)
def _using_stdout(self):
"""
Return whether the handler is using sys.stdout.
"""
if WINDOWS and colorama:
# Then self.stream is an AnsiToWin32 object.
return self.stream.wrapped is sys.stdout
return self.stream is sys.stdout
def should_color(self):
# Don't colorize things if we do not have colorama or if told not to
if not colorama or self._no_color:
return False
real_stream = (
self.stream if not isinstance(self.stream, colorama.AnsiToWin32)
else self.stream.wrapped
)
# If the stream is a tty we should color it
if hasattr(real_stream, "isatty") and real_stream.isatty():
return True
# If we have an ANSI term we should color it
if os.environ.get("TERM") == "ANSI":
return True
# If anything else we should not color it
return False
def format(self, record):
msg = logging.StreamHandler.format(self, record)
if self.should_color():
for level, color in self.COLORS:
if record.levelno >= level:
msg = color(msg)
break
return msg
# The logging module says handleError() can be customized.
def handleError(self, record):
exc_class, exc = sys.exc_info()[:2]
# If a broken pipe occurred while calling write() or flush() on the
# stdout stream in logging's Handler.emit(), then raise our special
# exception so we can handle it in main() instead of logging the
# broken pipe error and continuing.
if (exc_class and self._using_stdout() and
_is_broken_pipe_error(exc_class, exc)):
raise BrokenStdoutLoggingError()
return super(ColorizedStreamHandler, self).handleError(record)
class BetterRotatingFileHandler(logging.handlers.RotatingFileHandler):
def _open(self):
ensure_dir(os.path.dirname(self.baseFilename))
return logging.handlers.RotatingFileHandler._open(self)
class MaxLevelFilter(Filter):
def __init__(self, level):
self.level = level
def filter(self, record):
return record.levelno < self.level
class ExcludeLoggerFilter(Filter):
"""
A logging Filter that excludes records from a logger (or its children).
"""
def filter(self, record):
# The base Filter class allows only records from a logger (or its
# children).
return not super(ExcludeLoggerFilter, self).filter(record)
def setup_logging(verbosity, no_color, user_log_file):
"""Configures and sets up all of the logging
Returns the requested logging level, as its integer value.
"""
# Determine the level to be logging at.
if verbosity >= 1:
level = "DEBUG"
elif verbosity == -1:
level = "WARNING"
elif verbosity == -2:
level = "ERROR"
elif verbosity <= -3:
level = "CRITICAL"
else:
level = "INFO"
level_number = getattr(logging, level)
# The "root" logger should match the "console" level *unless* we also need
# to log to a user log file.
include_user_log = user_log_file is not None
if include_user_log:
additional_log_file = user_log_file
root_level = "DEBUG"
else:
additional_log_file = "/dev/null"
root_level = level
# Disable any logging besides WARNING unless we have DEBUG level logging
# enabled for vendored libraries.
vendored_log_level = "WARNING" if level in ["INFO", "ERROR"] else "DEBUG"
# Shorthands for clarity
log_streams = {
"stdout": "ext://sys.stdout",
"stderr": "ext://sys.stderr",
}
handler_classes = {
"stream": "pip._internal.utils.logging.ColorizedStreamHandler",
"file": "pip._internal.utils.logging.BetterRotatingFileHandler",
}
handlers = ["console", "console_errors", "console_subprocess"] + (
["user_log"] if include_user_log else []
)
logging.config.dictConfig({
"version": 1,
"disable_existing_loggers": False,
"filters": {
"exclude_warnings": {
"()": "pip._internal.utils.logging.MaxLevelFilter",
"level": logging.WARNING,
},
"restrict_to_subprocess": {
"()": "logging.Filter",
"name": subprocess_logger.name,
},
"exclude_subprocess": {
"()": "pip._internal.utils.logging.ExcludeLoggerFilter",
"name": subprocess_logger.name,
},
},
"formatters": {
"indent": {
"()": IndentingFormatter,
"format": "%(message)s",
},
"indent_with_timestamp": {
"()": IndentingFormatter,
"format": "%(message)s",
"add_timestamp": True,
},
},
"handlers": {
"console": {
"level": level,
"class": handler_classes["stream"],
"no_color": no_color,
"stream": log_streams["stdout"],
"filters": ["exclude_subprocess", "exclude_warnings"],
"formatter": "indent",
},
"console_errors": {
"level": "WARNING",
"class": handler_classes["stream"],
"no_color": no_color,
"stream": log_streams["stderr"],
"filters": ["exclude_subprocess"],
"formatter": "indent",
},
# A handler responsible for logging to the console messages
# from the "subprocessor" logger.
"console_subprocess": {
"level": level,
"class": handler_classes["stream"],
"no_color": no_color,
"stream": log_streams["stderr"],
"filters": ["restrict_to_subprocess"],
"formatter": "indent",
},
"user_log": {
"level": "DEBUG",
"class": handler_classes["file"],
"filename": additional_log_file,
"delay": True,
"formatter": "indent_with_timestamp",
},
},
"root": {
"level": root_level,
"handlers": handlers,
},
"loggers": {
"pip._vendor": {
"level": vendored_log_level
}
},
})
return level_number
| Django-locallibrary/env/Lib/site-packages/pip/_internal/utils/logging.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_internal/utils/logging.py",
"repo_id": "Django-locallibrary",
"token_count": 5605
} | 10 |
import logging
from pip._vendor import requests
from pip._vendor.cachecontrol.adapter import CacheControlAdapter
from pip._vendor.cachecontrol.cache import DictCache
from pip._vendor.cachecontrol.controller import logger
from argparse import ArgumentParser
def setup_logging():
logger.setLevel(logging.DEBUG)
handler = logging.StreamHandler()
logger.addHandler(handler)
def get_session():
adapter = CacheControlAdapter(
DictCache(), cache_etags=True, serializer=None, heuristic=None
)
sess = requests.Session()
sess.mount("http://", adapter)
sess.mount("https://", adapter)
sess.cache_controller = adapter.controller
return sess
def get_args():
parser = ArgumentParser()
parser.add_argument("url", help="The URL to try and cache")
return parser.parse_args()
def main(args=None):
args = get_args()
sess = get_session()
# Make a request to get a response
resp = sess.get(args.url)
# Turn on logging
setup_logging()
# try setting the cache
sess.cache_controller.cache_response(resp.request, resp.raw)
# Now try to get it
if sess.cache_controller.cached_request(resp.request):
print("Cached!")
else:
print("Not cached :(")
if __name__ == "__main__":
main()
| Django-locallibrary/env/Lib/site-packages/pip/_vendor/cachecontrol/_cmd.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/cachecontrol/_cmd.py",
"repo_id": "Django-locallibrary",
"token_count": 456
} | 11 |
import argparse
from pip._vendor.certifi import contents, where
parser = argparse.ArgumentParser()
parser.add_argument("-c", "--contents", action="store_true")
args = parser.parse_args()
if args.contents:
print(contents())
else:
print(where())
| Django-locallibrary/env/Lib/site-packages/pip/_vendor/certifi/__main__.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/certifi/__main__.py",
"repo_id": "Django-locallibrary",
"token_count": 87
} | 12 |
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Communicator client code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .euctwfreq import (EUCTW_CHAR_TO_FREQ_ORDER, EUCTW_TABLE_SIZE,
EUCTW_TYPICAL_DISTRIBUTION_RATIO)
from .euckrfreq import (EUCKR_CHAR_TO_FREQ_ORDER, EUCKR_TABLE_SIZE,
EUCKR_TYPICAL_DISTRIBUTION_RATIO)
from .gb2312freq import (GB2312_CHAR_TO_FREQ_ORDER, GB2312_TABLE_SIZE,
GB2312_TYPICAL_DISTRIBUTION_RATIO)
from .big5freq import (BIG5_CHAR_TO_FREQ_ORDER, BIG5_TABLE_SIZE,
BIG5_TYPICAL_DISTRIBUTION_RATIO)
from .jisfreq import (JIS_CHAR_TO_FREQ_ORDER, JIS_TABLE_SIZE,
JIS_TYPICAL_DISTRIBUTION_RATIO)
class CharDistributionAnalysis(object):
ENOUGH_DATA_THRESHOLD = 1024
SURE_YES = 0.99
SURE_NO = 0.01
MINIMUM_DATA_THRESHOLD = 3
def __init__(self):
# Mapping table to get frequency order from char order (get from
# GetOrder())
self._char_to_freq_order = None
self._table_size = None # Size of above table
# This is a constant value which varies from language to language,
# used in calculating confidence. See
# http://www.mozilla.org/projects/intl/UniversalCharsetDetection.html
# for further detail.
self.typical_distribution_ratio = None
self._done = None
self._total_chars = None
self._freq_chars = None
self.reset()
def reset(self):
"""reset analyser, clear any state"""
# If this flag is set to True, detection is done and conclusion has
# been made
self._done = False
self._total_chars = 0 # Total characters encountered
# The number of characters whose frequency order is less than 512
self._freq_chars = 0
def feed(self, char, char_len):
"""feed a character with known length"""
if char_len == 2:
            # we only care about 2-byte characters in our distribution analysis
order = self.get_order(char)
else:
order = -1
if order >= 0:
self._total_chars += 1
# order is valid
if order < self._table_size:
                if self._char_to_freq_order[order] < 512:
self._freq_chars += 1
def get_confidence(self):
"""return confidence based on existing data"""
# if we didn't receive any character in our consideration range,
# return negative answer
if self._total_chars <= 0 or self._freq_chars <= self.MINIMUM_DATA_THRESHOLD:
return self.SURE_NO
if self._total_chars != self._freq_chars:
r = (self._freq_chars / ((self._total_chars - self._freq_chars)
* self.typical_distribution_ratio))
if r < self.SURE_YES:
return r
# normalize confidence (we don't want to be 100% sure)
return self.SURE_YES
def got_enough_data(self):
# It is not necessary to receive all data to draw conclusion.
# For charset detection, certain amount of data is enough
return self._total_chars > self.ENOUGH_DATA_THRESHOLD
def get_order(self, byte_str):
# We do not handle characters based on the original encoding string,
# but convert this encoding string to a number, here called order.
# This allows multiple encodings of a language to share one frequency
# table.
return -1
class EUCTWDistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
super(EUCTWDistributionAnalysis, self).__init__()
self._char_to_freq_order = EUCTW_CHAR_TO_FREQ_ORDER
self._table_size = EUCTW_TABLE_SIZE
self.typical_distribution_ratio = EUCTW_TYPICAL_DISTRIBUTION_RATIO
def get_order(self, byte_str):
# for euc-TW encoding, we are interested
# first byte range: 0xc4 -- 0xfe
# second byte range: 0xa1 -- 0xfe
# no validation needed here. State machine has done that
first_char = byte_str[0]
if first_char >= 0xC4:
return 94 * (first_char - 0xC4) + byte_str[1] - 0xA1
else:
return -1
class EUCKRDistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
super(EUCKRDistributionAnalysis, self).__init__()
self._char_to_freq_order = EUCKR_CHAR_TO_FREQ_ORDER
self._table_size = EUCKR_TABLE_SIZE
self.typical_distribution_ratio = EUCKR_TYPICAL_DISTRIBUTION_RATIO
def get_order(self, byte_str):
# for euc-KR encoding, we are interested
# first byte range: 0xb0 -- 0xfe
# second byte range: 0xa1 -- 0xfe
# no validation needed here. State machine has done that
first_char = byte_str[0]
if first_char >= 0xB0:
return 94 * (first_char - 0xB0) + byte_str[1] - 0xA1
else:
return -1
class GB2312DistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
super(GB2312DistributionAnalysis, self).__init__()
self._char_to_freq_order = GB2312_CHAR_TO_FREQ_ORDER
self._table_size = GB2312_TABLE_SIZE
self.typical_distribution_ratio = GB2312_TYPICAL_DISTRIBUTION_RATIO
def get_order(self, byte_str):
# for GB2312 encoding, we are interested
# first byte range: 0xb0 -- 0xfe
# second byte range: 0xa1 -- 0xfe
# no validation needed here. State machine has done that
first_char, second_char = byte_str[0], byte_str[1]
if (first_char >= 0xB0) and (second_char >= 0xA1):
return 94 * (first_char - 0xB0) + second_char - 0xA1
else:
return -1
class Big5DistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
super(Big5DistributionAnalysis, self).__init__()
self._char_to_freq_order = BIG5_CHAR_TO_FREQ_ORDER
self._table_size = BIG5_TABLE_SIZE
self.typical_distribution_ratio = BIG5_TYPICAL_DISTRIBUTION_RATIO
def get_order(self, byte_str):
# for big5 encoding, we are interested
# first byte range: 0xa4 -- 0xfe
# second byte range: 0x40 -- 0x7e , 0xa1 -- 0xfe
# no validation needed here. State machine has done that
first_char, second_char = byte_str[0], byte_str[1]
if first_char >= 0xA4:
if second_char >= 0xA1:
return 157 * (first_char - 0xA4) + second_char - 0xA1 + 63
else:
return 157 * (first_char - 0xA4) + second_char - 0x40
else:
return -1
class SJISDistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
super(SJISDistributionAnalysis, self).__init__()
self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER
self._table_size = JIS_TABLE_SIZE
self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO
def get_order(self, byte_str):
# for sjis encoding, we are interested
# first byte range: 0x81 -- 0x9f , 0xe0 -- 0xfe
        # second byte range: 0x40 -- 0x7e, 0x81 -- 0xfe
# no validation needed here. State machine has done that
first_char, second_char = byte_str[0], byte_str[1]
if (first_char >= 0x81) and (first_char <= 0x9F):
order = 188 * (first_char - 0x81)
elif (first_char >= 0xE0) and (first_char <= 0xEF):
order = 188 * (first_char - 0xE0 + 31)
else:
return -1
order = order + second_char - 0x40
if second_char > 0x7F:
order = -1
return order
class EUCJPDistributionAnalysis(CharDistributionAnalysis):
def __init__(self):
super(EUCJPDistributionAnalysis, self).__init__()
self._char_to_freq_order = JIS_CHAR_TO_FREQ_ORDER
self._table_size = JIS_TABLE_SIZE
self.typical_distribution_ratio = JIS_TYPICAL_DISTRIBUTION_RATIO
def get_order(self, byte_str):
# for euc-JP encoding, we are interested
# first byte range: 0xa0 -- 0xfe
# second byte range: 0xa1 -- 0xfe
# no validation needed here. State machine has done that
char = byte_str[0]
if char >= 0xA0:
            return 94 * (char - 0xA1) + byte_str[1] - 0xA1
else:
return -1
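# --- Editor's usage sketch (not part of chardet) -----------------------------
# get_confidence() returns freq_chars / ((total_chars - freq_chars) * ratio),
# i.e. the ratio of frequently used characters to rare ones, scaled by the
# language's typical distribution ratio. The guarded demo below feeds a few
# valid GB2312 byte pairs and prints the running confidence. Note that this
# module uses relative imports, so the demo only runs in package context
# (python -m pip._vendor.chardet.chardistribution); it also peeks at private
# counters purely for illustration.
if __name__ == "__main__":
    analyser = GB2312DistributionAnalysis()
    for pair in (b"\xb0\xa1", b"\xb0\xa2", b"\xb1\xb0", b"\xc4\xe3"):
        analyser.feed(bytearray(pair), 2)
    print("total two-byte chars:", analyser._total_chars)
    print("confidence          :", analyser.get_confidence())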
| Django-locallibrary/env/Lib/site-packages/pip/_vendor/chardet/chardistribution.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/chardet/chardistribution.py",
"repo_id": "Django-locallibrary",
"token_count": 4053
} | 13 |
######################## BEGIN LICENSE BLOCK ########################
# The Original Code is Mozilla Universal charset detector code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 2001
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
# Shy Shalom - original C code
# Proofpoint, Inc.
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .charsetgroupprober import CharSetGroupProber
from .utf8prober import UTF8Prober
from .sjisprober import SJISProber
from .eucjpprober import EUCJPProber
from .gb2312prober import GB2312Prober
from .euckrprober import EUCKRProber
from .cp949prober import CP949Prober
from .big5prober import Big5Prober
from .euctwprober import EUCTWProber
class MBCSGroupProber(CharSetGroupProber):
def __init__(self, lang_filter=None):
super(MBCSGroupProber, self).__init__(lang_filter=lang_filter)
self.probers = [
UTF8Prober(),
SJISProber(),
EUCJPProber(),
GB2312Prober(),
EUCKRProber(),
CP949Prober(),
Big5Prober(),
EUCTWProber()
]
self.reset()
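# --- Editor's usage sketch (not part of chardet) -----------------------------
# Each sub-prober consumes the same byte stream and the group prober reports
# whichever one is most confident. Guarded demo (package context only, e.g.
# python -m pip._vendor.chardet.mbcsgroupprober); with UTF-8 input the
# UTF8Prober is expected to win:
if __name__ == "__main__":
    prober = MBCSGroupProber()
    prober.feed(bytearray(u"\u4f60\u597d\uff0c\u4e16\u754c".encode("utf-8")))
    print(prober.charset_name, prober.get_confidence())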
| Django-locallibrary/env/Lib/site-packages/pip/_vendor/chardet/mbcsgroupprober.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/chardet/mbcsgroupprober.py",
"repo_id": "Django-locallibrary",
"token_count": 671
} | 14 |
# Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
import re
import sys
import os
from .ansi import AnsiFore, AnsiBack, AnsiStyle, Style
from .winterm import WinTerm, WinColor, WinStyle
from .win32 import windll, winapi_test
winterm = None
if windll is not None:
winterm = WinTerm()
class StreamWrapper(object):
'''
Wraps a stream (such as stdout), acting as a transparent proxy for all
attribute access apart from method 'write()', which is delegated to our
Converter instance.
'''
def __init__(self, wrapped, converter):
# double-underscore everything to prevent clashes with names of
# attributes on the wrapped stream object.
self.__wrapped = wrapped
self.__convertor = converter
def __getattr__(self, name):
return getattr(self.__wrapped, name)
def __enter__(self, *args, **kwargs):
# special method lookup bypasses __getattr__/__getattribute__, see
# https://stackoverflow.com/questions/12632894/why-doesnt-getattr-work-with-exit
# thus, contextlib magic methods are not proxied via __getattr__
return self.__wrapped.__enter__(*args, **kwargs)
def __exit__(self, *args, **kwargs):
return self.__wrapped.__exit__(*args, **kwargs)
def write(self, text):
self.__convertor.write(text)
def isatty(self):
stream = self.__wrapped
if 'PYCHARM_HOSTED' in os.environ:
if stream is not None and (stream is sys.__stdout__ or stream is sys.__stderr__):
return True
try:
stream_isatty = stream.isatty
except AttributeError:
return False
else:
return stream_isatty()
@property
def closed(self):
stream = self.__wrapped
try:
return stream.closed
except AttributeError:
return True
class AnsiToWin32(object):
'''
Implements a 'write()' method which, on Windows, will strip ANSI character
sequences from the text, and if outputting to a tty, will convert them into
win32 function calls.
'''
ANSI_CSI_RE = re.compile('\001?\033\\[((?:\\d|;)*)([a-zA-Z])\002?') # Control Sequence Introducer
ANSI_OSC_RE = re.compile('\001?\033\\]((?:.|;)*?)(\x07)\002?') # Operating System Command
def __init__(self, wrapped, convert=None, strip=None, autoreset=False):
# The wrapped stream (normally sys.stdout or sys.stderr)
self.wrapped = wrapped
# should we reset colors to defaults after every .write()
self.autoreset = autoreset
# create the proxy wrapping our output stream
self.stream = StreamWrapper(wrapped, self)
on_windows = os.name == 'nt'
# We test if the WinAPI works, because even if we are on Windows
# we may be using a terminal that doesn't support the WinAPI
# (e.g. Cygwin Terminal). In this case it's up to the terminal
# to support the ANSI codes.
conversion_supported = on_windows and winapi_test()
# should we strip ANSI sequences from our output?
if strip is None:
strip = conversion_supported or (not self.stream.closed and not self.stream.isatty())
self.strip = strip
        # should we convert ANSI sequences into win32 calls?
if convert is None:
convert = conversion_supported and not self.stream.closed and self.stream.isatty()
self.convert = convert
# dict of ansi codes to win32 functions and parameters
self.win32_calls = self.get_win32_calls()
# are we wrapping stderr?
self.on_stderr = self.wrapped is sys.stderr
def should_wrap(self):
'''
True if this class is actually needed. If false, then the output
stream will not be affected, nor will win32 calls be issued, so
wrapping stdout is not actually required. This will generally be
False on non-Windows platforms, unless optional functionality like
autoreset has been requested using kwargs to init()
'''
return self.convert or self.strip or self.autoreset
def get_win32_calls(self):
if self.convert and winterm:
return {
AnsiStyle.RESET_ALL: (winterm.reset_all, ),
AnsiStyle.BRIGHT: (winterm.style, WinStyle.BRIGHT),
AnsiStyle.DIM: (winterm.style, WinStyle.NORMAL),
AnsiStyle.NORMAL: (winterm.style, WinStyle.NORMAL),
AnsiFore.BLACK: (winterm.fore, WinColor.BLACK),
AnsiFore.RED: (winterm.fore, WinColor.RED),
AnsiFore.GREEN: (winterm.fore, WinColor.GREEN),
AnsiFore.YELLOW: (winterm.fore, WinColor.YELLOW),
AnsiFore.BLUE: (winterm.fore, WinColor.BLUE),
AnsiFore.MAGENTA: (winterm.fore, WinColor.MAGENTA),
AnsiFore.CYAN: (winterm.fore, WinColor.CYAN),
AnsiFore.WHITE: (winterm.fore, WinColor.GREY),
AnsiFore.RESET: (winterm.fore, ),
AnsiFore.LIGHTBLACK_EX: (winterm.fore, WinColor.BLACK, True),
AnsiFore.LIGHTRED_EX: (winterm.fore, WinColor.RED, True),
AnsiFore.LIGHTGREEN_EX: (winterm.fore, WinColor.GREEN, True),
AnsiFore.LIGHTYELLOW_EX: (winterm.fore, WinColor.YELLOW, True),
AnsiFore.LIGHTBLUE_EX: (winterm.fore, WinColor.BLUE, True),
AnsiFore.LIGHTMAGENTA_EX: (winterm.fore, WinColor.MAGENTA, True),
AnsiFore.LIGHTCYAN_EX: (winterm.fore, WinColor.CYAN, True),
AnsiFore.LIGHTWHITE_EX: (winterm.fore, WinColor.GREY, True),
AnsiBack.BLACK: (winterm.back, WinColor.BLACK),
AnsiBack.RED: (winterm.back, WinColor.RED),
AnsiBack.GREEN: (winterm.back, WinColor.GREEN),
AnsiBack.YELLOW: (winterm.back, WinColor.YELLOW),
AnsiBack.BLUE: (winterm.back, WinColor.BLUE),
AnsiBack.MAGENTA: (winterm.back, WinColor.MAGENTA),
AnsiBack.CYAN: (winterm.back, WinColor.CYAN),
AnsiBack.WHITE: (winterm.back, WinColor.GREY),
AnsiBack.RESET: (winterm.back, ),
AnsiBack.LIGHTBLACK_EX: (winterm.back, WinColor.BLACK, True),
AnsiBack.LIGHTRED_EX: (winterm.back, WinColor.RED, True),
AnsiBack.LIGHTGREEN_EX: (winterm.back, WinColor.GREEN, True),
AnsiBack.LIGHTYELLOW_EX: (winterm.back, WinColor.YELLOW, True),
AnsiBack.LIGHTBLUE_EX: (winterm.back, WinColor.BLUE, True),
AnsiBack.LIGHTMAGENTA_EX: (winterm.back, WinColor.MAGENTA, True),
AnsiBack.LIGHTCYAN_EX: (winterm.back, WinColor.CYAN, True),
AnsiBack.LIGHTWHITE_EX: (winterm.back, WinColor.GREY, True),
}
return dict()
def write(self, text):
if self.strip or self.convert:
self.write_and_convert(text)
else:
self.wrapped.write(text)
self.wrapped.flush()
if self.autoreset:
self.reset_all()
def reset_all(self):
if self.convert:
self.call_win32('m', (0,))
elif not self.strip and not self.stream.closed:
self.wrapped.write(Style.RESET_ALL)
def write_and_convert(self, text):
'''
Write the given text to our wrapped stream, stripping any ANSI
sequences from the text, and optionally converting them into win32
calls.
'''
cursor = 0
text = self.convert_osc(text)
for match in self.ANSI_CSI_RE.finditer(text):
start, end = match.span()
self.write_plain_text(text, cursor, start)
self.convert_ansi(*match.groups())
cursor = end
self.write_plain_text(text, cursor, len(text))
def write_plain_text(self, text, start, end):
if start < end:
self.wrapped.write(text[start:end])
self.wrapped.flush()
def convert_ansi(self, paramstring, command):
if self.convert:
params = self.extract_params(command, paramstring)
self.call_win32(command, params)
def extract_params(self, command, paramstring):
if command in 'Hf':
params = tuple(int(p) if len(p) != 0 else 1 for p in paramstring.split(';'))
while len(params) < 2:
# defaults:
params = params + (1,)
else:
params = tuple(int(p) for p in paramstring.split(';') if len(p) != 0)
if len(params) == 0:
# defaults:
if command in 'JKm':
params = (0,)
elif command in 'ABCD':
params = (1,)
return params
def call_win32(self, command, params):
if command == 'm':
for param in params:
if param in self.win32_calls:
func_args = self.win32_calls[param]
func = func_args[0]
args = func_args[1:]
kwargs = dict(on_stderr=self.on_stderr)
func(*args, **kwargs)
elif command in 'J':
winterm.erase_screen(params[0], on_stderr=self.on_stderr)
elif command in 'K':
winterm.erase_line(params[0], on_stderr=self.on_stderr)
elif command in 'Hf': # cursor position - absolute
winterm.set_cursor_position(params, on_stderr=self.on_stderr)
elif command in 'ABCD': # cursor position - relative
n = params[0]
# A - up, B - down, C - forward, D - back
x, y = {'A': (0, -n), 'B': (0, n), 'C': (n, 0), 'D': (-n, 0)}[command]
winterm.cursor_adjust(x, y, on_stderr=self.on_stderr)
def convert_osc(self, text):
for match in self.ANSI_OSC_RE.finditer(text):
start, end = match.span()
text = text[:start] + text[end:]
paramstring, command = match.groups()
if command in '\x07': # \x07 = BEL
params = paramstring.split(";")
# 0 - change title and icon (we will only change title)
# 1 - change icon (we don't support this)
# 2 - change title
if params[0] in '02':
winterm.set_title(params[1])
return text
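# --- Editor's usage sketch (not part of colorama) ----------------------------
# ANSI_CSI_RE captures the parameter string and the final command letter of
# each CSI escape, which write_and_convert() then replays as win32 calls.
# Guarded demo of the tokenisation (package context only, since this module
# uses relative imports):
if __name__ == "__main__":
    sample = '\033[31;1mbold red\033[0m plain \033[2Acursor up two'
    for match in AnsiToWin32.ANSI_CSI_RE.finditer(sample):
        print(match.span(), match.groups())
    # prints spans plus ('31;1', 'm'), ('0', 'm'), ('2', 'A')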
| Django-locallibrary/env/Lib/site-packages/pip/_vendor/colorama/ansitowin32.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/colorama/ansitowin32.py",
"repo_id": "Django-locallibrary",
"token_count": 4962
} | 15 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2013 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
import hashlib
import logging
import os
import shutil
import subprocess
import tempfile
try:
from threading import Thread
except ImportError:
from dummy_threading import Thread
from . import DistlibException
from .compat import (HTTPBasicAuthHandler, Request, HTTPPasswordMgr,
urlparse, build_opener, string_types)
from .util import cached_property, zip_dir, ServerProxy
logger = logging.getLogger(__name__)
DEFAULT_INDEX = 'https://pypi.org/pypi'
DEFAULT_REALM = 'pypi'
class PackageIndex(object):
"""
This class represents a package index compatible with PyPI, the Python
Package Index.
"""
boundary = b'----------ThIs_Is_tHe_distlib_index_bouNdaRY_$'
def __init__(self, url=None):
"""
Initialise an instance.
:param url: The URL of the index. If not specified, the URL for PyPI is
used.
"""
self.url = url or DEFAULT_INDEX
self.read_configuration()
scheme, netloc, path, params, query, frag = urlparse(self.url)
if params or query or frag or scheme not in ('http', 'https'):
raise DistlibException('invalid repository: %s' % self.url)
self.password_handler = None
self.ssl_verifier = None
self.gpg = None
self.gpg_home = None
with open(os.devnull, 'w') as sink:
# Use gpg by default rather than gpg2, as gpg2 insists on
# prompting for passwords
for s in ('gpg', 'gpg2'):
try:
rc = subprocess.check_call([s, '--version'], stdout=sink,
stderr=sink)
if rc == 0:
self.gpg = s
break
except OSError:
pass
def _get_pypirc_command(self):
"""
Get the distutils command for interacting with PyPI configurations.
:return: the command.
"""
from distutils.core import Distribution
from distutils.config import PyPIRCCommand
d = Distribution()
return PyPIRCCommand(d)
def read_configuration(self):
"""
Read the PyPI access configuration as supported by distutils, getting
PyPI to do the actual work. This populates ``username``, ``password``,
``realm`` and ``url`` attributes from the configuration.
"""
# get distutils to do the work
c = self._get_pypirc_command()
c.repository = self.url
cfg = c._read_pypirc()
self.username = cfg.get('username')
self.password = cfg.get('password')
self.realm = cfg.get('realm', 'pypi')
self.url = cfg.get('repository', self.url)
def save_configuration(self):
"""
Save the PyPI access configuration. You must have set ``username`` and
``password`` attributes before calling this method.
Again, distutils is used to do the actual work.
"""
self.check_credentials()
# get distutils to do the work
c = self._get_pypirc_command()
c._store_pypirc(self.username, self.password)
def check_credentials(self):
"""
Check that ``username`` and ``password`` have been set, and raise an
exception if not.
"""
if self.username is None or self.password is None:
raise DistlibException('username and password must be set')
pm = HTTPPasswordMgr()
_, netloc, _, _, _, _ = urlparse(self.url)
pm.add_password(self.realm, netloc, self.username, self.password)
self.password_handler = HTTPBasicAuthHandler(pm)
def register(self, metadata):
"""
Register a distribution on PyPI, using the provided metadata.
:param metadata: A :class:`Metadata` instance defining at least a name
and version number for the distribution to be
registered.
:return: The HTTP response received from PyPI upon submission of the
request.
"""
self.check_credentials()
metadata.validate()
d = metadata.todict()
d[':action'] = 'verify'
request = self.encode_request(d.items(), [])
response = self.send_request(request)
d[':action'] = 'submit'
request = self.encode_request(d.items(), [])
return self.send_request(request)
def _reader(self, name, stream, outbuf):
"""
        Thread runner for reading lines from a subprocess into a buffer.
:param name: The logical name of the stream (used for logging only).
        :param stream: The stream to read from. This will typically be a pipe
connected to the output stream of a subprocess.
:param outbuf: The list to append the read lines to.
"""
while True:
s = stream.readline()
if not s:
break
s = s.decode('utf-8').rstrip()
outbuf.append(s)
logger.debug('%s: %s' % (name, s))
stream.close()
def get_sign_command(self, filename, signer, sign_password,
keystore=None):
"""
Return a suitable command for signing a file.
:param filename: The pathname to the file to be signed.
:param signer: The identifier of the signer of the file.
:param sign_password: The passphrase for the signer's
private key used for signing.
:param keystore: The path to a directory which contains the keys
used in verification. If not specified, the
instance's ``gpg_home`` attribute is used instead.
:return: The signing command as a list suitable to be
passed to :class:`subprocess.Popen`.
"""
cmd = [self.gpg, '--status-fd', '2', '--no-tty']
if keystore is None:
keystore = self.gpg_home
if keystore:
cmd.extend(['--homedir', keystore])
if sign_password is not None:
cmd.extend(['--batch', '--passphrase-fd', '0'])
td = tempfile.mkdtemp()
sf = os.path.join(td, os.path.basename(filename) + '.asc')
cmd.extend(['--detach-sign', '--armor', '--local-user',
signer, '--output', sf, filename])
logger.debug('invoking: %s', ' '.join(cmd))
return cmd, sf
def run_command(self, cmd, input_data=None):
"""
        Run a command in a child process, passing it any input data specified.
:param cmd: The command to run.
:param input_data: If specified, this must be a byte string containing
data to be sent to the child process.
:return: A tuple consisting of the subprocess' exit code, a list of
lines read from the subprocess' ``stdout``, and a list of
lines read from the subprocess' ``stderr``.
"""
kwargs = {
'stdout': subprocess.PIPE,
'stderr': subprocess.PIPE,
}
if input_data is not None:
kwargs['stdin'] = subprocess.PIPE
stdout = []
stderr = []
p = subprocess.Popen(cmd, **kwargs)
# We don't use communicate() here because we may need to
# get clever with interacting with the command
t1 = Thread(target=self._reader, args=('stdout', p.stdout, stdout))
t1.start()
t2 = Thread(target=self._reader, args=('stderr', p.stderr, stderr))
t2.start()
if input_data is not None:
p.stdin.write(input_data)
p.stdin.close()
p.wait()
t1.join()
t2.join()
return p.returncode, stdout, stderr
def sign_file(self, filename, signer, sign_password, keystore=None):
"""
Sign a file.
:param filename: The pathname to the file to be signed.
:param signer: The identifier of the signer of the file.
:param sign_password: The passphrase for the signer's
private key used for signing.
:param keystore: The path to a directory which contains the keys
used in signing. If not specified, the instance's
``gpg_home`` attribute is used instead.
:return: The absolute pathname of the file where the signature is
stored.
"""
cmd, sig_file = self.get_sign_command(filename, signer, sign_password,
keystore)
rc, stdout, stderr = self.run_command(cmd,
sign_password.encode('utf-8'))
if rc != 0:
raise DistlibException('sign command failed with error '
'code %s' % rc)
return sig_file
def upload_file(self, metadata, filename, signer=None, sign_password=None,
filetype='sdist', pyversion='source', keystore=None):
"""
Upload a release file to the index.
:param metadata: A :class:`Metadata` instance defining at least a name
and version number for the file to be uploaded.
:param filename: The pathname of the file to be uploaded.
:param signer: The identifier of the signer of the file.
:param sign_password: The passphrase for the signer's
private key used for signing.
:param filetype: The type of the file being uploaded. This is the
distutils command which produced that file, e.g.
``sdist`` or ``bdist_wheel``.
:param pyversion: The version of Python which the release relates
to. For code compatible with any Python, this would
be ``source``, otherwise it would be e.g. ``3.2``.
:param keystore: The path to a directory which contains the keys
used in signing. If not specified, the instance's
``gpg_home`` attribute is used instead.
:return: The HTTP response received from PyPI upon submission of the
request.
"""
self.check_credentials()
if not os.path.exists(filename):
raise DistlibException('not found: %s' % filename)
metadata.validate()
d = metadata.todict()
sig_file = None
if signer:
if not self.gpg:
logger.warning('no signing program available - not signed')
else:
sig_file = self.sign_file(filename, signer, sign_password,
keystore)
with open(filename, 'rb') as f:
file_data = f.read()
md5_digest = hashlib.md5(file_data).hexdigest()
sha256_digest = hashlib.sha256(file_data).hexdigest()
d.update({
':action': 'file_upload',
'protocol_version': '1',
'filetype': filetype,
'pyversion': pyversion,
'md5_digest': md5_digest,
'sha256_digest': sha256_digest,
})
files = [('content', os.path.basename(filename), file_data)]
if sig_file:
with open(sig_file, 'rb') as f:
sig_data = f.read()
files.append(('gpg_signature', os.path.basename(sig_file),
sig_data))
shutil.rmtree(os.path.dirname(sig_file))
request = self.encode_request(d.items(), files)
return self.send_request(request)
def upload_documentation(self, metadata, doc_dir):
"""
Upload documentation to the index.
:param metadata: A :class:`Metadata` instance defining at least a name
and version number for the documentation to be
uploaded.
:param doc_dir: The pathname of the directory which contains the
documentation. This should be the directory that
contains the ``index.html`` for the documentation.
:return: The HTTP response received from PyPI upon submission of the
request.
"""
self.check_credentials()
if not os.path.isdir(doc_dir):
raise DistlibException('not a directory: %r' % doc_dir)
fn = os.path.join(doc_dir, 'index.html')
if not os.path.exists(fn):
raise DistlibException('not found: %r' % fn)
metadata.validate()
name, version = metadata.name, metadata.version
zip_data = zip_dir(doc_dir).getvalue()
fields = [(':action', 'doc_upload'),
('name', name), ('version', version)]
files = [('content', name, zip_data)]
request = self.encode_request(fields, files)
return self.send_request(request)
def get_verify_command(self, signature_filename, data_filename,
keystore=None):
"""
Return a suitable command for verifying a file.
:param signature_filename: The pathname to the file containing the
signature.
:param data_filename: The pathname to the file containing the
signed data.
:param keystore: The path to a directory which contains the keys
used in verification. If not specified, the
instance's ``gpg_home`` attribute is used instead.
:return: The verifying command as a list suitable to be
passed to :class:`subprocess.Popen`.
"""
cmd = [self.gpg, '--status-fd', '2', '--no-tty']
if keystore is None:
keystore = self.gpg_home
if keystore:
cmd.extend(['--homedir', keystore])
cmd.extend(['--verify', signature_filename, data_filename])
logger.debug('invoking: %s', ' '.join(cmd))
return cmd
def verify_signature(self, signature_filename, data_filename,
keystore=None):
"""
Verify a signature for a file.
:param signature_filename: The pathname to the file containing the
signature.
:param data_filename: The pathname to the file containing the
signed data.
:param keystore: The path to a directory which contains the keys
used in verification. If not specified, the
instance's ``gpg_home`` attribute is used instead.
:return: True if the signature was verified, else False.
"""
if not self.gpg:
raise DistlibException('verification unavailable because gpg '
'unavailable')
cmd = self.get_verify_command(signature_filename, data_filename,
keystore)
rc, stdout, stderr = self.run_command(cmd)
if rc not in (0, 1):
raise DistlibException('verify command failed with error '
'code %s' % rc)
return rc == 0
def download_file(self, url, destfile, digest=None, reporthook=None):
"""
        This is a convenience method for downloading a file from a URL.
Normally, this will be a file from the index, though currently
no check is made for this (i.e. a file can be downloaded from
anywhere).
The method is just like the :func:`urlretrieve` function in the
standard library, except that it allows digest computation to be
        done during download, and checks that the downloaded data
        matches any expected value.
:param url: The URL of the file to be downloaded (assumed to be
available via an HTTP GET request).
:param destfile: The pathname where the downloaded file is to be
saved.
:param digest: If specified, this must be a (hasher, value)
tuple, where hasher is the algorithm used (e.g.
``'md5'``) and ``value`` is the expected value.
:param reporthook: The same as for :func:`urlretrieve` in the
standard library.
"""
if digest is None:
digester = None
logger.debug('No digest specified')
else:
if isinstance(digest, (list, tuple)):
hasher, digest = digest
else:
hasher = 'md5'
digester = getattr(hashlib, hasher)()
logger.debug('Digest specified: %s' % digest)
# The following code is equivalent to urlretrieve.
# We need to do it this way so that we can compute the
# digest of the file as we go.
with open(destfile, 'wb') as dfp:
# addinfourl is not a context manager on 2.x
# so we have to use try/finally
sfp = self.send_request(Request(url))
try:
headers = sfp.info()
blocksize = 8192
size = -1
read = 0
blocknum = 0
if "content-length" in headers:
size = int(headers["Content-Length"])
if reporthook:
reporthook(blocknum, blocksize, size)
while True:
block = sfp.read(blocksize)
if not block:
break
read += len(block)
dfp.write(block)
if digester:
digester.update(block)
blocknum += 1
if reporthook:
reporthook(blocknum, blocksize, size)
finally:
sfp.close()
# check that we got the whole file, if we can
if size >= 0 and read < size:
raise DistlibException(
'retrieval incomplete: got only %d out of %d bytes'
% (read, size))
# if we have a digest, it must match.
if digester:
actual = digester.hexdigest()
if digest != actual:
raise DistlibException('%s digest mismatch for %s: expected '
'%s, got %s' % (hasher, destfile,
digest, actual))
logger.debug('Digest verified: %s', digest)
def send_request(self, req):
"""
Send a standard library :class:`Request` to PyPI and return its
response.
:param req: The request to send.
:return: The HTTP response from PyPI (a standard library HTTPResponse).
"""
handlers = []
if self.password_handler:
handlers.append(self.password_handler)
if self.ssl_verifier:
handlers.append(self.ssl_verifier)
opener = build_opener(*handlers)
return opener.open(req)
def encode_request(self, fields, files):
"""
Encode fields and files for posting to an HTTP server.
:param fields: The fields to send as a list of (fieldname, value)
tuples.
:param files: The files to send as a list of (fieldname, filename,
file_bytes) tuple.
"""
# Adapted from packaging, which in turn was adapted from
# http://code.activestate.com/recipes/146306
parts = []
boundary = self.boundary
for k, values in fields:
if not isinstance(values, (list, tuple)):
values = [values]
for v in values:
parts.extend((
b'--' + boundary,
('Content-Disposition: form-data; name="%s"' %
k).encode('utf-8'),
b'',
v.encode('utf-8')))
for key, filename, value in files:
parts.extend((
b'--' + boundary,
('Content-Disposition: form-data; name="%s"; filename="%s"' %
(key, filename)).encode('utf-8'),
b'',
value))
parts.extend((b'--' + boundary + b'--', b''))
body = b'\r\n'.join(parts)
ct = b'multipart/form-data; boundary=' + boundary
headers = {
'Content-type': ct,
'Content-length': str(len(body))
}
return Request(self.url, body, headers)
def search(self, terms, operator=None):
if isinstance(terms, string_types):
terms = {'name': terms}
rpc_proxy = ServerProxy(self.url, timeout=3.0)
try:
return rpc_proxy.search(terms, operator or 'and')
finally:
rpc_proxy('close')()
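# --- Editor's usage sketch (not part of distlib) -----------------------------
# encode_request() frames fields and files in the classic multipart/form-data
# layout: each part is "--boundary", a Content-Disposition header, a blank
# line, then the value, with "--boundary--" closing the body. The guarded demo
# below rebuilds that framing for one field and one file without instantiating
# PackageIndex (whose constructor reads ~/.pypirc and probes for gpg). The
# field name and file content are made up for illustration; package context
# only, since this module uses relative imports.
if __name__ == "__main__":
    boundary = PackageIndex.boundary
    parts = [
        b'--' + boundary,
        b'Content-Disposition: form-data; name="name"',
        b'',
        b'example-project',
        b'--' + boundary,
        b'Content-Disposition: form-data; name="content"; filename="demo.txt"',
        b'',
        b'hello world',
        b'--' + boundary + b'--',
        b'',
    ]
    print(b'\r\n'.join(parts).decode('utf-8'))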
| Django-locallibrary/env/Lib/site-packages/pip/_vendor/distlib/index.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/distlib/index.py",
"repo_id": "Django-locallibrary",
"token_count": 9958
} | 16 |
from __future__ import absolute_import, division, unicode_literals
from pip._vendor.six import text_type
from bisect import bisect_left
from ._base import Trie as ABCTrie
class Trie(ABCTrie):
def __init__(self, data):
if not all(isinstance(x, text_type) for x in data.keys()):
raise TypeError("All keys must be strings")
self._data = data
self._keys = sorted(data.keys())
self._cachestr = ""
self._cachepoints = (0, len(data))
def __contains__(self, key):
return key in self._data
def __len__(self):
return len(self._data)
def __iter__(self):
return iter(self._data)
def __getitem__(self, key):
return self._data[key]
def keys(self, prefix=None):
if prefix is None or prefix == "" or not self._keys:
return set(self._keys)
if prefix.startswith(self._cachestr):
lo, hi = self._cachepoints
start = i = bisect_left(self._keys, prefix, lo, hi)
else:
start = i = bisect_left(self._keys, prefix)
keys = set()
if start == len(self._keys):
return keys
while self._keys[i].startswith(prefix):
keys.add(self._keys[i])
i += 1
self._cachestr = prefix
self._cachepoints = (start, i)
return keys
def has_keys_with_prefix(self, prefix):
if prefix in self._data:
return True
if prefix.startswith(self._cachestr):
lo, hi = self._cachepoints
i = bisect_left(self._keys, prefix, lo, hi)
else:
i = bisect_left(self._keys, prefix)
if i == len(self._keys):
return False
return self._keys[i].startswith(prefix)
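# --- Editor's usage sketch (not part of html5lib) ----------------------------
# keys(prefix) binary-searches the sorted key list and caches the matching
# slice, so repeated lookups that extend the same prefix scan an ever smaller
# window. Guarded demo (package context only; all keys must be text strings):
if __name__ == "__main__":
    trie = Trie({u"app": 1, u"apple": 2, u"apply": 3, u"banana": 4})
    print(sorted(trie.keys(u"app")))          # [u'app', u'apple', u'apply']
    print(trie.has_keys_with_prefix(u"ban"))  # True
    print(trie.has_keys_with_prefix(u"c"))    # False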
| Django-locallibrary/env/Lib/site-packages/pip/_vendor/html5lib/_trie/py.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/html5lib/_trie/py.py",
"repo_id": "Django-locallibrary",
"token_count": 806
} | 17 |
from __future__ import absolute_import, division, unicode_literals
from . import base
class Filter(base.Filter):
"""Removes optional tags from the token stream"""
def slider(self):
previous1 = previous2 = None
for token in self.source:
if previous1 is not None:
yield previous2, previous1, token
previous2 = previous1
previous1 = token
if previous1 is not None:
yield previous2, previous1, None
def __iter__(self):
for previous, token, next in self.slider():
type = token["type"]
if type == "StartTag":
if (token["data"] or
not self.is_optional_start(token["name"], previous, next)):
yield token
elif type == "EndTag":
if not self.is_optional_end(token["name"], next):
yield token
else:
yield token
def is_optional_start(self, tagname, previous, next):
type = next and next["type"] or None
        if tagname == 'html':
# An html element's start tag may be omitted if the first thing
# inside the html element is not a space character or a comment.
return type not in ("Comment", "SpaceCharacters")
elif tagname == 'head':
# A head element's start tag may be omitted if the first thing
# inside the head element is an element.
# XXX: we also omit the start tag if the head element is empty
if type in ("StartTag", "EmptyTag"):
return True
elif type == "EndTag":
return next["name"] == "head"
elif tagname == 'body':
# A body element's start tag may be omitted if the first thing
# inside the body element is not a space character or a comment,
# except if the first thing inside the body element is a script
# or style element and the node immediately preceding the body
# element is a head element whose end tag has been omitted.
if type in ("Comment", "SpaceCharacters"):
return False
elif type == "StartTag":
# XXX: we do not look at the preceding event, so we never omit
# the body element's start tag if it's followed by a script or
# a style element.
return next["name"] not in ('script', 'style')
else:
return True
elif tagname == 'colgroup':
# A colgroup element's start tag may be omitted if the first thing
# inside the colgroup element is a col element, and if the element
# is not immediately preceded by another colgroup element whose
# end tag has been omitted.
if type in ("StartTag", "EmptyTag"):
# XXX: we do not look at the preceding event, so instead we never
# omit the colgroup element's end tag when it is immediately
# followed by another colgroup element. See is_optional_end.
return next["name"] == "col"
else:
return False
elif tagname == 'tbody':
# A tbody element's start tag may be omitted if the first thing
# inside the tbody element is a tr element, and if the element is
# not immediately preceded by a tbody, thead, or tfoot element
# whose end tag has been omitted.
if type == "StartTag":
# omit the thead and tfoot elements' end tag when they are
# immediately followed by a tbody element. See is_optional_end.
if previous and previous['type'] == 'EndTag' and \
previous['name'] in ('tbody', 'thead', 'tfoot'):
return False
return next["name"] == 'tr'
else:
return False
return False
def is_optional_end(self, tagname, next):
type = next and next["type"] or None
if tagname in ('html', 'head', 'body'):
# An html element's end tag may be omitted if the html element
# is not immediately followed by a space character or a comment.
return type not in ("Comment", "SpaceCharacters")
elif tagname in ('li', 'optgroup', 'tr'):
# A li element's end tag may be omitted if the li element is
# immediately followed by another li element or if there is
# no more content in the parent element.
# An optgroup element's end tag may be omitted if the optgroup
# element is immediately followed by another optgroup element,
# or if there is no more content in the parent element.
# A tr element's end tag may be omitted if the tr element is
# immediately followed by another tr element, or if there is
# no more content in the parent element.
if type == "StartTag":
return next["name"] == tagname
else:
return type == "EndTag" or type is None
elif tagname in ('dt', 'dd'):
# A dt element's end tag may be omitted if the dt element is
# immediately followed by another dt element or a dd element.
# A dd element's end tag may be omitted if the dd element is
# immediately followed by another dd element or a dt element,
# or if there is no more content in the parent element.
if type == "StartTag":
return next["name"] in ('dt', 'dd')
elif tagname == 'dd':
return type == "EndTag" or type is None
else:
return False
elif tagname == 'p':
# A p element's end tag may be omitted if the p element is
# immediately followed by an address, article, aside,
# blockquote, datagrid, dialog, dir, div, dl, fieldset,
# footer, form, h1, h2, h3, h4, h5, h6, header, hr, menu,
# nav, ol, p, pre, section, table, or ul, element, or if
# there is no more content in the parent element.
if type in ("StartTag", "EmptyTag"):
return next["name"] in ('address', 'article', 'aside',
'blockquote', 'datagrid', 'dialog',
'dir', 'div', 'dl', 'fieldset', 'footer',
'form', 'h1', 'h2', 'h3', 'h4', 'h5', 'h6',
'header', 'hr', 'menu', 'nav', 'ol',
'p', 'pre', 'section', 'table', 'ul')
else:
return type == "EndTag" or type is None
elif tagname == 'option':
# An option element's end tag may be omitted if the option
# element is immediately followed by another option element,
            # or if it is immediately followed by an optgroup
# element, or if there is no more content in the parent
# element.
if type == "StartTag":
return next["name"] in ('option', 'optgroup')
else:
return type == "EndTag" or type is None
elif tagname in ('rt', 'rp'):
# An rt element's end tag may be omitted if the rt element is
# immediately followed by an rt or rp element, or if there is
# no more content in the parent element.
# An rp element's end tag may be omitted if the rp element is
# immediately followed by an rt or rp element, or if there is
# no more content in the parent element.
if type == "StartTag":
return next["name"] in ('rt', 'rp')
else:
return type == "EndTag" or type is None
elif tagname == 'colgroup':
# A colgroup element's end tag may be omitted if the colgroup
# element is not immediately followed by a space character or
# a comment.
if type in ("Comment", "SpaceCharacters"):
return False
elif type == "StartTag":
# XXX: we also look for an immediately following colgroup
# element. See is_optional_start.
return next["name"] != 'colgroup'
else:
return True
elif tagname in ('thead', 'tbody'):
# A thead element's end tag may be omitted if the thead element
# is immediately followed by a tbody or tfoot element.
# A tbody element's end tag may be omitted if the tbody element
# is immediately followed by a tbody or tfoot element, or if
# there is no more content in the parent element.
# A tfoot element's end tag may be omitted if the tfoot element
# is immediately followed by a tbody element, or if there is no
# more content in the parent element.
# XXX: we never omit the end tag when the following element is
# a tbody. See is_optional_start.
if type == "StartTag":
return next["name"] in ['tbody', 'tfoot']
elif tagname == 'tbody':
return type == "EndTag" or type is None
else:
return False
elif tagname == 'tfoot':
# A tfoot element's end tag may be omitted if the tfoot element
# is immediately followed by a tbody element, or if there is no
# more content in the parent element.
# XXX: we never omit the end tag when the following element is
# a tbody. See is_optional_start.
if type == "StartTag":
return next["name"] == 'tbody'
else:
return type == "EndTag" or type is None
elif tagname in ('td', 'th'):
# A td element's end tag may be omitted if the td element is
# immediately followed by a td or th element, or if there is
# no more content in the parent element.
# A th element's end tag may be omitted if the th element is
# immediately followed by a td or th element, or if there is
# no more content in the parent element.
if type == "StartTag":
return next["name"] in ('td', 'th')
else:
return type == "EndTag" or type is None
return False
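# --- Editor's usage sketch (not part of html5lib) ----------------------------
# slider() turns the flat token stream into (previous, token, next) triples so
# the omission rules above can peek one token in each direction. The guarded
# demo below (package context only) runs the filter over a hand-built token
# list; the </li> end tag is dropped because it is directly followed by
# </ul>, which makes it optional per is_optional_end().
if __name__ == "__main__":
    tokens = [
        {"type": "StartTag", "name": "ul", "data": {}},
        {"type": "StartTag", "name": "li", "data": {}},
        {"type": "Characters", "data": "one"},
        {"type": "EndTag", "name": "li"},
        {"type": "EndTag", "name": "ul"},
    ]
    for kept in Filter(tokens):
        print(kept["type"], kept.get("name", kept.get("data")))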
| Django-locallibrary/env/Lib/site-packages/pip/_vendor/html5lib/filters/optionaltags.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/html5lib/filters/optionaltags.py",
"repo_id": "Django-locallibrary",
"token_count": 4716
} | 18 |
from __future__ import absolute_import, division, unicode_literals
from genshi.core import QName
from genshi.core import START, END, XML_NAMESPACE, DOCTYPE, TEXT
from genshi.core import START_NS, END_NS, START_CDATA, END_CDATA, PI, COMMENT
from . import base
from ..constants import voidElements, namespaces
class TreeWalker(base.TreeWalker):
def __iter__(self):
# Buffer the events so we can pass in the following one
previous = None
for event in self.tree:
if previous is not None:
for token in self.tokens(previous, event):
yield token
previous = event
# Don't forget the final event!
if previous is not None:
for token in self.tokens(previous, None):
yield token
def tokens(self, event, next):
kind, data, _ = event
if kind == START:
tag, attribs = data
name = tag.localname
namespace = tag.namespace
converted_attribs = {}
for k, v in attribs:
if isinstance(k, QName):
converted_attribs[(k.namespace, k.localname)] = v
else:
converted_attribs[(None, k)] = v
if namespace == namespaces["html"] and name in voidElements:
for token in self.emptyTag(namespace, name, converted_attribs,
not next or next[0] != END or
next[1] != tag):
yield token
else:
yield self.startTag(namespace, name, converted_attribs)
elif kind == END:
name = data.localname
namespace = data.namespace
if namespace != namespaces["html"] or name not in voidElements:
yield self.endTag(namespace, name)
elif kind == COMMENT:
yield self.comment(data)
elif kind == TEXT:
for token in self.text(data):
yield token
elif kind == DOCTYPE:
yield self.doctype(*data)
elif kind in (XML_NAMESPACE, DOCTYPE, START_NS, END_NS,
START_CDATA, END_CDATA, PI):
pass
else:
yield self.unknown(kind)
| Django-locallibrary/env/Lib/site-packages/pip/_vendor/html5lib/treewalkers/genshi.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/html5lib/treewalkers/genshi.py",
"repo_id": "Django-locallibrary",
"token_count": 1154
} | 19 |
# This file is automatically generated by tools/idna-data
# vim: set fileencoding=utf-8 :
"""IDNA Mapping Table from UTS46."""
__version__ = "13.0.0"
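# Editor's note: each tuple in the segments below is (codepoint, status) or
# (codepoint, status, mapping). The status codes follow UTS #46: 'V' valid,
# 'M' mapped (to the given replacement string), 'D' deviation, 'I' ignored,
# 'X' disallowed, and '3' disallowed under strict STD3 rules but otherwise
# valid or mapped.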
def _seg_0():
return [
(0x0, '3'),
(0x1, '3'),
(0x2, '3'),
(0x3, '3'),
(0x4, '3'),
(0x5, '3'),
(0x6, '3'),
(0x7, '3'),
(0x8, '3'),
(0x9, '3'),
(0xA, '3'),
(0xB, '3'),
(0xC, '3'),
(0xD, '3'),
(0xE, '3'),
(0xF, '3'),
(0x10, '3'),
(0x11, '3'),
(0x12, '3'),
(0x13, '3'),
(0x14, '3'),
(0x15, '3'),
(0x16, '3'),
(0x17, '3'),
(0x18, '3'),
(0x19, '3'),
(0x1A, '3'),
(0x1B, '3'),
(0x1C, '3'),
(0x1D, '3'),
(0x1E, '3'),
(0x1F, '3'),
(0x20, '3'),
(0x21, '3'),
(0x22, '3'),
(0x23, '3'),
(0x24, '3'),
(0x25, '3'),
(0x26, '3'),
(0x27, '3'),
(0x28, '3'),
(0x29, '3'),
(0x2A, '3'),
(0x2B, '3'),
(0x2C, '3'),
(0x2D, 'V'),
(0x2E, 'V'),
(0x2F, '3'),
(0x30, 'V'),
(0x31, 'V'),
(0x32, 'V'),
(0x33, 'V'),
(0x34, 'V'),
(0x35, 'V'),
(0x36, 'V'),
(0x37, 'V'),
(0x38, 'V'),
(0x39, 'V'),
(0x3A, '3'),
(0x3B, '3'),
(0x3C, '3'),
(0x3D, '3'),
(0x3E, '3'),
(0x3F, '3'),
(0x40, '3'),
(0x41, 'M', u'a'),
(0x42, 'M', u'b'),
(0x43, 'M', u'c'),
(0x44, 'M', u'd'),
(0x45, 'M', u'e'),
(0x46, 'M', u'f'),
(0x47, 'M', u'g'),
(0x48, 'M', u'h'),
(0x49, 'M', u'i'),
(0x4A, 'M', u'j'),
(0x4B, 'M', u'k'),
(0x4C, 'M', u'l'),
(0x4D, 'M', u'm'),
(0x4E, 'M', u'n'),
(0x4F, 'M', u'o'),
(0x50, 'M', u'p'),
(0x51, 'M', u'q'),
(0x52, 'M', u'r'),
(0x53, 'M', u's'),
(0x54, 'M', u't'),
(0x55, 'M', u'u'),
(0x56, 'M', u'v'),
(0x57, 'M', u'w'),
(0x58, 'M', u'x'),
(0x59, 'M', u'y'),
(0x5A, 'M', u'z'),
(0x5B, '3'),
(0x5C, '3'),
(0x5D, '3'),
(0x5E, '3'),
(0x5F, '3'),
(0x60, '3'),
(0x61, 'V'),
(0x62, 'V'),
(0x63, 'V'),
]
def _seg_1():
return [
(0x64, 'V'),
(0x65, 'V'),
(0x66, 'V'),
(0x67, 'V'),
(0x68, 'V'),
(0x69, 'V'),
(0x6A, 'V'),
(0x6B, 'V'),
(0x6C, 'V'),
(0x6D, 'V'),
(0x6E, 'V'),
(0x6F, 'V'),
(0x70, 'V'),
(0x71, 'V'),
(0x72, 'V'),
(0x73, 'V'),
(0x74, 'V'),
(0x75, 'V'),
(0x76, 'V'),
(0x77, 'V'),
(0x78, 'V'),
(0x79, 'V'),
(0x7A, 'V'),
(0x7B, '3'),
(0x7C, '3'),
(0x7D, '3'),
(0x7E, '3'),
(0x7F, '3'),
(0x80, 'X'),
(0x81, 'X'),
(0x82, 'X'),
(0x83, 'X'),
(0x84, 'X'),
(0x85, 'X'),
(0x86, 'X'),
(0x87, 'X'),
(0x88, 'X'),
(0x89, 'X'),
(0x8A, 'X'),
(0x8B, 'X'),
(0x8C, 'X'),
(0x8D, 'X'),
(0x8E, 'X'),
(0x8F, 'X'),
(0x90, 'X'),
(0x91, 'X'),
(0x92, 'X'),
(0x93, 'X'),
(0x94, 'X'),
(0x95, 'X'),
(0x96, 'X'),
(0x97, 'X'),
(0x98, 'X'),
(0x99, 'X'),
(0x9A, 'X'),
(0x9B, 'X'),
(0x9C, 'X'),
(0x9D, 'X'),
(0x9E, 'X'),
(0x9F, 'X'),
(0xA0, '3', u' '),
(0xA1, 'V'),
(0xA2, 'V'),
(0xA3, 'V'),
(0xA4, 'V'),
(0xA5, 'V'),
(0xA6, 'V'),
(0xA7, 'V'),
(0xA8, '3', u' ̈'),
(0xA9, 'V'),
(0xAA, 'M', u'a'),
(0xAB, 'V'),
(0xAC, 'V'),
(0xAD, 'I'),
(0xAE, 'V'),
(0xAF, '3', u' ̄'),
(0xB0, 'V'),
(0xB1, 'V'),
(0xB2, 'M', u'2'),
(0xB3, 'M', u'3'),
(0xB4, '3', u' ́'),
(0xB5, 'M', u'μ'),
(0xB6, 'V'),
(0xB7, 'V'),
(0xB8, '3', u' ̧'),
(0xB9, 'M', u'1'),
(0xBA, 'M', u'o'),
(0xBB, 'V'),
(0xBC, 'M', u'1⁄4'),
(0xBD, 'M', u'1⁄2'),
(0xBE, 'M', u'3⁄4'),
(0xBF, 'V'),
(0xC0, 'M', u'à'),
(0xC1, 'M', u'á'),
(0xC2, 'M', u'â'),
(0xC3, 'M', u'ã'),
(0xC4, 'M', u'ä'),
(0xC5, 'M', u'å'),
(0xC6, 'M', u'æ'),
(0xC7, 'M', u'ç'),
]
def _seg_2():
return [
(0xC8, 'M', u'è'),
(0xC9, 'M', u'é'),
(0xCA, 'M', u'ê'),
(0xCB, 'M', u'ë'),
(0xCC, 'M', u'ì'),
(0xCD, 'M', u'í'),
(0xCE, 'M', u'î'),
(0xCF, 'M', u'ï'),
(0xD0, 'M', u'ð'),
(0xD1, 'M', u'ñ'),
(0xD2, 'M', u'ò'),
(0xD3, 'M', u'ó'),
(0xD4, 'M', u'ô'),
(0xD5, 'M', u'õ'),
(0xD6, 'M', u'ö'),
(0xD7, 'V'),
(0xD8, 'M', u'ø'),
(0xD9, 'M', u'ù'),
(0xDA, 'M', u'ú'),
(0xDB, 'M', u'û'),
(0xDC, 'M', u'ü'),
(0xDD, 'M', u'ý'),
(0xDE, 'M', u'þ'),
(0xDF, 'D', u'ss'),
(0xE0, 'V'),
(0xE1, 'V'),
(0xE2, 'V'),
(0xE3, 'V'),
(0xE4, 'V'),
(0xE5, 'V'),
(0xE6, 'V'),
(0xE7, 'V'),
(0xE8, 'V'),
(0xE9, 'V'),
(0xEA, 'V'),
(0xEB, 'V'),
(0xEC, 'V'),
(0xED, 'V'),
(0xEE, 'V'),
(0xEF, 'V'),
(0xF0, 'V'),
(0xF1, 'V'),
(0xF2, 'V'),
(0xF3, 'V'),
(0xF4, 'V'),
(0xF5, 'V'),
(0xF6, 'V'),
(0xF7, 'V'),
(0xF8, 'V'),
(0xF9, 'V'),
(0xFA, 'V'),
(0xFB, 'V'),
(0xFC, 'V'),
(0xFD, 'V'),
(0xFE, 'V'),
(0xFF, 'V'),
(0x100, 'M', u'ā'),
(0x101, 'V'),
(0x102, 'M', u'ă'),
(0x103, 'V'),
(0x104, 'M', u'ą'),
(0x105, 'V'),
(0x106, 'M', u'ć'),
(0x107, 'V'),
(0x108, 'M', u'ĉ'),
(0x109, 'V'),
(0x10A, 'M', u'ċ'),
(0x10B, 'V'),
(0x10C, 'M', u'č'),
(0x10D, 'V'),
(0x10E, 'M', u'ď'),
(0x10F, 'V'),
(0x110, 'M', u'đ'),
(0x111, 'V'),
(0x112, 'M', u'ē'),
(0x113, 'V'),
(0x114, 'M', u'ĕ'),
(0x115, 'V'),
(0x116, 'M', u'ė'),
(0x117, 'V'),
(0x118, 'M', u'ę'),
(0x119, 'V'),
(0x11A, 'M', u'ě'),
(0x11B, 'V'),
(0x11C, 'M', u'ĝ'),
(0x11D, 'V'),
(0x11E, 'M', u'ğ'),
(0x11F, 'V'),
(0x120, 'M', u'ġ'),
(0x121, 'V'),
(0x122, 'M', u'ģ'),
(0x123, 'V'),
(0x124, 'M', u'ĥ'),
(0x125, 'V'),
(0x126, 'M', u'ħ'),
(0x127, 'V'),
(0x128, 'M', u'ĩ'),
(0x129, 'V'),
(0x12A, 'M', u'ī'),
(0x12B, 'V'),
]
def _seg_3():
return [
(0x12C, 'M', u'ĭ'),
(0x12D, 'V'),
(0x12E, 'M', u'į'),
(0x12F, 'V'),
(0x130, 'M', u'i̇'),
(0x131, 'V'),
(0x132, 'M', u'ij'),
(0x134, 'M', u'ĵ'),
(0x135, 'V'),
(0x136, 'M', u'ķ'),
(0x137, 'V'),
(0x139, 'M', u'ĺ'),
(0x13A, 'V'),
(0x13B, 'M', u'ļ'),
(0x13C, 'V'),
(0x13D, 'M', u'ľ'),
(0x13E, 'V'),
(0x13F, 'M', u'l·'),
(0x141, 'M', u'ł'),
(0x142, 'V'),
(0x143, 'M', u'ń'),
(0x144, 'V'),
(0x145, 'M', u'ņ'),
(0x146, 'V'),
(0x147, 'M', u'ň'),
(0x148, 'V'),
(0x149, 'M', u'ʼn'),
(0x14A, 'M', u'ŋ'),
(0x14B, 'V'),
(0x14C, 'M', u'ō'),
(0x14D, 'V'),
(0x14E, 'M', u'ŏ'),
(0x14F, 'V'),
(0x150, 'M', u'ő'),
(0x151, 'V'),
(0x152, 'M', u'œ'),
(0x153, 'V'),
(0x154, 'M', u'ŕ'),
(0x155, 'V'),
(0x156, 'M', u'ŗ'),
(0x157, 'V'),
(0x158, 'M', u'ř'),
(0x159, 'V'),
(0x15A, 'M', u'ś'),
(0x15B, 'V'),
(0x15C, 'M', u'ŝ'),
(0x15D, 'V'),
(0x15E, 'M', u'ş'),
(0x15F, 'V'),
(0x160, 'M', u'š'),
(0x161, 'V'),
(0x162, 'M', u'ţ'),
(0x163, 'V'),
(0x164, 'M', u'ť'),
(0x165, 'V'),
(0x166, 'M', u'ŧ'),
(0x167, 'V'),
(0x168, 'M', u'ũ'),
(0x169, 'V'),
(0x16A, 'M', u'ū'),
(0x16B, 'V'),
(0x16C, 'M', u'ŭ'),
(0x16D, 'V'),
(0x16E, 'M', u'ů'),
(0x16F, 'V'),
(0x170, 'M', u'ű'),
(0x171, 'V'),
(0x172, 'M', u'ų'),
(0x173, 'V'),
(0x174, 'M', u'ŵ'),
(0x175, 'V'),
(0x176, 'M', u'ŷ'),
(0x177, 'V'),
(0x178, 'M', u'ÿ'),
(0x179, 'M', u'ź'),
(0x17A, 'V'),
(0x17B, 'M', u'ż'),
(0x17C, 'V'),
(0x17D, 'M', u'ž'),
(0x17E, 'V'),
(0x17F, 'M', u's'),
(0x180, 'V'),
(0x181, 'M', u'ɓ'),
(0x182, 'M', u'ƃ'),
(0x183, 'V'),
(0x184, 'M', u'ƅ'),
(0x185, 'V'),
(0x186, 'M', u'ɔ'),
(0x187, 'M', u'ƈ'),
(0x188, 'V'),
(0x189, 'M', u'ɖ'),
(0x18A, 'M', u'ɗ'),
(0x18B, 'M', u'ƌ'),
(0x18C, 'V'),
(0x18E, 'M', u'ǝ'),
(0x18F, 'M', u'ə'),
(0x190, 'M', u'ɛ'),
(0x191, 'M', u'ƒ'),
(0x192, 'V'),
(0x193, 'M', u'ɠ'),
]
def _seg_4():
return [
(0x194, 'M', u'ɣ'),
(0x195, 'V'),
(0x196, 'M', u'ɩ'),
(0x197, 'M', u'ɨ'),
(0x198, 'M', u'ƙ'),
(0x199, 'V'),
(0x19C, 'M', u'ɯ'),
(0x19D, 'M', u'ɲ'),
(0x19E, 'V'),
(0x19F, 'M', u'ɵ'),
(0x1A0, 'M', u'ơ'),
(0x1A1, 'V'),
(0x1A2, 'M', u'ƣ'),
(0x1A3, 'V'),
(0x1A4, 'M', u'ƥ'),
(0x1A5, 'V'),
(0x1A6, 'M', u'ʀ'),
(0x1A7, 'M', u'ƨ'),
(0x1A8, 'V'),
(0x1A9, 'M', u'ʃ'),
(0x1AA, 'V'),
(0x1AC, 'M', u'ƭ'),
(0x1AD, 'V'),
(0x1AE, 'M', u'ʈ'),
(0x1AF, 'M', u'ư'),
(0x1B0, 'V'),
(0x1B1, 'M', u'ʊ'),
(0x1B2, 'M', u'ʋ'),
(0x1B3, 'M', u'ƴ'),
(0x1B4, 'V'),
(0x1B5, 'M', u'ƶ'),
(0x1B6, 'V'),
(0x1B7, 'M', u'ʒ'),
(0x1B8, 'M', u'ƹ'),
(0x1B9, 'V'),
(0x1BC, 'M', u'ƽ'),
(0x1BD, 'V'),
(0x1C4, 'M', u'dž'),
(0x1C7, 'M', u'lj'),
(0x1CA, 'M', u'nj'),
(0x1CD, 'M', u'ǎ'),
(0x1CE, 'V'),
(0x1CF, 'M', u'ǐ'),
(0x1D0, 'V'),
(0x1D1, 'M', u'ǒ'),
(0x1D2, 'V'),
(0x1D3, 'M', u'ǔ'),
(0x1D4, 'V'),
(0x1D5, 'M', u'ǖ'),
(0x1D6, 'V'),
(0x1D7, 'M', u'ǘ'),
(0x1D8, 'V'),
(0x1D9, 'M', u'ǚ'),
(0x1DA, 'V'),
(0x1DB, 'M', u'ǜ'),
(0x1DC, 'V'),
(0x1DE, 'M', u'ǟ'),
(0x1DF, 'V'),
(0x1E0, 'M', u'ǡ'),
(0x1E1, 'V'),
(0x1E2, 'M', u'ǣ'),
(0x1E3, 'V'),
(0x1E4, 'M', u'ǥ'),
(0x1E5, 'V'),
(0x1E6, 'M', u'ǧ'),
(0x1E7, 'V'),
(0x1E8, 'M', u'ǩ'),
(0x1E9, 'V'),
(0x1EA, 'M', u'ǫ'),
(0x1EB, 'V'),
(0x1EC, 'M', u'ǭ'),
(0x1ED, 'V'),
(0x1EE, 'M', u'ǯ'),
(0x1EF, 'V'),
(0x1F1, 'M', u'dz'),
(0x1F4, 'M', u'ǵ'),
(0x1F5, 'V'),
(0x1F6, 'M', u'ƕ'),
(0x1F7, 'M', u'ƿ'),
(0x1F8, 'M', u'ǹ'),
(0x1F9, 'V'),
(0x1FA, 'M', u'ǻ'),
(0x1FB, 'V'),
(0x1FC, 'M', u'ǽ'),
(0x1FD, 'V'),
(0x1FE, 'M', u'ǿ'),
(0x1FF, 'V'),
(0x200, 'M', u'ȁ'),
(0x201, 'V'),
(0x202, 'M', u'ȃ'),
(0x203, 'V'),
(0x204, 'M', u'ȅ'),
(0x205, 'V'),
(0x206, 'M', u'ȇ'),
(0x207, 'V'),
(0x208, 'M', u'ȉ'),
(0x209, 'V'),
(0x20A, 'M', u'ȋ'),
(0x20B, 'V'),
(0x20C, 'M', u'ȍ'),
]
def _seg_5():
return [
(0x20D, 'V'),
(0x20E, 'M', u'ȏ'),
(0x20F, 'V'),
(0x210, 'M', u'ȑ'),
(0x211, 'V'),
(0x212, 'M', u'ȓ'),
(0x213, 'V'),
(0x214, 'M', u'ȕ'),
(0x215, 'V'),
(0x216, 'M', u'ȗ'),
(0x217, 'V'),
(0x218, 'M', u'ș'),
(0x219, 'V'),
(0x21A, 'M', u'ț'),
(0x21B, 'V'),
(0x21C, 'M', u'ȝ'),
(0x21D, 'V'),
(0x21E, 'M', u'ȟ'),
(0x21F, 'V'),
(0x220, 'M', u'ƞ'),
(0x221, 'V'),
(0x222, 'M', u'ȣ'),
(0x223, 'V'),
(0x224, 'M', u'ȥ'),
(0x225, 'V'),
(0x226, 'M', u'ȧ'),
(0x227, 'V'),
(0x228, 'M', u'ȩ'),
(0x229, 'V'),
(0x22A, 'M', u'ȫ'),
(0x22B, 'V'),
(0x22C, 'M', u'ȭ'),
(0x22D, 'V'),
(0x22E, 'M', u'ȯ'),
(0x22F, 'V'),
(0x230, 'M', u'ȱ'),
(0x231, 'V'),
(0x232, 'M', u'ȳ'),
(0x233, 'V'),
(0x23A, 'M', u'ⱥ'),
(0x23B, 'M', u'ȼ'),
(0x23C, 'V'),
(0x23D, 'M', u'ƚ'),
(0x23E, 'M', u'ⱦ'),
(0x23F, 'V'),
(0x241, 'M', u'ɂ'),
(0x242, 'V'),
(0x243, 'M', u'ƀ'),
(0x244, 'M', u'ʉ'),
(0x245, 'M', u'ʌ'),
(0x246, 'M', u'ɇ'),
(0x247, 'V'),
(0x248, 'M', u'ɉ'),
(0x249, 'V'),
(0x24A, 'M', u'ɋ'),
(0x24B, 'V'),
(0x24C, 'M', u'ɍ'),
(0x24D, 'V'),
(0x24E, 'M', u'ɏ'),
(0x24F, 'V'),
(0x2B0, 'M', u'h'),
(0x2B1, 'M', u'ɦ'),
(0x2B2, 'M', u'j'),
(0x2B3, 'M', u'r'),
(0x2B4, 'M', u'ɹ'),
(0x2B5, 'M', u'ɻ'),
(0x2B6, 'M', u'ʁ'),
(0x2B7, 'M', u'w'),
(0x2B8, 'M', u'y'),
(0x2B9, 'V'),
(0x2D8, '3', u' ̆'),
(0x2D9, '3', u' ̇'),
(0x2DA, '3', u' ̊'),
(0x2DB, '3', u' ̨'),
(0x2DC, '3', u' ̃'),
(0x2DD, '3', u' ̋'),
(0x2DE, 'V'),
(0x2E0, 'M', u'ɣ'),
(0x2E1, 'M', u'l'),
(0x2E2, 'M', u's'),
(0x2E3, 'M', u'x'),
(0x2E4, 'M', u'ʕ'),
(0x2E5, 'V'),
(0x340, 'M', u'̀'),
(0x341, 'M', u'́'),
(0x342, 'V'),
(0x343, 'M', u'̓'),
(0x344, 'M', u'̈́'),
(0x345, 'M', u'ι'),
(0x346, 'V'),
(0x34F, 'I'),
(0x350, 'V'),
(0x370, 'M', u'ͱ'),
(0x371, 'V'),
(0x372, 'M', u'ͳ'),
(0x373, 'V'),
(0x374, 'M', u'ʹ'),
(0x375, 'V'),
(0x376, 'M', u'ͷ'),
(0x377, 'V'),
]
def _seg_6():
return [
(0x378, 'X'),
(0x37A, '3', u' ι'),
(0x37B, 'V'),
(0x37E, '3', u';'),
(0x37F, 'M', u'ϳ'),
(0x380, 'X'),
(0x384, '3', u' ́'),
(0x385, '3', u' ̈́'),
(0x386, 'M', u'ά'),
(0x387, 'M', u'·'),
(0x388, 'M', u'έ'),
(0x389, 'M', u'ή'),
(0x38A, 'M', u'ί'),
(0x38B, 'X'),
(0x38C, 'M', u'ό'),
(0x38D, 'X'),
(0x38E, 'M', u'ύ'),
(0x38F, 'M', u'ώ'),
(0x390, 'V'),
(0x391, 'M', u'α'),
(0x392, 'M', u'β'),
(0x393, 'M', u'γ'),
(0x394, 'M', u'δ'),
(0x395, 'M', u'ε'),
(0x396, 'M', u'ζ'),
(0x397, 'M', u'η'),
(0x398, 'M', u'θ'),
(0x399, 'M', u'ι'),
(0x39A, 'M', u'κ'),
(0x39B, 'M', u'λ'),
(0x39C, 'M', u'μ'),
(0x39D, 'M', u'ν'),
(0x39E, 'M', u'ξ'),
(0x39F, 'M', u'ο'),
(0x3A0, 'M', u'π'),
(0x3A1, 'M', u'ρ'),
(0x3A2, 'X'),
(0x3A3, 'M', u'σ'),
(0x3A4, 'M', u'τ'),
(0x3A5, 'M', u'υ'),
(0x3A6, 'M', u'φ'),
(0x3A7, 'M', u'χ'),
(0x3A8, 'M', u'ψ'),
(0x3A9, 'M', u'ω'),
(0x3AA, 'M', u'ϊ'),
(0x3AB, 'M', u'ϋ'),
(0x3AC, 'V'),
(0x3C2, 'D', u'σ'),
(0x3C3, 'V'),
(0x3CF, 'M', u'ϗ'),
(0x3D0, 'M', u'β'),
(0x3D1, 'M', u'θ'),
(0x3D2, 'M', u'υ'),
(0x3D3, 'M', u'ύ'),
(0x3D4, 'M', u'ϋ'),
(0x3D5, 'M', u'φ'),
(0x3D6, 'M', u'π'),
(0x3D7, 'V'),
(0x3D8, 'M', u'ϙ'),
(0x3D9, 'V'),
(0x3DA, 'M', u'ϛ'),
(0x3DB, 'V'),
(0x3DC, 'M', u'ϝ'),
(0x3DD, 'V'),
(0x3DE, 'M', u'ϟ'),
(0x3DF, 'V'),
(0x3E0, 'M', u'ϡ'),
(0x3E1, 'V'),
(0x3E2, 'M', u'ϣ'),
(0x3E3, 'V'),
(0x3E4, 'M', u'ϥ'),
(0x3E5, 'V'),
(0x3E6, 'M', u'ϧ'),
(0x3E7, 'V'),
(0x3E8, 'M', u'ϩ'),
(0x3E9, 'V'),
(0x3EA, 'M', u'ϫ'),
(0x3EB, 'V'),
(0x3EC, 'M', u'ϭ'),
(0x3ED, 'V'),
(0x3EE, 'M', u'ϯ'),
(0x3EF, 'V'),
(0x3F0, 'M', u'κ'),
(0x3F1, 'M', u'ρ'),
(0x3F2, 'M', u'σ'),
(0x3F3, 'V'),
(0x3F4, 'M', u'θ'),
(0x3F5, 'M', u'ε'),
(0x3F6, 'V'),
(0x3F7, 'M', u'ϸ'),
(0x3F8, 'V'),
(0x3F9, 'M', u'σ'),
(0x3FA, 'M', u'ϻ'),
(0x3FB, 'V'),
(0x3FD, 'M', u'ͻ'),
(0x3FE, 'M', u'ͼ'),
(0x3FF, 'M', u'ͽ'),
(0x400, 'M', u'ѐ'),
(0x401, 'M', u'ё'),
(0x402, 'M', u'ђ'),
]
def _seg_7():
return [
(0x403, 'M', u'ѓ'),
(0x404, 'M', u'є'),
(0x405, 'M', u'ѕ'),
(0x406, 'M', u'і'),
(0x407, 'M', u'ї'),
(0x408, 'M', u'ј'),
(0x409, 'M', u'љ'),
(0x40A, 'M', u'њ'),
(0x40B, 'M', u'ћ'),
(0x40C, 'M', u'ќ'),
(0x40D, 'M', u'ѝ'),
(0x40E, 'M', u'ў'),
(0x40F, 'M', u'џ'),
(0x410, 'M', u'а'),
(0x411, 'M', u'б'),
(0x412, 'M', u'в'),
(0x413, 'M', u'г'),
(0x414, 'M', u'д'),
(0x415, 'M', u'е'),
(0x416, 'M', u'ж'),
(0x417, 'M', u'з'),
(0x418, 'M', u'и'),
(0x419, 'M', u'й'),
(0x41A, 'M', u'к'),
(0x41B, 'M', u'л'),
(0x41C, 'M', u'м'),
(0x41D, 'M', u'н'),
(0x41E, 'M', u'о'),
(0x41F, 'M', u'п'),
(0x420, 'M', u'р'),
(0x421, 'M', u'с'),
(0x422, 'M', u'т'),
(0x423, 'M', u'у'),
(0x424, 'M', u'ф'),
(0x425, 'M', u'х'),
(0x426, 'M', u'ц'),
(0x427, 'M', u'ч'),
(0x428, 'M', u'ш'),
(0x429, 'M', u'щ'),
(0x42A, 'M', u'ъ'),
(0x42B, 'M', u'ы'),
(0x42C, 'M', u'ь'),
(0x42D, 'M', u'э'),
(0x42E, 'M', u'ю'),
(0x42F, 'M', u'я'),
(0x430, 'V'),
(0x460, 'M', u'ѡ'),
(0x461, 'V'),
(0x462, 'M', u'ѣ'),
(0x463, 'V'),
(0x464, 'M', u'ѥ'),
(0x465, 'V'),
(0x466, 'M', u'ѧ'),
(0x467, 'V'),
(0x468, 'M', u'ѩ'),
(0x469, 'V'),
(0x46A, 'M', u'ѫ'),
(0x46B, 'V'),
(0x46C, 'M', u'ѭ'),
(0x46D, 'V'),
(0x46E, 'M', u'ѯ'),
(0x46F, 'V'),
(0x470, 'M', u'ѱ'),
(0x471, 'V'),
(0x472, 'M', u'ѳ'),
(0x473, 'V'),
(0x474, 'M', u'ѵ'),
(0x475, 'V'),
(0x476, 'M', u'ѷ'),
(0x477, 'V'),
(0x478, 'M', u'ѹ'),
(0x479, 'V'),
(0x47A, 'M', u'ѻ'),
(0x47B, 'V'),
(0x47C, 'M', u'ѽ'),
(0x47D, 'V'),
(0x47E, 'M', u'ѿ'),
(0x47F, 'V'),
(0x480, 'M', u'ҁ'),
(0x481, 'V'),
(0x48A, 'M', u'ҋ'),
(0x48B, 'V'),
(0x48C, 'M', u'ҍ'),
(0x48D, 'V'),
(0x48E, 'M', u'ҏ'),
(0x48F, 'V'),
(0x490, 'M', u'ґ'),
(0x491, 'V'),
(0x492, 'M', u'ғ'),
(0x493, 'V'),
(0x494, 'M', u'ҕ'),
(0x495, 'V'),
(0x496, 'M', u'җ'),
(0x497, 'V'),
(0x498, 'M', u'ҙ'),
(0x499, 'V'),
(0x49A, 'M', u'қ'),
(0x49B, 'V'),
(0x49C, 'M', u'ҝ'),
(0x49D, 'V'),
]
def _seg_8():
return [
(0x49E, 'M', u'ҟ'),
(0x49F, 'V'),
(0x4A0, 'M', u'ҡ'),
(0x4A1, 'V'),
(0x4A2, 'M', u'ң'),
(0x4A3, 'V'),
(0x4A4, 'M', u'ҥ'),
(0x4A5, 'V'),
(0x4A6, 'M', u'ҧ'),
(0x4A7, 'V'),
(0x4A8, 'M', u'ҩ'),
(0x4A9, 'V'),
(0x4AA, 'M', u'ҫ'),
(0x4AB, 'V'),
(0x4AC, 'M', u'ҭ'),
(0x4AD, 'V'),
(0x4AE, 'M', u'ү'),
(0x4AF, 'V'),
(0x4B0, 'M', u'ұ'),
(0x4B1, 'V'),
(0x4B2, 'M', u'ҳ'),
(0x4B3, 'V'),
(0x4B4, 'M', u'ҵ'),
(0x4B5, 'V'),
(0x4B6, 'M', u'ҷ'),
(0x4B7, 'V'),
(0x4B8, 'M', u'ҹ'),
(0x4B9, 'V'),
(0x4BA, 'M', u'һ'),
(0x4BB, 'V'),
(0x4BC, 'M', u'ҽ'),
(0x4BD, 'V'),
(0x4BE, 'M', u'ҿ'),
(0x4BF, 'V'),
(0x4C0, 'X'),
(0x4C1, 'M', u'ӂ'),
(0x4C2, 'V'),
(0x4C3, 'M', u'ӄ'),
(0x4C4, 'V'),
(0x4C5, 'M', u'ӆ'),
(0x4C6, 'V'),
(0x4C7, 'M', u'ӈ'),
(0x4C8, 'V'),
(0x4C9, 'M', u'ӊ'),
(0x4CA, 'V'),
(0x4CB, 'M', u'ӌ'),
(0x4CC, 'V'),
(0x4CD, 'M', u'ӎ'),
(0x4CE, 'V'),
(0x4D0, 'M', u'ӑ'),
(0x4D1, 'V'),
(0x4D2, 'M', u'ӓ'),
(0x4D3, 'V'),
(0x4D4, 'M', u'ӕ'),
(0x4D5, 'V'),
(0x4D6, 'M', u'ӗ'),
(0x4D7, 'V'),
(0x4D8, 'M', u'ә'),
(0x4D9, 'V'),
(0x4DA, 'M', u'ӛ'),
(0x4DB, 'V'),
(0x4DC, 'M', u'ӝ'),
(0x4DD, 'V'),
(0x4DE, 'M', u'ӟ'),
(0x4DF, 'V'),
(0x4E0, 'M', u'ӡ'),
(0x4E1, 'V'),
(0x4E2, 'M', u'ӣ'),
(0x4E3, 'V'),
(0x4E4, 'M', u'ӥ'),
(0x4E5, 'V'),
(0x4E6, 'M', u'ӧ'),
(0x4E7, 'V'),
(0x4E8, 'M', u'ө'),
(0x4E9, 'V'),
(0x4EA, 'M', u'ӫ'),
(0x4EB, 'V'),
(0x4EC, 'M', u'ӭ'),
(0x4ED, 'V'),
(0x4EE, 'M', u'ӯ'),
(0x4EF, 'V'),
(0x4F0, 'M', u'ӱ'),
(0x4F1, 'V'),
(0x4F2, 'M', u'ӳ'),
(0x4F3, 'V'),
(0x4F4, 'M', u'ӵ'),
(0x4F5, 'V'),
(0x4F6, 'M', u'ӷ'),
(0x4F7, 'V'),
(0x4F8, 'M', u'ӹ'),
(0x4F9, 'V'),
(0x4FA, 'M', u'ӻ'),
(0x4FB, 'V'),
(0x4FC, 'M', u'ӽ'),
(0x4FD, 'V'),
(0x4FE, 'M', u'ӿ'),
(0x4FF, 'V'),
(0x500, 'M', u'ԁ'),
(0x501, 'V'),
(0x502, 'M', u'ԃ'),
]
def _seg_9():
return [
(0x503, 'V'),
(0x504, 'M', u'ԅ'),
(0x505, 'V'),
(0x506, 'M', u'ԇ'),
(0x507, 'V'),
(0x508, 'M', u'ԉ'),
(0x509, 'V'),
(0x50A, 'M', u'ԋ'),
(0x50B, 'V'),
(0x50C, 'M', u'ԍ'),
(0x50D, 'V'),
(0x50E, 'M', u'ԏ'),
(0x50F, 'V'),
(0x510, 'M', u'ԑ'),
(0x511, 'V'),
(0x512, 'M', u'ԓ'),
(0x513, 'V'),
(0x514, 'M', u'ԕ'),
(0x515, 'V'),
(0x516, 'M', u'ԗ'),
(0x517, 'V'),
(0x518, 'M', u'ԙ'),
(0x519, 'V'),
(0x51A, 'M', u'ԛ'),
(0x51B, 'V'),
(0x51C, 'M', u'ԝ'),
(0x51D, 'V'),
(0x51E, 'M', u'ԟ'),
(0x51F, 'V'),
(0x520, 'M', u'ԡ'),
(0x521, 'V'),
(0x522, 'M', u'ԣ'),
(0x523, 'V'),
(0x524, 'M', u'ԥ'),
(0x525, 'V'),
(0x526, 'M', u'ԧ'),
(0x527, 'V'),
(0x528, 'M', u'ԩ'),
(0x529, 'V'),
(0x52A, 'M', u'ԫ'),
(0x52B, 'V'),
(0x52C, 'M', u'ԭ'),
(0x52D, 'V'),
(0x52E, 'M', u'ԯ'),
(0x52F, 'V'),
(0x530, 'X'),
(0x531, 'M', u'ա'),
(0x532, 'M', u'բ'),
(0x533, 'M', u'գ'),
(0x534, 'M', u'դ'),
(0x535, 'M', u'ե'),
(0x536, 'M', u'զ'),
(0x537, 'M', u'է'),
(0x538, 'M', u'ը'),
(0x539, 'M', u'թ'),
(0x53A, 'M', u'ժ'),
(0x53B, 'M', u'ի'),
(0x53C, 'M', u'լ'),
(0x53D, 'M', u'խ'),
(0x53E, 'M', u'ծ'),
(0x53F, 'M', u'կ'),
(0x540, 'M', u'հ'),
(0x541, 'M', u'ձ'),
(0x542, 'M', u'ղ'),
(0x543, 'M', u'ճ'),
(0x544, 'M', u'մ'),
(0x545, 'M', u'յ'),
(0x546, 'M', u'ն'),
(0x547, 'M', u'շ'),
(0x548, 'M', u'ո'),
(0x549, 'M', u'չ'),
(0x54A, 'M', u'պ'),
(0x54B, 'M', u'ջ'),
(0x54C, 'M', u'ռ'),
(0x54D, 'M', u'ս'),
(0x54E, 'M', u'վ'),
(0x54F, 'M', u'տ'),
(0x550, 'M', u'ր'),
(0x551, 'M', u'ց'),
(0x552, 'M', u'ւ'),
(0x553, 'M', u'փ'),
(0x554, 'M', u'ք'),
(0x555, 'M', u'օ'),
(0x556, 'M', u'ֆ'),
(0x557, 'X'),
(0x559, 'V'),
(0x587, 'M', u'եւ'),
(0x588, 'V'),
(0x58B, 'X'),
(0x58D, 'V'),
(0x590, 'X'),
(0x591, 'V'),
(0x5C8, 'X'),
(0x5D0, 'V'),
(0x5EB, 'X'),
(0x5EF, 'V'),
(0x5F5, 'X'),
(0x606, 'V'),
(0x61C, 'X'),
(0x61E, 'V'),
]
def _seg_10():
return [
(0x675, 'M', u'اٴ'),
(0x676, 'M', u'وٴ'),
(0x677, 'M', u'ۇٴ'),
(0x678, 'M', u'يٴ'),
(0x679, 'V'),
(0x6DD, 'X'),
(0x6DE, 'V'),
(0x70E, 'X'),
(0x710, 'V'),
(0x74B, 'X'),
(0x74D, 'V'),
(0x7B2, 'X'),
(0x7C0, 'V'),
(0x7FB, 'X'),
(0x7FD, 'V'),
(0x82E, 'X'),
(0x830, 'V'),
(0x83F, 'X'),
(0x840, 'V'),
(0x85C, 'X'),
(0x85E, 'V'),
(0x85F, 'X'),
(0x860, 'V'),
(0x86B, 'X'),
(0x8A0, 'V'),
(0x8B5, 'X'),
(0x8B6, 'V'),
(0x8C8, 'X'),
(0x8D3, 'V'),
(0x8E2, 'X'),
(0x8E3, 'V'),
(0x958, 'M', u'क़'),
(0x959, 'M', u'ख़'),
(0x95A, 'M', u'ग़'),
(0x95B, 'M', u'ज़'),
(0x95C, 'M', u'ड़'),
(0x95D, 'M', u'ढ़'),
(0x95E, 'M', u'फ़'),
(0x95F, 'M', u'य़'),
(0x960, 'V'),
(0x984, 'X'),
(0x985, 'V'),
(0x98D, 'X'),
(0x98F, 'V'),
(0x991, 'X'),
(0x993, 'V'),
(0x9A9, 'X'),
(0x9AA, 'V'),
(0x9B1, 'X'),
(0x9B2, 'V'),
(0x9B3, 'X'),
(0x9B6, 'V'),
(0x9BA, 'X'),
(0x9BC, 'V'),
(0x9C5, 'X'),
(0x9C7, 'V'),
(0x9C9, 'X'),
(0x9CB, 'V'),
(0x9CF, 'X'),
(0x9D7, 'V'),
(0x9D8, 'X'),
(0x9DC, 'M', u'ড়'),
(0x9DD, 'M', u'ঢ়'),
(0x9DE, 'X'),
(0x9DF, 'M', u'য়'),
(0x9E0, 'V'),
(0x9E4, 'X'),
(0x9E6, 'V'),
(0x9FF, 'X'),
(0xA01, 'V'),
(0xA04, 'X'),
(0xA05, 'V'),
(0xA0B, 'X'),
(0xA0F, 'V'),
(0xA11, 'X'),
(0xA13, 'V'),
(0xA29, 'X'),
(0xA2A, 'V'),
(0xA31, 'X'),
(0xA32, 'V'),
(0xA33, 'M', u'ਲ਼'),
(0xA34, 'X'),
(0xA35, 'V'),
(0xA36, 'M', u'ਸ਼'),
(0xA37, 'X'),
(0xA38, 'V'),
(0xA3A, 'X'),
(0xA3C, 'V'),
(0xA3D, 'X'),
(0xA3E, 'V'),
(0xA43, 'X'),
(0xA47, 'V'),
(0xA49, 'X'),
(0xA4B, 'V'),
(0xA4E, 'X'),
(0xA51, 'V'),
(0xA52, 'X'),
(0xA59, 'M', u'ਖ਼'),
(0xA5A, 'M', u'ਗ਼'),
(0xA5B, 'M', u'ਜ਼'),
]
def _seg_11():
return [
(0xA5C, 'V'),
(0xA5D, 'X'),
(0xA5E, 'M', u'ਫ਼'),
(0xA5F, 'X'),
(0xA66, 'V'),
(0xA77, 'X'),
(0xA81, 'V'),
(0xA84, 'X'),
(0xA85, 'V'),
(0xA8E, 'X'),
(0xA8F, 'V'),
(0xA92, 'X'),
(0xA93, 'V'),
(0xAA9, 'X'),
(0xAAA, 'V'),
(0xAB1, 'X'),
(0xAB2, 'V'),
(0xAB4, 'X'),
(0xAB5, 'V'),
(0xABA, 'X'),
(0xABC, 'V'),
(0xAC6, 'X'),
(0xAC7, 'V'),
(0xACA, 'X'),
(0xACB, 'V'),
(0xACE, 'X'),
(0xAD0, 'V'),
(0xAD1, 'X'),
(0xAE0, 'V'),
(0xAE4, 'X'),
(0xAE6, 'V'),
(0xAF2, 'X'),
(0xAF9, 'V'),
(0xB00, 'X'),
(0xB01, 'V'),
(0xB04, 'X'),
(0xB05, 'V'),
(0xB0D, 'X'),
(0xB0F, 'V'),
(0xB11, 'X'),
(0xB13, 'V'),
(0xB29, 'X'),
(0xB2A, 'V'),
(0xB31, 'X'),
(0xB32, 'V'),
(0xB34, 'X'),
(0xB35, 'V'),
(0xB3A, 'X'),
(0xB3C, 'V'),
(0xB45, 'X'),
(0xB47, 'V'),
(0xB49, 'X'),
(0xB4B, 'V'),
(0xB4E, 'X'),
(0xB55, 'V'),
(0xB58, 'X'),
(0xB5C, 'M', u'ଡ଼'),
(0xB5D, 'M', u'ଢ଼'),
(0xB5E, 'X'),
(0xB5F, 'V'),
(0xB64, 'X'),
(0xB66, 'V'),
(0xB78, 'X'),
(0xB82, 'V'),
(0xB84, 'X'),
(0xB85, 'V'),
(0xB8B, 'X'),
(0xB8E, 'V'),
(0xB91, 'X'),
(0xB92, 'V'),
(0xB96, 'X'),
(0xB99, 'V'),
(0xB9B, 'X'),
(0xB9C, 'V'),
(0xB9D, 'X'),
(0xB9E, 'V'),
(0xBA0, 'X'),
(0xBA3, 'V'),
(0xBA5, 'X'),
(0xBA8, 'V'),
(0xBAB, 'X'),
(0xBAE, 'V'),
(0xBBA, 'X'),
(0xBBE, 'V'),
(0xBC3, 'X'),
(0xBC6, 'V'),
(0xBC9, 'X'),
(0xBCA, 'V'),
(0xBCE, 'X'),
(0xBD0, 'V'),
(0xBD1, 'X'),
(0xBD7, 'V'),
(0xBD8, 'X'),
(0xBE6, 'V'),
(0xBFB, 'X'),
(0xC00, 'V'),
(0xC0D, 'X'),
(0xC0E, 'V'),
(0xC11, 'X'),
(0xC12, 'V'),
]
def _seg_12():
return [
(0xC29, 'X'),
(0xC2A, 'V'),
(0xC3A, 'X'),
(0xC3D, 'V'),
(0xC45, 'X'),
(0xC46, 'V'),
(0xC49, 'X'),
(0xC4A, 'V'),
(0xC4E, 'X'),
(0xC55, 'V'),
(0xC57, 'X'),
(0xC58, 'V'),
(0xC5B, 'X'),
(0xC60, 'V'),
(0xC64, 'X'),
(0xC66, 'V'),
(0xC70, 'X'),
(0xC77, 'V'),
(0xC8D, 'X'),
(0xC8E, 'V'),
(0xC91, 'X'),
(0xC92, 'V'),
(0xCA9, 'X'),
(0xCAA, 'V'),
(0xCB4, 'X'),
(0xCB5, 'V'),
(0xCBA, 'X'),
(0xCBC, 'V'),
(0xCC5, 'X'),
(0xCC6, 'V'),
(0xCC9, 'X'),
(0xCCA, 'V'),
(0xCCE, 'X'),
(0xCD5, 'V'),
(0xCD7, 'X'),
(0xCDE, 'V'),
(0xCDF, 'X'),
(0xCE0, 'V'),
(0xCE4, 'X'),
(0xCE6, 'V'),
(0xCF0, 'X'),
(0xCF1, 'V'),
(0xCF3, 'X'),
(0xD00, 'V'),
(0xD0D, 'X'),
(0xD0E, 'V'),
(0xD11, 'X'),
(0xD12, 'V'),
(0xD45, 'X'),
(0xD46, 'V'),
(0xD49, 'X'),
(0xD4A, 'V'),
(0xD50, 'X'),
(0xD54, 'V'),
(0xD64, 'X'),
(0xD66, 'V'),
(0xD80, 'X'),
(0xD81, 'V'),
(0xD84, 'X'),
(0xD85, 'V'),
(0xD97, 'X'),
(0xD9A, 'V'),
(0xDB2, 'X'),
(0xDB3, 'V'),
(0xDBC, 'X'),
(0xDBD, 'V'),
(0xDBE, 'X'),
(0xDC0, 'V'),
(0xDC7, 'X'),
(0xDCA, 'V'),
(0xDCB, 'X'),
(0xDCF, 'V'),
(0xDD5, 'X'),
(0xDD6, 'V'),
(0xDD7, 'X'),
(0xDD8, 'V'),
(0xDE0, 'X'),
(0xDE6, 'V'),
(0xDF0, 'X'),
(0xDF2, 'V'),
(0xDF5, 'X'),
(0xE01, 'V'),
(0xE33, 'M', u'ํา'),
(0xE34, 'V'),
(0xE3B, 'X'),
(0xE3F, 'V'),
(0xE5C, 'X'),
(0xE81, 'V'),
(0xE83, 'X'),
(0xE84, 'V'),
(0xE85, 'X'),
(0xE86, 'V'),
(0xE8B, 'X'),
(0xE8C, 'V'),
(0xEA4, 'X'),
(0xEA5, 'V'),
(0xEA6, 'X'),
(0xEA7, 'V'),
(0xEB3, 'M', u'ໍາ'),
(0xEB4, 'V'),
]
def _seg_13():
return [
(0xEBE, 'X'),
(0xEC0, 'V'),
(0xEC5, 'X'),
(0xEC6, 'V'),
(0xEC7, 'X'),
(0xEC8, 'V'),
(0xECE, 'X'),
(0xED0, 'V'),
(0xEDA, 'X'),
(0xEDC, 'M', u'ຫນ'),
(0xEDD, 'M', u'ຫມ'),
(0xEDE, 'V'),
(0xEE0, 'X'),
(0xF00, 'V'),
(0xF0C, 'M', u'་'),
(0xF0D, 'V'),
(0xF43, 'M', u'གྷ'),
(0xF44, 'V'),
(0xF48, 'X'),
(0xF49, 'V'),
(0xF4D, 'M', u'ཌྷ'),
(0xF4E, 'V'),
(0xF52, 'M', u'དྷ'),
(0xF53, 'V'),
(0xF57, 'M', u'བྷ'),
(0xF58, 'V'),
(0xF5C, 'M', u'ཛྷ'),
(0xF5D, 'V'),
(0xF69, 'M', u'ཀྵ'),
(0xF6A, 'V'),
(0xF6D, 'X'),
(0xF71, 'V'),
(0xF73, 'M', u'ཱི'),
(0xF74, 'V'),
(0xF75, 'M', u'ཱུ'),
(0xF76, 'M', u'ྲྀ'),
(0xF77, 'M', u'ྲཱྀ'),
(0xF78, 'M', u'ླྀ'),
(0xF79, 'M', u'ླཱྀ'),
(0xF7A, 'V'),
(0xF81, 'M', u'ཱྀ'),
(0xF82, 'V'),
(0xF93, 'M', u'ྒྷ'),
(0xF94, 'V'),
(0xF98, 'X'),
(0xF99, 'V'),
(0xF9D, 'M', u'ྜྷ'),
(0xF9E, 'V'),
(0xFA2, 'M', u'ྡྷ'),
(0xFA3, 'V'),
(0xFA7, 'M', u'ྦྷ'),
(0xFA8, 'V'),
(0xFAC, 'M', u'ྫྷ'),
(0xFAD, 'V'),
(0xFB9, 'M', u'ྐྵ'),
(0xFBA, 'V'),
(0xFBD, 'X'),
(0xFBE, 'V'),
(0xFCD, 'X'),
(0xFCE, 'V'),
(0xFDB, 'X'),
(0x1000, 'V'),
(0x10A0, 'X'),
(0x10C7, 'M', u'ⴧ'),
(0x10C8, 'X'),
(0x10CD, 'M', u'ⴭ'),
(0x10CE, 'X'),
(0x10D0, 'V'),
(0x10FC, 'M', u'ნ'),
(0x10FD, 'V'),
(0x115F, 'X'),
(0x1161, 'V'),
(0x1249, 'X'),
(0x124A, 'V'),
(0x124E, 'X'),
(0x1250, 'V'),
(0x1257, 'X'),
(0x1258, 'V'),
(0x1259, 'X'),
(0x125A, 'V'),
(0x125E, 'X'),
(0x1260, 'V'),
(0x1289, 'X'),
(0x128A, 'V'),
(0x128E, 'X'),
(0x1290, 'V'),
(0x12B1, 'X'),
(0x12B2, 'V'),
(0x12B6, 'X'),
(0x12B8, 'V'),
(0x12BF, 'X'),
(0x12C0, 'V'),
(0x12C1, 'X'),
(0x12C2, 'V'),
(0x12C6, 'X'),
(0x12C8, 'V'),
(0x12D7, 'X'),
(0x12D8, 'V'),
(0x1311, 'X'),
(0x1312, 'V'),
]
def _seg_14():
return [
(0x1316, 'X'),
(0x1318, 'V'),
(0x135B, 'X'),
(0x135D, 'V'),
(0x137D, 'X'),
(0x1380, 'V'),
(0x139A, 'X'),
(0x13A0, 'V'),
(0x13F6, 'X'),
(0x13F8, 'M', u'Ᏸ'),
(0x13F9, 'M', u'Ᏹ'),
(0x13FA, 'M', u'Ᏺ'),
(0x13FB, 'M', u'Ᏻ'),
(0x13FC, 'M', u'Ᏼ'),
(0x13FD, 'M', u'Ᏽ'),
(0x13FE, 'X'),
(0x1400, 'V'),
(0x1680, 'X'),
(0x1681, 'V'),
(0x169D, 'X'),
(0x16A0, 'V'),
(0x16F9, 'X'),
(0x1700, 'V'),
(0x170D, 'X'),
(0x170E, 'V'),
(0x1715, 'X'),
(0x1720, 'V'),
(0x1737, 'X'),
(0x1740, 'V'),
(0x1754, 'X'),
(0x1760, 'V'),
(0x176D, 'X'),
(0x176E, 'V'),
(0x1771, 'X'),
(0x1772, 'V'),
(0x1774, 'X'),
(0x1780, 'V'),
(0x17B4, 'X'),
(0x17B6, 'V'),
(0x17DE, 'X'),
(0x17E0, 'V'),
(0x17EA, 'X'),
(0x17F0, 'V'),
(0x17FA, 'X'),
(0x1800, 'V'),
(0x1806, 'X'),
(0x1807, 'V'),
(0x180B, 'I'),
(0x180E, 'X'),
(0x1810, 'V'),
(0x181A, 'X'),
(0x1820, 'V'),
(0x1879, 'X'),
(0x1880, 'V'),
(0x18AB, 'X'),
(0x18B0, 'V'),
(0x18F6, 'X'),
(0x1900, 'V'),
(0x191F, 'X'),
(0x1920, 'V'),
(0x192C, 'X'),
(0x1930, 'V'),
(0x193C, 'X'),
(0x1940, 'V'),
(0x1941, 'X'),
(0x1944, 'V'),
(0x196E, 'X'),
(0x1970, 'V'),
(0x1975, 'X'),
(0x1980, 'V'),
(0x19AC, 'X'),
(0x19B0, 'V'),
(0x19CA, 'X'),
(0x19D0, 'V'),
(0x19DB, 'X'),
(0x19DE, 'V'),
(0x1A1C, 'X'),
(0x1A1E, 'V'),
(0x1A5F, 'X'),
(0x1A60, 'V'),
(0x1A7D, 'X'),
(0x1A7F, 'V'),
(0x1A8A, 'X'),
(0x1A90, 'V'),
(0x1A9A, 'X'),
(0x1AA0, 'V'),
(0x1AAE, 'X'),
(0x1AB0, 'V'),
(0x1AC1, 'X'),
(0x1B00, 'V'),
(0x1B4C, 'X'),
(0x1B50, 'V'),
(0x1B7D, 'X'),
(0x1B80, 'V'),
(0x1BF4, 'X'),
(0x1BFC, 'V'),
(0x1C38, 'X'),
(0x1C3B, 'V'),
(0x1C4A, 'X'),
(0x1C4D, 'V'),
]
def _seg_15():
return [
(0x1C80, 'M', u'в'),
(0x1C81, 'M', u'д'),
(0x1C82, 'M', u'о'),
(0x1C83, 'M', u'с'),
(0x1C84, 'M', u'т'),
(0x1C86, 'M', u'ъ'),
(0x1C87, 'M', u'ѣ'),
(0x1C88, 'M', u'ꙋ'),
(0x1C89, 'X'),
(0x1C90, 'M', u'ა'),
(0x1C91, 'M', u'ბ'),
(0x1C92, 'M', u'გ'),
(0x1C93, 'M', u'დ'),
(0x1C94, 'M', u'ე'),
(0x1C95, 'M', u'ვ'),
(0x1C96, 'M', u'ზ'),
(0x1C97, 'M', u'თ'),
(0x1C98, 'M', u'ი'),
(0x1C99, 'M', u'კ'),
(0x1C9A, 'M', u'ლ'),
(0x1C9B, 'M', u'მ'),
(0x1C9C, 'M', u'ნ'),
(0x1C9D, 'M', u'ო'),
(0x1C9E, 'M', u'პ'),
(0x1C9F, 'M', u'ჟ'),
(0x1CA0, 'M', u'რ'),
(0x1CA1, 'M', u'ს'),
(0x1CA2, 'M', u'ტ'),
(0x1CA3, 'M', u'უ'),
(0x1CA4, 'M', u'ფ'),
(0x1CA5, 'M', u'ქ'),
(0x1CA6, 'M', u'ღ'),
(0x1CA7, 'M', u'ყ'),
(0x1CA8, 'M', u'შ'),
(0x1CA9, 'M', u'ჩ'),
(0x1CAA, 'M', u'ც'),
(0x1CAB, 'M', u'ძ'),
(0x1CAC, 'M', u'წ'),
(0x1CAD, 'M', u'ჭ'),
(0x1CAE, 'M', u'ხ'),
(0x1CAF, 'M', u'ჯ'),
(0x1CB0, 'M', u'ჰ'),
(0x1CB1, 'M', u'ჱ'),
(0x1CB2, 'M', u'ჲ'),
(0x1CB3, 'M', u'ჳ'),
(0x1CB4, 'M', u'ჴ'),
(0x1CB5, 'M', u'ჵ'),
(0x1CB6, 'M', u'ჶ'),
(0x1CB7, 'M', u'ჷ'),
(0x1CB8, 'M', u'ჸ'),
(0x1CB9, 'M', u'ჹ'),
(0x1CBA, 'M', u'ჺ'),
(0x1CBB, 'X'),
(0x1CBD, 'M', u'ჽ'),
(0x1CBE, 'M', u'ჾ'),
(0x1CBF, 'M', u'ჿ'),
(0x1CC0, 'V'),
(0x1CC8, 'X'),
(0x1CD0, 'V'),
(0x1CFB, 'X'),
(0x1D00, 'V'),
(0x1D2C, 'M', u'a'),
(0x1D2D, 'M', u'æ'),
(0x1D2E, 'M', u'b'),
(0x1D2F, 'V'),
(0x1D30, 'M', u'd'),
(0x1D31, 'M', u'e'),
(0x1D32, 'M', u'ǝ'),
(0x1D33, 'M', u'g'),
(0x1D34, 'M', u'h'),
(0x1D35, 'M', u'i'),
(0x1D36, 'M', u'j'),
(0x1D37, 'M', u'k'),
(0x1D38, 'M', u'l'),
(0x1D39, 'M', u'm'),
(0x1D3A, 'M', u'n'),
(0x1D3B, 'V'),
(0x1D3C, 'M', u'o'),
(0x1D3D, 'M', u'ȣ'),
(0x1D3E, 'M', u'p'),
(0x1D3F, 'M', u'r'),
(0x1D40, 'M', u't'),
(0x1D41, 'M', u'u'),
(0x1D42, 'M', u'w'),
(0x1D43, 'M', u'a'),
(0x1D44, 'M', u'ɐ'),
(0x1D45, 'M', u'ɑ'),
(0x1D46, 'M', u'ᴂ'),
(0x1D47, 'M', u'b'),
(0x1D48, 'M', u'd'),
(0x1D49, 'M', u'e'),
(0x1D4A, 'M', u'ə'),
(0x1D4B, 'M', u'ɛ'),
(0x1D4C, 'M', u'ɜ'),
(0x1D4D, 'M', u'g'),
(0x1D4E, 'V'),
(0x1D4F, 'M', u'k'),
(0x1D50, 'M', u'm'),
(0x1D51, 'M', u'ŋ'),
(0x1D52, 'M', u'o'),
]
def _seg_16():
return [
(0x1D53, 'M', u'ɔ'),
(0x1D54, 'M', u'ᴖ'),
(0x1D55, 'M', u'ᴗ'),
(0x1D56, 'M', u'p'),
(0x1D57, 'M', u't'),
(0x1D58, 'M', u'u'),
(0x1D59, 'M', u'ᴝ'),
(0x1D5A, 'M', u'ɯ'),
(0x1D5B, 'M', u'v'),
(0x1D5C, 'M', u'ᴥ'),
(0x1D5D, 'M', u'β'),
(0x1D5E, 'M', u'γ'),
(0x1D5F, 'M', u'δ'),
(0x1D60, 'M', u'φ'),
(0x1D61, 'M', u'χ'),
(0x1D62, 'M', u'i'),
(0x1D63, 'M', u'r'),
(0x1D64, 'M', u'u'),
(0x1D65, 'M', u'v'),
(0x1D66, 'M', u'β'),
(0x1D67, 'M', u'γ'),
(0x1D68, 'M', u'ρ'),
(0x1D69, 'M', u'φ'),
(0x1D6A, 'M', u'χ'),
(0x1D6B, 'V'),
(0x1D78, 'M', u'н'),
(0x1D79, 'V'),
(0x1D9B, 'M', u'ɒ'),
(0x1D9C, 'M', u'c'),
(0x1D9D, 'M', u'ɕ'),
(0x1D9E, 'M', u'ð'),
(0x1D9F, 'M', u'ɜ'),
(0x1DA0, 'M', u'f'),
(0x1DA1, 'M', u'ɟ'),
(0x1DA2, 'M', u'ɡ'),
(0x1DA3, 'M', u'ɥ'),
(0x1DA4, 'M', u'ɨ'),
(0x1DA5, 'M', u'ɩ'),
(0x1DA6, 'M', u'ɪ'),
(0x1DA7, 'M', u'ᵻ'),
(0x1DA8, 'M', u'ʝ'),
(0x1DA9, 'M', u'ɭ'),
(0x1DAA, 'M', u'ᶅ'),
(0x1DAB, 'M', u'ʟ'),
(0x1DAC, 'M', u'ɱ'),
(0x1DAD, 'M', u'ɰ'),
(0x1DAE, 'M', u'ɲ'),
(0x1DAF, 'M', u'ɳ'),
(0x1DB0, 'M', u'ɴ'),
(0x1DB1, 'M', u'ɵ'),
(0x1DB2, 'M', u'ɸ'),
(0x1DB3, 'M', u'ʂ'),
(0x1DB4, 'M', u'ʃ'),
(0x1DB5, 'M', u'ƫ'),
(0x1DB6, 'M', u'ʉ'),
(0x1DB7, 'M', u'ʊ'),
(0x1DB8, 'M', u'ᴜ'),
(0x1DB9, 'M', u'ʋ'),
(0x1DBA, 'M', u'ʌ'),
(0x1DBB, 'M', u'z'),
(0x1DBC, 'M', u'ʐ'),
(0x1DBD, 'M', u'ʑ'),
(0x1DBE, 'M', u'ʒ'),
(0x1DBF, 'M', u'θ'),
(0x1DC0, 'V'),
(0x1DFA, 'X'),
(0x1DFB, 'V'),
(0x1E00, 'M', u'ḁ'),
(0x1E01, 'V'),
(0x1E02, 'M', u'ḃ'),
(0x1E03, 'V'),
(0x1E04, 'M', u'ḅ'),
(0x1E05, 'V'),
(0x1E06, 'M', u'ḇ'),
(0x1E07, 'V'),
(0x1E08, 'M', u'ḉ'),
(0x1E09, 'V'),
(0x1E0A, 'M', u'ḋ'),
(0x1E0B, 'V'),
(0x1E0C, 'M', u'ḍ'),
(0x1E0D, 'V'),
(0x1E0E, 'M', u'ḏ'),
(0x1E0F, 'V'),
(0x1E10, 'M', u'ḑ'),
(0x1E11, 'V'),
(0x1E12, 'M', u'ḓ'),
(0x1E13, 'V'),
(0x1E14, 'M', u'ḕ'),
(0x1E15, 'V'),
(0x1E16, 'M', u'ḗ'),
(0x1E17, 'V'),
(0x1E18, 'M', u'ḙ'),
(0x1E19, 'V'),
(0x1E1A, 'M', u'ḛ'),
(0x1E1B, 'V'),
(0x1E1C, 'M', u'ḝ'),
(0x1E1D, 'V'),
(0x1E1E, 'M', u'ḟ'),
(0x1E1F, 'V'),
(0x1E20, 'M', u'ḡ'),
]
def _seg_17():
return [
(0x1E21, 'V'),
(0x1E22, 'M', u'ḣ'),
(0x1E23, 'V'),
(0x1E24, 'M', u'ḥ'),
(0x1E25, 'V'),
(0x1E26, 'M', u'ḧ'),
(0x1E27, 'V'),
(0x1E28, 'M', u'ḩ'),
(0x1E29, 'V'),
(0x1E2A, 'M', u'ḫ'),
(0x1E2B, 'V'),
(0x1E2C, 'M', u'ḭ'),
(0x1E2D, 'V'),
(0x1E2E, 'M', u'ḯ'),
(0x1E2F, 'V'),
(0x1E30, 'M', u'ḱ'),
(0x1E31, 'V'),
(0x1E32, 'M', u'ḳ'),
(0x1E33, 'V'),
(0x1E34, 'M', u'ḵ'),
(0x1E35, 'V'),
(0x1E36, 'M', u'ḷ'),
(0x1E37, 'V'),
(0x1E38, 'M', u'ḹ'),
(0x1E39, 'V'),
(0x1E3A, 'M', u'ḻ'),
(0x1E3B, 'V'),
(0x1E3C, 'M', u'ḽ'),
(0x1E3D, 'V'),
(0x1E3E, 'M', u'ḿ'),
(0x1E3F, 'V'),
(0x1E40, 'M', u'ṁ'),
(0x1E41, 'V'),
(0x1E42, 'M', u'ṃ'),
(0x1E43, 'V'),
(0x1E44, 'M', u'ṅ'),
(0x1E45, 'V'),
(0x1E46, 'M', u'ṇ'),
(0x1E47, 'V'),
(0x1E48, 'M', u'ṉ'),
(0x1E49, 'V'),
(0x1E4A, 'M', u'ṋ'),
(0x1E4B, 'V'),
(0x1E4C, 'M', u'ṍ'),
(0x1E4D, 'V'),
(0x1E4E, 'M', u'ṏ'),
(0x1E4F, 'V'),
(0x1E50, 'M', u'ṑ'),
(0x1E51, 'V'),
(0x1E52, 'M', u'ṓ'),
(0x1E53, 'V'),
(0x1E54, 'M', u'ṕ'),
(0x1E55, 'V'),
(0x1E56, 'M', u'ṗ'),
(0x1E57, 'V'),
(0x1E58, 'M', u'ṙ'),
(0x1E59, 'V'),
(0x1E5A, 'M', u'ṛ'),
(0x1E5B, 'V'),
(0x1E5C, 'M', u'ṝ'),
(0x1E5D, 'V'),
(0x1E5E, 'M', u'ṟ'),
(0x1E5F, 'V'),
(0x1E60, 'M', u'ṡ'),
(0x1E61, 'V'),
(0x1E62, 'M', u'ṣ'),
(0x1E63, 'V'),
(0x1E64, 'M', u'ṥ'),
(0x1E65, 'V'),
(0x1E66, 'M', u'ṧ'),
(0x1E67, 'V'),
(0x1E68, 'M', u'ṩ'),
(0x1E69, 'V'),
(0x1E6A, 'M', u'ṫ'),
(0x1E6B, 'V'),
(0x1E6C, 'M', u'ṭ'),
(0x1E6D, 'V'),
(0x1E6E, 'M', u'ṯ'),
(0x1E6F, 'V'),
(0x1E70, 'M', u'ṱ'),
(0x1E71, 'V'),
(0x1E72, 'M', u'ṳ'),
(0x1E73, 'V'),
(0x1E74, 'M', u'ṵ'),
(0x1E75, 'V'),
(0x1E76, 'M', u'ṷ'),
(0x1E77, 'V'),
(0x1E78, 'M', u'ṹ'),
(0x1E79, 'V'),
(0x1E7A, 'M', u'ṻ'),
(0x1E7B, 'V'),
(0x1E7C, 'M', u'ṽ'),
(0x1E7D, 'V'),
(0x1E7E, 'M', u'ṿ'),
(0x1E7F, 'V'),
(0x1E80, 'M', u'ẁ'),
(0x1E81, 'V'),
(0x1E82, 'M', u'ẃ'),
(0x1E83, 'V'),
(0x1E84, 'M', u'ẅ'),
]
def _seg_18():
return [
(0x1E85, 'V'),
(0x1E86, 'M', u'ẇ'),
(0x1E87, 'V'),
(0x1E88, 'M', u'ẉ'),
(0x1E89, 'V'),
(0x1E8A, 'M', u'ẋ'),
(0x1E8B, 'V'),
(0x1E8C, 'M', u'ẍ'),
(0x1E8D, 'V'),
(0x1E8E, 'M', u'ẏ'),
(0x1E8F, 'V'),
(0x1E90, 'M', u'ẑ'),
(0x1E91, 'V'),
(0x1E92, 'M', u'ẓ'),
(0x1E93, 'V'),
(0x1E94, 'M', u'ẕ'),
(0x1E95, 'V'),
(0x1E9A, 'M', u'aʾ'),
(0x1E9B, 'M', u'ṡ'),
(0x1E9C, 'V'),
(0x1E9E, 'M', u'ss'),
(0x1E9F, 'V'),
(0x1EA0, 'M', u'ạ'),
(0x1EA1, 'V'),
(0x1EA2, 'M', u'ả'),
(0x1EA3, 'V'),
(0x1EA4, 'M', u'ấ'),
(0x1EA5, 'V'),
(0x1EA6, 'M', u'ầ'),
(0x1EA7, 'V'),
(0x1EA8, 'M', u'ẩ'),
(0x1EA9, 'V'),
(0x1EAA, 'M', u'ẫ'),
(0x1EAB, 'V'),
(0x1EAC, 'M', u'ậ'),
(0x1EAD, 'V'),
(0x1EAE, 'M', u'ắ'),
(0x1EAF, 'V'),
(0x1EB0, 'M', u'ằ'),
(0x1EB1, 'V'),
(0x1EB2, 'M', u'ẳ'),
(0x1EB3, 'V'),
(0x1EB4, 'M', u'ẵ'),
(0x1EB5, 'V'),
(0x1EB6, 'M', u'ặ'),
(0x1EB7, 'V'),
(0x1EB8, 'M', u'ẹ'),
(0x1EB9, 'V'),
(0x1EBA, 'M', u'ẻ'),
(0x1EBB, 'V'),
(0x1EBC, 'M', u'ẽ'),
(0x1EBD, 'V'),
(0x1EBE, 'M', u'ế'),
(0x1EBF, 'V'),
(0x1EC0, 'M', u'ề'),
(0x1EC1, 'V'),
(0x1EC2, 'M', u'ể'),
(0x1EC3, 'V'),
(0x1EC4, 'M', u'ễ'),
(0x1EC5, 'V'),
(0x1EC6, 'M', u'ệ'),
(0x1EC7, 'V'),
(0x1EC8, 'M', u'ỉ'),
(0x1EC9, 'V'),
(0x1ECA, 'M', u'ị'),
(0x1ECB, 'V'),
(0x1ECC, 'M', u'ọ'),
(0x1ECD, 'V'),
(0x1ECE, 'M', u'ỏ'),
(0x1ECF, 'V'),
(0x1ED0, 'M', u'ố'),
(0x1ED1, 'V'),
(0x1ED2, 'M', u'ồ'),
(0x1ED3, 'V'),
(0x1ED4, 'M', u'ổ'),
(0x1ED5, 'V'),
(0x1ED6, 'M', u'ỗ'),
(0x1ED7, 'V'),
(0x1ED8, 'M', u'ộ'),
(0x1ED9, 'V'),
(0x1EDA, 'M', u'ớ'),
(0x1EDB, 'V'),
(0x1EDC, 'M', u'ờ'),
(0x1EDD, 'V'),
(0x1EDE, 'M', u'ở'),
(0x1EDF, 'V'),
(0x1EE0, 'M', u'ỡ'),
(0x1EE1, 'V'),
(0x1EE2, 'M', u'ợ'),
(0x1EE3, 'V'),
(0x1EE4, 'M', u'ụ'),
(0x1EE5, 'V'),
(0x1EE6, 'M', u'ủ'),
(0x1EE7, 'V'),
(0x1EE8, 'M', u'ứ'),
(0x1EE9, 'V'),
(0x1EEA, 'M', u'ừ'),
(0x1EEB, 'V'),
(0x1EEC, 'M', u'ử'),
(0x1EED, 'V'),
]
def _seg_19():
return [
(0x1EEE, 'M', u'ữ'),
(0x1EEF, 'V'),
(0x1EF0, 'M', u'ự'),
(0x1EF1, 'V'),
(0x1EF2, 'M', u'ỳ'),
(0x1EF3, 'V'),
(0x1EF4, 'M', u'ỵ'),
(0x1EF5, 'V'),
(0x1EF6, 'M', u'ỷ'),
(0x1EF7, 'V'),
(0x1EF8, 'M', u'ỹ'),
(0x1EF9, 'V'),
(0x1EFA, 'M', u'ỻ'),
(0x1EFB, 'V'),
(0x1EFC, 'M', u'ỽ'),
(0x1EFD, 'V'),
(0x1EFE, 'M', u'ỿ'),
(0x1EFF, 'V'),
(0x1F08, 'M', u'ἀ'),
(0x1F09, 'M', u'ἁ'),
(0x1F0A, 'M', u'ἂ'),
(0x1F0B, 'M', u'ἃ'),
(0x1F0C, 'M', u'ἄ'),
(0x1F0D, 'M', u'ἅ'),
(0x1F0E, 'M', u'ἆ'),
(0x1F0F, 'M', u'ἇ'),
(0x1F10, 'V'),
(0x1F16, 'X'),
(0x1F18, 'M', u'ἐ'),
(0x1F19, 'M', u'ἑ'),
(0x1F1A, 'M', u'ἒ'),
(0x1F1B, 'M', u'ἓ'),
(0x1F1C, 'M', u'ἔ'),
(0x1F1D, 'M', u'ἕ'),
(0x1F1E, 'X'),
(0x1F20, 'V'),
(0x1F28, 'M', u'ἠ'),
(0x1F29, 'M', u'ἡ'),
(0x1F2A, 'M', u'ἢ'),
(0x1F2B, 'M', u'ἣ'),
(0x1F2C, 'M', u'ἤ'),
(0x1F2D, 'M', u'ἥ'),
(0x1F2E, 'M', u'ἦ'),
(0x1F2F, 'M', u'ἧ'),
(0x1F30, 'V'),
(0x1F38, 'M', u'ἰ'),
(0x1F39, 'M', u'ἱ'),
(0x1F3A, 'M', u'ἲ'),
(0x1F3B, 'M', u'ἳ'),
(0x1F3C, 'M', u'ἴ'),
(0x1F3D, 'M', u'ἵ'),
(0x1F3E, 'M', u'ἶ'),
(0x1F3F, 'M', u'ἷ'),
(0x1F40, 'V'),
(0x1F46, 'X'),
(0x1F48, 'M', u'ὀ'),
(0x1F49, 'M', u'ὁ'),
(0x1F4A, 'M', u'ὂ'),
(0x1F4B, 'M', u'ὃ'),
(0x1F4C, 'M', u'ὄ'),
(0x1F4D, 'M', u'ὅ'),
(0x1F4E, 'X'),
(0x1F50, 'V'),
(0x1F58, 'X'),
(0x1F59, 'M', u'ὑ'),
(0x1F5A, 'X'),
(0x1F5B, 'M', u'ὓ'),
(0x1F5C, 'X'),
(0x1F5D, 'M', u'ὕ'),
(0x1F5E, 'X'),
(0x1F5F, 'M', u'ὗ'),
(0x1F60, 'V'),
(0x1F68, 'M', u'ὠ'),
(0x1F69, 'M', u'ὡ'),
(0x1F6A, 'M', u'ὢ'),
(0x1F6B, 'M', u'ὣ'),
(0x1F6C, 'M', u'ὤ'),
(0x1F6D, 'M', u'ὥ'),
(0x1F6E, 'M', u'ὦ'),
(0x1F6F, 'M', u'ὧ'),
(0x1F70, 'V'),
(0x1F71, 'M', u'ά'),
(0x1F72, 'V'),
(0x1F73, 'M', u'έ'),
(0x1F74, 'V'),
(0x1F75, 'M', u'ή'),
(0x1F76, 'V'),
(0x1F77, 'M', u'ί'),
(0x1F78, 'V'),
(0x1F79, 'M', u'ό'),
(0x1F7A, 'V'),
(0x1F7B, 'M', u'ύ'),
(0x1F7C, 'V'),
(0x1F7D, 'M', u'ώ'),
(0x1F7E, 'X'),
(0x1F80, 'M', u'ἀι'),
(0x1F81, 'M', u'ἁι'),
(0x1F82, 'M', u'ἂι'),
(0x1F83, 'M', u'ἃι'),
(0x1F84, 'M', u'ἄι'),
]
def _seg_20():
return [
(0x1F85, 'M', u'ἅι'),
(0x1F86, 'M', u'ἆι'),
(0x1F87, 'M', u'ἇι'),
(0x1F88, 'M', u'ἀι'),
(0x1F89, 'M', u'ἁι'),
(0x1F8A, 'M', u'ἂι'),
(0x1F8B, 'M', u'ἃι'),
(0x1F8C, 'M', u'ἄι'),
(0x1F8D, 'M', u'ἅι'),
(0x1F8E, 'M', u'ἆι'),
(0x1F8F, 'M', u'ἇι'),
(0x1F90, 'M', u'ἠι'),
(0x1F91, 'M', u'ἡι'),
(0x1F92, 'M', u'ἢι'),
(0x1F93, 'M', u'ἣι'),
(0x1F94, 'M', u'ἤι'),
(0x1F95, 'M', u'ἥι'),
(0x1F96, 'M', u'ἦι'),
(0x1F97, 'M', u'ἧι'),
(0x1F98, 'M', u'ἠι'),
(0x1F99, 'M', u'ἡι'),
(0x1F9A, 'M', u'ἢι'),
(0x1F9B, 'M', u'ἣι'),
(0x1F9C, 'M', u'ἤι'),
(0x1F9D, 'M', u'ἥι'),
(0x1F9E, 'M', u'ἦι'),
(0x1F9F, 'M', u'ἧι'),
(0x1FA0, 'M', u'ὠι'),
(0x1FA1, 'M', u'ὡι'),
(0x1FA2, 'M', u'ὢι'),
(0x1FA3, 'M', u'ὣι'),
(0x1FA4, 'M', u'ὤι'),
(0x1FA5, 'M', u'ὥι'),
(0x1FA6, 'M', u'ὦι'),
(0x1FA7, 'M', u'ὧι'),
(0x1FA8, 'M', u'ὠι'),
(0x1FA9, 'M', u'ὡι'),
(0x1FAA, 'M', u'ὢι'),
(0x1FAB, 'M', u'ὣι'),
(0x1FAC, 'M', u'ὤι'),
(0x1FAD, 'M', u'ὥι'),
(0x1FAE, 'M', u'ὦι'),
(0x1FAF, 'M', u'ὧι'),
(0x1FB0, 'V'),
(0x1FB2, 'M', u'ὰι'),
(0x1FB3, 'M', u'αι'),
(0x1FB4, 'M', u'άι'),
(0x1FB5, 'X'),
(0x1FB6, 'V'),
(0x1FB7, 'M', u'ᾶι'),
(0x1FB8, 'M', u'ᾰ'),
(0x1FB9, 'M', u'ᾱ'),
(0x1FBA, 'M', u'ὰ'),
(0x1FBB, 'M', u'ά'),
(0x1FBC, 'M', u'αι'),
(0x1FBD, '3', u' ̓'),
(0x1FBE, 'M', u'ι'),
(0x1FBF, '3', u' ̓'),
(0x1FC0, '3', u' ͂'),
(0x1FC1, '3', u' ̈͂'),
(0x1FC2, 'M', u'ὴι'),
(0x1FC3, 'M', u'ηι'),
(0x1FC4, 'M', u'ήι'),
(0x1FC5, 'X'),
(0x1FC6, 'V'),
(0x1FC7, 'M', u'ῆι'),
(0x1FC8, 'M', u'ὲ'),
(0x1FC9, 'M', u'έ'),
(0x1FCA, 'M', u'ὴ'),
(0x1FCB, 'M', u'ή'),
(0x1FCC, 'M', u'ηι'),
(0x1FCD, '3', u' ̓̀'),
(0x1FCE, '3', u' ̓́'),
(0x1FCF, '3', u' ̓͂'),
(0x1FD0, 'V'),
(0x1FD3, 'M', u'ΐ'),
(0x1FD4, 'X'),
(0x1FD6, 'V'),
(0x1FD8, 'M', u'ῐ'),
(0x1FD9, 'M', u'ῑ'),
(0x1FDA, 'M', u'ὶ'),
(0x1FDB, 'M', u'ί'),
(0x1FDC, 'X'),
(0x1FDD, '3', u' ̔̀'),
(0x1FDE, '3', u' ̔́'),
(0x1FDF, '3', u' ̔͂'),
(0x1FE0, 'V'),
(0x1FE3, 'M', u'ΰ'),
(0x1FE4, 'V'),
(0x1FE8, 'M', u'ῠ'),
(0x1FE9, 'M', u'ῡ'),
(0x1FEA, 'M', u'ὺ'),
(0x1FEB, 'M', u'ύ'),
(0x1FEC, 'M', u'ῥ'),
(0x1FED, '3', u' ̈̀'),
(0x1FEE, '3', u' ̈́'),
(0x1FEF, '3', u'`'),
(0x1FF0, 'X'),
(0x1FF2, 'M', u'ὼι'),
(0x1FF3, 'M', u'ωι'),
]
def _seg_21():
return [
(0x1FF4, 'M', u'ώι'),
(0x1FF5, 'X'),
(0x1FF6, 'V'),
(0x1FF7, 'M', u'ῶι'),
(0x1FF8, 'M', u'ὸ'),
(0x1FF9, 'M', u'ό'),
(0x1FFA, 'M', u'ὼ'),
(0x1FFB, 'M', u'ώ'),
(0x1FFC, 'M', u'ωι'),
(0x1FFD, '3', u' ́'),
(0x1FFE, '3', u' ̔'),
(0x1FFF, 'X'),
(0x2000, '3', u' '),
(0x200B, 'I'),
(0x200C, 'D', u''),
(0x200E, 'X'),
(0x2010, 'V'),
(0x2011, 'M', u'‐'),
(0x2012, 'V'),
(0x2017, '3', u' ̳'),
(0x2018, 'V'),
(0x2024, 'X'),
(0x2027, 'V'),
(0x2028, 'X'),
(0x202F, '3', u' '),
(0x2030, 'V'),
(0x2033, 'M', u'′′'),
(0x2034, 'M', u'′′′'),
(0x2035, 'V'),
(0x2036, 'M', u'‵‵'),
(0x2037, 'M', u'‵‵‵'),
(0x2038, 'V'),
(0x203C, '3', u'!!'),
(0x203D, 'V'),
(0x203E, '3', u' ̅'),
(0x203F, 'V'),
(0x2047, '3', u'??'),
(0x2048, '3', u'?!'),
(0x2049, '3', u'!?'),
(0x204A, 'V'),
(0x2057, 'M', u'′′′′'),
(0x2058, 'V'),
(0x205F, '3', u' '),
(0x2060, 'I'),
(0x2061, 'X'),
(0x2064, 'I'),
(0x2065, 'X'),
(0x2070, 'M', u'0'),
(0x2071, 'M', u'i'),
(0x2072, 'X'),
(0x2074, 'M', u'4'),
(0x2075, 'M', u'5'),
(0x2076, 'M', u'6'),
(0x2077, 'M', u'7'),
(0x2078, 'M', u'8'),
(0x2079, 'M', u'9'),
(0x207A, '3', u'+'),
(0x207B, 'M', u'−'),
(0x207C, '3', u'='),
(0x207D, '3', u'('),
(0x207E, '3', u')'),
(0x207F, 'M', u'n'),
(0x2080, 'M', u'0'),
(0x2081, 'M', u'1'),
(0x2082, 'M', u'2'),
(0x2083, 'M', u'3'),
(0x2084, 'M', u'4'),
(0x2085, 'M', u'5'),
(0x2086, 'M', u'6'),
(0x2087, 'M', u'7'),
(0x2088, 'M', u'8'),
(0x2089, 'M', u'9'),
(0x208A, '3', u'+'),
(0x208B, 'M', u'−'),
(0x208C, '3', u'='),
(0x208D, '3', u'('),
(0x208E, '3', u')'),
(0x208F, 'X'),
(0x2090, 'M', u'a'),
(0x2091, 'M', u'e'),
(0x2092, 'M', u'o'),
(0x2093, 'M', u'x'),
(0x2094, 'M', u'ə'),
(0x2095, 'M', u'h'),
(0x2096, 'M', u'k'),
(0x2097, 'M', u'l'),
(0x2098, 'M', u'm'),
(0x2099, 'M', u'n'),
(0x209A, 'M', u'p'),
(0x209B, 'M', u's'),
(0x209C, 'M', u't'),
(0x209D, 'X'),
(0x20A0, 'V'),
(0x20A8, 'M', u'rs'),
(0x20A9, 'V'),
(0x20C0, 'X'),
(0x20D0, 'V'),
(0x20F1, 'X'),
(0x2100, '3', u'a/c'),
(0x2101, '3', u'a/s'),
]
def _seg_22():
return [
(0x2102, 'M', u'c'),
(0x2103, 'M', u'°c'),
(0x2104, 'V'),
(0x2105, '3', u'c/o'),
(0x2106, '3', u'c/u'),
(0x2107, 'M', u'ɛ'),
(0x2108, 'V'),
(0x2109, 'M', u'°f'),
(0x210A, 'M', u'g'),
(0x210B, 'M', u'h'),
(0x210F, 'M', u'ħ'),
(0x2110, 'M', u'i'),
(0x2112, 'M', u'l'),
(0x2114, 'V'),
(0x2115, 'M', u'n'),
(0x2116, 'M', u'no'),
(0x2117, 'V'),
(0x2119, 'M', u'p'),
(0x211A, 'M', u'q'),
(0x211B, 'M', u'r'),
(0x211E, 'V'),
(0x2120, 'M', u'sm'),
(0x2121, 'M', u'tel'),
(0x2122, 'M', u'tm'),
(0x2123, 'V'),
(0x2124, 'M', u'z'),
(0x2125, 'V'),
(0x2126, 'M', u'ω'),
(0x2127, 'V'),
(0x2128, 'M', u'z'),
(0x2129, 'V'),
(0x212A, 'M', u'k'),
(0x212B, 'M', u'å'),
(0x212C, 'M', u'b'),
(0x212D, 'M', u'c'),
(0x212E, 'V'),
(0x212F, 'M', u'e'),
(0x2131, 'M', u'f'),
(0x2132, 'X'),
(0x2133, 'M', u'm'),
(0x2134, 'M', u'o'),
(0x2135, 'M', u'א'),
(0x2136, 'M', u'ב'),
(0x2137, 'M', u'ג'),
(0x2138, 'M', u'ד'),
(0x2139, 'M', u'i'),
(0x213A, 'V'),
(0x213B, 'M', u'fax'),
(0x213C, 'M', u'π'),
(0x213D, 'M', u'γ'),
(0x213F, 'M', u'π'),
(0x2140, 'M', u'∑'),
(0x2141, 'V'),
(0x2145, 'M', u'd'),
(0x2147, 'M', u'e'),
(0x2148, 'M', u'i'),
(0x2149, 'M', u'j'),
(0x214A, 'V'),
(0x2150, 'M', u'1⁄7'),
(0x2151, 'M', u'1⁄9'),
(0x2152, 'M', u'1⁄10'),
(0x2153, 'M', u'1⁄3'),
(0x2154, 'M', u'2⁄3'),
(0x2155, 'M', u'1⁄5'),
(0x2156, 'M', u'2⁄5'),
(0x2157, 'M', u'3⁄5'),
(0x2158, 'M', u'4⁄5'),
(0x2159, 'M', u'1⁄6'),
(0x215A, 'M', u'5⁄6'),
(0x215B, 'M', u'1⁄8'),
(0x215C, 'M', u'3⁄8'),
(0x215D, 'M', u'5⁄8'),
(0x215E, 'M', u'7⁄8'),
(0x215F, 'M', u'1⁄'),
(0x2160, 'M', u'i'),
(0x2161, 'M', u'ii'),
(0x2162, 'M', u'iii'),
(0x2163, 'M', u'iv'),
(0x2164, 'M', u'v'),
(0x2165, 'M', u'vi'),
(0x2166, 'M', u'vii'),
(0x2167, 'M', u'viii'),
(0x2168, 'M', u'ix'),
(0x2169, 'M', u'x'),
(0x216A, 'M', u'xi'),
(0x216B, 'M', u'xii'),
(0x216C, 'M', u'l'),
(0x216D, 'M', u'c'),
(0x216E, 'M', u'd'),
(0x216F, 'M', u'm'),
(0x2170, 'M', u'i'),
(0x2171, 'M', u'ii'),
(0x2172, 'M', u'iii'),
(0x2173, 'M', u'iv'),
(0x2174, 'M', u'v'),
(0x2175, 'M', u'vi'),
(0x2176, 'M', u'vii'),
(0x2177, 'M', u'viii'),
(0x2178, 'M', u'ix'),
(0x2179, 'M', u'x'),
]
def _seg_23():
return [
(0x217A, 'M', u'xi'),
(0x217B, 'M', u'xii'),
(0x217C, 'M', u'l'),
(0x217D, 'M', u'c'),
(0x217E, 'M', u'd'),
(0x217F, 'M', u'm'),
(0x2180, 'V'),
(0x2183, 'X'),
(0x2184, 'V'),
(0x2189, 'M', u'0⁄3'),
(0x218A, 'V'),
(0x218C, 'X'),
(0x2190, 'V'),
(0x222C, 'M', u'∫∫'),
(0x222D, 'M', u'∫∫∫'),
(0x222E, 'V'),
(0x222F, 'M', u'∮∮'),
(0x2230, 'M', u'∮∮∮'),
(0x2231, 'V'),
(0x2260, '3'),
(0x2261, 'V'),
(0x226E, '3'),
(0x2270, 'V'),
(0x2329, 'M', u'〈'),
(0x232A, 'M', u'〉'),
(0x232B, 'V'),
(0x2427, 'X'),
(0x2440, 'V'),
(0x244B, 'X'),
(0x2460, 'M', u'1'),
(0x2461, 'M', u'2'),
(0x2462, 'M', u'3'),
(0x2463, 'M', u'4'),
(0x2464, 'M', u'5'),
(0x2465, 'M', u'6'),
(0x2466, 'M', u'7'),
(0x2467, 'M', u'8'),
(0x2468, 'M', u'9'),
(0x2469, 'M', u'10'),
(0x246A, 'M', u'11'),
(0x246B, 'M', u'12'),
(0x246C, 'M', u'13'),
(0x246D, 'M', u'14'),
(0x246E, 'M', u'15'),
(0x246F, 'M', u'16'),
(0x2470, 'M', u'17'),
(0x2471, 'M', u'18'),
(0x2472, 'M', u'19'),
(0x2473, 'M', u'20'),
(0x2474, '3', u'(1)'),
(0x2475, '3', u'(2)'),
(0x2476, '3', u'(3)'),
(0x2477, '3', u'(4)'),
(0x2478, '3', u'(5)'),
(0x2479, '3', u'(6)'),
(0x247A, '3', u'(7)'),
(0x247B, '3', u'(8)'),
(0x247C, '3', u'(9)'),
(0x247D, '3', u'(10)'),
(0x247E, '3', u'(11)'),
(0x247F, '3', u'(12)'),
(0x2480, '3', u'(13)'),
(0x2481, '3', u'(14)'),
(0x2482, '3', u'(15)'),
(0x2483, '3', u'(16)'),
(0x2484, '3', u'(17)'),
(0x2485, '3', u'(18)'),
(0x2486, '3', u'(19)'),
(0x2487, '3', u'(20)'),
(0x2488, 'X'),
(0x249C, '3', u'(a)'),
(0x249D, '3', u'(b)'),
(0x249E, '3', u'(c)'),
(0x249F, '3', u'(d)'),
(0x24A0, '3', u'(e)'),
(0x24A1, '3', u'(f)'),
(0x24A2, '3', u'(g)'),
(0x24A3, '3', u'(h)'),
(0x24A4, '3', u'(i)'),
(0x24A5, '3', u'(j)'),
(0x24A6, '3', u'(k)'),
(0x24A7, '3', u'(l)'),
(0x24A8, '3', u'(m)'),
(0x24A9, '3', u'(n)'),
(0x24AA, '3', u'(o)'),
(0x24AB, '3', u'(p)'),
(0x24AC, '3', u'(q)'),
(0x24AD, '3', u'(r)'),
(0x24AE, '3', u'(s)'),
(0x24AF, '3', u'(t)'),
(0x24B0, '3', u'(u)'),
(0x24B1, '3', u'(v)'),
(0x24B2, '3', u'(w)'),
(0x24B3, '3', u'(x)'),
(0x24B4, '3', u'(y)'),
(0x24B5, '3', u'(z)'),
(0x24B6, 'M', u'a'),
(0x24B7, 'M', u'b'),
(0x24B8, 'M', u'c'),
(0x24B9, 'M', u'd'),
]
def _seg_24():
return [
(0x24BA, 'M', u'e'),
(0x24BB, 'M', u'f'),
(0x24BC, 'M', u'g'),
(0x24BD, 'M', u'h'),
(0x24BE, 'M', u'i'),
(0x24BF, 'M', u'j'),
(0x24C0, 'M', u'k'),
(0x24C1, 'M', u'l'),
(0x24C2, 'M', u'm'),
(0x24C3, 'M', u'n'),
(0x24C4, 'M', u'o'),
(0x24C5, 'M', u'p'),
(0x24C6, 'M', u'q'),
(0x24C7, 'M', u'r'),
(0x24C8, 'M', u's'),
(0x24C9, 'M', u't'),
(0x24CA, 'M', u'u'),
(0x24CB, 'M', u'v'),
(0x24CC, 'M', u'w'),
(0x24CD, 'M', u'x'),
(0x24CE, 'M', u'y'),
(0x24CF, 'M', u'z'),
(0x24D0, 'M', u'a'),
(0x24D1, 'M', u'b'),
(0x24D2, 'M', u'c'),
(0x24D3, 'M', u'd'),
(0x24D4, 'M', u'e'),
(0x24D5, 'M', u'f'),
(0x24D6, 'M', u'g'),
(0x24D7, 'M', u'h'),
(0x24D8, 'M', u'i'),
(0x24D9, 'M', u'j'),
(0x24DA, 'M', u'k'),
(0x24DB, 'M', u'l'),
(0x24DC, 'M', u'm'),
(0x24DD, 'M', u'n'),
(0x24DE, 'M', u'o'),
(0x24DF, 'M', u'p'),
(0x24E0, 'M', u'q'),
(0x24E1, 'M', u'r'),
(0x24E2, 'M', u's'),
(0x24E3, 'M', u't'),
(0x24E4, 'M', u'u'),
(0x24E5, 'M', u'v'),
(0x24E6, 'M', u'w'),
(0x24E7, 'M', u'x'),
(0x24E8, 'M', u'y'),
(0x24E9, 'M', u'z'),
(0x24EA, 'M', u'0'),
(0x24EB, 'V'),
(0x2A0C, 'M', u'∫∫∫∫'),
(0x2A0D, 'V'),
(0x2A74, '3', u'::='),
(0x2A75, '3', u'=='),
(0x2A76, '3', u'==='),
(0x2A77, 'V'),
(0x2ADC, 'M', u'⫝̸'),
(0x2ADD, 'V'),
(0x2B74, 'X'),
(0x2B76, 'V'),
(0x2B96, 'X'),
(0x2B97, 'V'),
(0x2C00, 'M', u'ⰰ'),
(0x2C01, 'M', u'ⰱ'),
(0x2C02, 'M', u'ⰲ'),
(0x2C03, 'M', u'ⰳ'),
(0x2C04, 'M', u'ⰴ'),
(0x2C05, 'M', u'ⰵ'),
(0x2C06, 'M', u'ⰶ'),
(0x2C07, 'M', u'ⰷ'),
(0x2C08, 'M', u'ⰸ'),
(0x2C09, 'M', u'ⰹ'),
(0x2C0A, 'M', u'ⰺ'),
(0x2C0B, 'M', u'ⰻ'),
(0x2C0C, 'M', u'ⰼ'),
(0x2C0D, 'M', u'ⰽ'),
(0x2C0E, 'M', u'ⰾ'),
(0x2C0F, 'M', u'ⰿ'),
(0x2C10, 'M', u'ⱀ'),
(0x2C11, 'M', u'ⱁ'),
(0x2C12, 'M', u'ⱂ'),
(0x2C13, 'M', u'ⱃ'),
(0x2C14, 'M', u'ⱄ'),
(0x2C15, 'M', u'ⱅ'),
(0x2C16, 'M', u'ⱆ'),
(0x2C17, 'M', u'ⱇ'),
(0x2C18, 'M', u'ⱈ'),
(0x2C19, 'M', u'ⱉ'),
(0x2C1A, 'M', u'ⱊ'),
(0x2C1B, 'M', u'ⱋ'),
(0x2C1C, 'M', u'ⱌ'),
(0x2C1D, 'M', u'ⱍ'),
(0x2C1E, 'M', u'ⱎ'),
(0x2C1F, 'M', u'ⱏ'),
(0x2C20, 'M', u'ⱐ'),
(0x2C21, 'M', u'ⱑ'),
(0x2C22, 'M', u'ⱒ'),
(0x2C23, 'M', u'ⱓ'),
(0x2C24, 'M', u'ⱔ'),
(0x2C25, 'M', u'ⱕ'),
]
def _seg_25():
return [
(0x2C26, 'M', u'ⱖ'),
(0x2C27, 'M', u'ⱗ'),
(0x2C28, 'M', u'ⱘ'),
(0x2C29, 'M', u'ⱙ'),
(0x2C2A, 'M', u'ⱚ'),
(0x2C2B, 'M', u'ⱛ'),
(0x2C2C, 'M', u'ⱜ'),
(0x2C2D, 'M', u'ⱝ'),
(0x2C2E, 'M', u'ⱞ'),
(0x2C2F, 'X'),
(0x2C30, 'V'),
(0x2C5F, 'X'),
(0x2C60, 'M', u'ⱡ'),
(0x2C61, 'V'),
(0x2C62, 'M', u'ɫ'),
(0x2C63, 'M', u'ᵽ'),
(0x2C64, 'M', u'ɽ'),
(0x2C65, 'V'),
(0x2C67, 'M', u'ⱨ'),
(0x2C68, 'V'),
(0x2C69, 'M', u'ⱪ'),
(0x2C6A, 'V'),
(0x2C6B, 'M', u'ⱬ'),
(0x2C6C, 'V'),
(0x2C6D, 'M', u'ɑ'),
(0x2C6E, 'M', u'ɱ'),
(0x2C6F, 'M', u'ɐ'),
(0x2C70, 'M', u'ɒ'),
(0x2C71, 'V'),
(0x2C72, 'M', u'ⱳ'),
(0x2C73, 'V'),
(0x2C75, 'M', u'ⱶ'),
(0x2C76, 'V'),
(0x2C7C, 'M', u'j'),
(0x2C7D, 'M', u'v'),
(0x2C7E, 'M', u'ȿ'),
(0x2C7F, 'M', u'ɀ'),
(0x2C80, 'M', u'ⲁ'),
(0x2C81, 'V'),
(0x2C82, 'M', u'ⲃ'),
(0x2C83, 'V'),
(0x2C84, 'M', u'ⲅ'),
(0x2C85, 'V'),
(0x2C86, 'M', u'ⲇ'),
(0x2C87, 'V'),
(0x2C88, 'M', u'ⲉ'),
(0x2C89, 'V'),
(0x2C8A, 'M', u'ⲋ'),
(0x2C8B, 'V'),
(0x2C8C, 'M', u'ⲍ'),
(0x2C8D, 'V'),
(0x2C8E, 'M', u'ⲏ'),
(0x2C8F, 'V'),
(0x2C90, 'M', u'ⲑ'),
(0x2C91, 'V'),
(0x2C92, 'M', u'ⲓ'),
(0x2C93, 'V'),
(0x2C94, 'M', u'ⲕ'),
(0x2C95, 'V'),
(0x2C96, 'M', u'ⲗ'),
(0x2C97, 'V'),
(0x2C98, 'M', u'ⲙ'),
(0x2C99, 'V'),
(0x2C9A, 'M', u'ⲛ'),
(0x2C9B, 'V'),
(0x2C9C, 'M', u'ⲝ'),
(0x2C9D, 'V'),
(0x2C9E, 'M', u'ⲟ'),
(0x2C9F, 'V'),
(0x2CA0, 'M', u'ⲡ'),
(0x2CA1, 'V'),
(0x2CA2, 'M', u'ⲣ'),
(0x2CA3, 'V'),
(0x2CA4, 'M', u'ⲥ'),
(0x2CA5, 'V'),
(0x2CA6, 'M', u'ⲧ'),
(0x2CA7, 'V'),
(0x2CA8, 'M', u'ⲩ'),
(0x2CA9, 'V'),
(0x2CAA, 'M', u'ⲫ'),
(0x2CAB, 'V'),
(0x2CAC, 'M', u'ⲭ'),
(0x2CAD, 'V'),
(0x2CAE, 'M', u'ⲯ'),
(0x2CAF, 'V'),
(0x2CB0, 'M', u'ⲱ'),
(0x2CB1, 'V'),
(0x2CB2, 'M', u'ⲳ'),
(0x2CB3, 'V'),
(0x2CB4, 'M', u'ⲵ'),
(0x2CB5, 'V'),
(0x2CB6, 'M', u'ⲷ'),
(0x2CB7, 'V'),
(0x2CB8, 'M', u'ⲹ'),
(0x2CB9, 'V'),
(0x2CBA, 'M', u'ⲻ'),
(0x2CBB, 'V'),
(0x2CBC, 'M', u'ⲽ'),
(0x2CBD, 'V'),
(0x2CBE, 'M', u'ⲿ'),
]
def _seg_26():
return [
(0x2CBF, 'V'),
(0x2CC0, 'M', u'ⳁ'),
(0x2CC1, 'V'),
(0x2CC2, 'M', u'ⳃ'),
(0x2CC3, 'V'),
(0x2CC4, 'M', u'ⳅ'),
(0x2CC5, 'V'),
(0x2CC6, 'M', u'ⳇ'),
(0x2CC7, 'V'),
(0x2CC8, 'M', u'ⳉ'),
(0x2CC9, 'V'),
(0x2CCA, 'M', u'ⳋ'),
(0x2CCB, 'V'),
(0x2CCC, 'M', u'ⳍ'),
(0x2CCD, 'V'),
(0x2CCE, 'M', u'ⳏ'),
(0x2CCF, 'V'),
(0x2CD0, 'M', u'ⳑ'),
(0x2CD1, 'V'),
(0x2CD2, 'M', u'ⳓ'),
(0x2CD3, 'V'),
(0x2CD4, 'M', u'ⳕ'),
(0x2CD5, 'V'),
(0x2CD6, 'M', u'ⳗ'),
(0x2CD7, 'V'),
(0x2CD8, 'M', u'ⳙ'),
(0x2CD9, 'V'),
(0x2CDA, 'M', u'ⳛ'),
(0x2CDB, 'V'),
(0x2CDC, 'M', u'ⳝ'),
(0x2CDD, 'V'),
(0x2CDE, 'M', u'ⳟ'),
(0x2CDF, 'V'),
(0x2CE0, 'M', u'ⳡ'),
(0x2CE1, 'V'),
(0x2CE2, 'M', u'ⳣ'),
(0x2CE3, 'V'),
(0x2CEB, 'M', u'ⳬ'),
(0x2CEC, 'V'),
(0x2CED, 'M', u'ⳮ'),
(0x2CEE, 'V'),
(0x2CF2, 'M', u'ⳳ'),
(0x2CF3, 'V'),
(0x2CF4, 'X'),
(0x2CF9, 'V'),
(0x2D26, 'X'),
(0x2D27, 'V'),
(0x2D28, 'X'),
(0x2D2D, 'V'),
(0x2D2E, 'X'),
(0x2D30, 'V'),
(0x2D68, 'X'),
(0x2D6F, 'M', u'ⵡ'),
(0x2D70, 'V'),
(0x2D71, 'X'),
(0x2D7F, 'V'),
(0x2D97, 'X'),
(0x2DA0, 'V'),
(0x2DA7, 'X'),
(0x2DA8, 'V'),
(0x2DAF, 'X'),
(0x2DB0, 'V'),
(0x2DB7, 'X'),
(0x2DB8, 'V'),
(0x2DBF, 'X'),
(0x2DC0, 'V'),
(0x2DC7, 'X'),
(0x2DC8, 'V'),
(0x2DCF, 'X'),
(0x2DD0, 'V'),
(0x2DD7, 'X'),
(0x2DD8, 'V'),
(0x2DDF, 'X'),
(0x2DE0, 'V'),
(0x2E53, 'X'),
(0x2E80, 'V'),
(0x2E9A, 'X'),
(0x2E9B, 'V'),
(0x2E9F, 'M', u'母'),
(0x2EA0, 'V'),
(0x2EF3, 'M', u'龟'),
(0x2EF4, 'X'),
(0x2F00, 'M', u'一'),
(0x2F01, 'M', u'丨'),
(0x2F02, 'M', u'丶'),
(0x2F03, 'M', u'丿'),
(0x2F04, 'M', u'乙'),
(0x2F05, 'M', u'亅'),
(0x2F06, 'M', u'二'),
(0x2F07, 'M', u'亠'),
(0x2F08, 'M', u'人'),
(0x2F09, 'M', u'儿'),
(0x2F0A, 'M', u'入'),
(0x2F0B, 'M', u'八'),
(0x2F0C, 'M', u'冂'),
(0x2F0D, 'M', u'冖'),
(0x2F0E, 'M', u'冫'),
(0x2F0F, 'M', u'几'),
(0x2F10, 'M', u'凵'),
(0x2F11, 'M', u'刀'),
]
def _seg_27():
return [
(0x2F12, 'M', u'力'),
(0x2F13, 'M', u'勹'),
(0x2F14, 'M', u'匕'),
(0x2F15, 'M', u'匚'),
(0x2F16, 'M', u'匸'),
(0x2F17, 'M', u'十'),
(0x2F18, 'M', u'卜'),
(0x2F19, 'M', u'卩'),
(0x2F1A, 'M', u'厂'),
(0x2F1B, 'M', u'厶'),
(0x2F1C, 'M', u'又'),
(0x2F1D, 'M', u'口'),
(0x2F1E, 'M', u'囗'),
(0x2F1F, 'M', u'土'),
(0x2F20, 'M', u'士'),
(0x2F21, 'M', u'夂'),
(0x2F22, 'M', u'夊'),
(0x2F23, 'M', u'夕'),
(0x2F24, 'M', u'大'),
(0x2F25, 'M', u'女'),
(0x2F26, 'M', u'子'),
(0x2F27, 'M', u'宀'),
(0x2F28, 'M', u'寸'),
(0x2F29, 'M', u'小'),
(0x2F2A, 'M', u'尢'),
(0x2F2B, 'M', u'尸'),
(0x2F2C, 'M', u'屮'),
(0x2F2D, 'M', u'山'),
(0x2F2E, 'M', u'巛'),
(0x2F2F, 'M', u'工'),
(0x2F30, 'M', u'己'),
(0x2F31, 'M', u'巾'),
(0x2F32, 'M', u'干'),
(0x2F33, 'M', u'幺'),
(0x2F34, 'M', u'广'),
(0x2F35, 'M', u'廴'),
(0x2F36, 'M', u'廾'),
(0x2F37, 'M', u'弋'),
(0x2F38, 'M', u'弓'),
(0x2F39, 'M', u'彐'),
(0x2F3A, 'M', u'彡'),
(0x2F3B, 'M', u'彳'),
(0x2F3C, 'M', u'心'),
(0x2F3D, 'M', u'戈'),
(0x2F3E, 'M', u'戶'),
(0x2F3F, 'M', u'手'),
(0x2F40, 'M', u'支'),
(0x2F41, 'M', u'攴'),
(0x2F42, 'M', u'文'),
(0x2F43, 'M', u'斗'),
(0x2F44, 'M', u'斤'),
(0x2F45, 'M', u'方'),
(0x2F46, 'M', u'无'),
(0x2F47, 'M', u'日'),
(0x2F48, 'M', u'曰'),
(0x2F49, 'M', u'月'),
(0x2F4A, 'M', u'木'),
(0x2F4B, 'M', u'欠'),
(0x2F4C, 'M', u'止'),
(0x2F4D, 'M', u'歹'),
(0x2F4E, 'M', u'殳'),
(0x2F4F, 'M', u'毋'),
(0x2F50, 'M', u'比'),
(0x2F51, 'M', u'毛'),
(0x2F52, 'M', u'氏'),
(0x2F53, 'M', u'气'),
(0x2F54, 'M', u'水'),
(0x2F55, 'M', u'火'),
(0x2F56, 'M', u'爪'),
(0x2F57, 'M', u'父'),
(0x2F58, 'M', u'爻'),
(0x2F59, 'M', u'爿'),
(0x2F5A, 'M', u'片'),
(0x2F5B, 'M', u'牙'),
(0x2F5C, 'M', u'牛'),
(0x2F5D, 'M', u'犬'),
(0x2F5E, 'M', u'玄'),
(0x2F5F, 'M', u'玉'),
(0x2F60, 'M', u'瓜'),
(0x2F61, 'M', u'瓦'),
(0x2F62, 'M', u'甘'),
(0x2F63, 'M', u'生'),
(0x2F64, 'M', u'用'),
(0x2F65, 'M', u'田'),
(0x2F66, 'M', u'疋'),
(0x2F67, 'M', u'疒'),
(0x2F68, 'M', u'癶'),
(0x2F69, 'M', u'白'),
(0x2F6A, 'M', u'皮'),
(0x2F6B, 'M', u'皿'),
(0x2F6C, 'M', u'目'),
(0x2F6D, 'M', u'矛'),
(0x2F6E, 'M', u'矢'),
(0x2F6F, 'M', u'石'),
(0x2F70, 'M', u'示'),
(0x2F71, 'M', u'禸'),
(0x2F72, 'M', u'禾'),
(0x2F73, 'M', u'穴'),
(0x2F74, 'M', u'立'),
(0x2F75, 'M', u'竹'),
]
def _seg_28():
return [
(0x2F76, 'M', u'米'),
(0x2F77, 'M', u'糸'),
(0x2F78, 'M', u'缶'),
(0x2F79, 'M', u'网'),
(0x2F7A, 'M', u'羊'),
(0x2F7B, 'M', u'羽'),
(0x2F7C, 'M', u'老'),
(0x2F7D, 'M', u'而'),
(0x2F7E, 'M', u'耒'),
(0x2F7F, 'M', u'耳'),
(0x2F80, 'M', u'聿'),
(0x2F81, 'M', u'肉'),
(0x2F82, 'M', u'臣'),
(0x2F83, 'M', u'自'),
(0x2F84, 'M', u'至'),
(0x2F85, 'M', u'臼'),
(0x2F86, 'M', u'舌'),
(0x2F87, 'M', u'舛'),
(0x2F88, 'M', u'舟'),
(0x2F89, 'M', u'艮'),
(0x2F8A, 'M', u'色'),
(0x2F8B, 'M', u'艸'),
(0x2F8C, 'M', u'虍'),
(0x2F8D, 'M', u'虫'),
(0x2F8E, 'M', u'血'),
(0x2F8F, 'M', u'行'),
(0x2F90, 'M', u'衣'),
(0x2F91, 'M', u'襾'),
(0x2F92, 'M', u'見'),
(0x2F93, 'M', u'角'),
(0x2F94, 'M', u'言'),
(0x2F95, 'M', u'谷'),
(0x2F96, 'M', u'豆'),
(0x2F97, 'M', u'豕'),
(0x2F98, 'M', u'豸'),
(0x2F99, 'M', u'貝'),
(0x2F9A, 'M', u'赤'),
(0x2F9B, 'M', u'走'),
(0x2F9C, 'M', u'足'),
(0x2F9D, 'M', u'身'),
(0x2F9E, 'M', u'車'),
(0x2F9F, 'M', u'辛'),
(0x2FA0, 'M', u'辰'),
(0x2FA1, 'M', u'辵'),
(0x2FA2, 'M', u'邑'),
(0x2FA3, 'M', u'酉'),
(0x2FA4, 'M', u'釆'),
(0x2FA5, 'M', u'里'),
(0x2FA6, 'M', u'金'),
(0x2FA7, 'M', u'長'),
(0x2FA8, 'M', u'門'),
(0x2FA9, 'M', u'阜'),
(0x2FAA, 'M', u'隶'),
(0x2FAB, 'M', u'隹'),
(0x2FAC, 'M', u'雨'),
(0x2FAD, 'M', u'靑'),
(0x2FAE, 'M', u'非'),
(0x2FAF, 'M', u'面'),
(0x2FB0, 'M', u'革'),
(0x2FB1, 'M', u'韋'),
(0x2FB2, 'M', u'韭'),
(0x2FB3, 'M', u'音'),
(0x2FB4, 'M', u'頁'),
(0x2FB5, 'M', u'風'),
(0x2FB6, 'M', u'飛'),
(0x2FB7, 'M', u'食'),
(0x2FB8, 'M', u'首'),
(0x2FB9, 'M', u'香'),
(0x2FBA, 'M', u'馬'),
(0x2FBB, 'M', u'骨'),
(0x2FBC, 'M', u'高'),
(0x2FBD, 'M', u'髟'),
(0x2FBE, 'M', u'鬥'),
(0x2FBF, 'M', u'鬯'),
(0x2FC0, 'M', u'鬲'),
(0x2FC1, 'M', u'鬼'),
(0x2FC2, 'M', u'魚'),
(0x2FC3, 'M', u'鳥'),
(0x2FC4, 'M', u'鹵'),
(0x2FC5, 'M', u'鹿'),
(0x2FC6, 'M', u'麥'),
(0x2FC7, 'M', u'麻'),
(0x2FC8, 'M', u'黃'),
(0x2FC9, 'M', u'黍'),
(0x2FCA, 'M', u'黑'),
(0x2FCB, 'M', u'黹'),
(0x2FCC, 'M', u'黽'),
(0x2FCD, 'M', u'鼎'),
(0x2FCE, 'M', u'鼓'),
(0x2FCF, 'M', u'鼠'),
(0x2FD0, 'M', u'鼻'),
(0x2FD1, 'M', u'齊'),
(0x2FD2, 'M', u'齒'),
(0x2FD3, 'M', u'龍'),
(0x2FD4, 'M', u'龜'),
(0x2FD5, 'M', u'龠'),
(0x2FD6, 'X'),
(0x3000, '3', u' '),
(0x3001, 'V'),
(0x3002, 'M', u'.'),
]
def _seg_29():
return [
(0x3003, 'V'),
(0x3036, 'M', u'〒'),
(0x3037, 'V'),
(0x3038, 'M', u'十'),
(0x3039, 'M', u'卄'),
(0x303A, 'M', u'卅'),
(0x303B, 'V'),
(0x3040, 'X'),
(0x3041, 'V'),
(0x3097, 'X'),
(0x3099, 'V'),
(0x309B, '3', u' ゙'),
(0x309C, '3', u' ゚'),
(0x309D, 'V'),
(0x309F, 'M', u'より'),
(0x30A0, 'V'),
(0x30FF, 'M', u'コト'),
(0x3100, 'X'),
(0x3105, 'V'),
(0x3130, 'X'),
(0x3131, 'M', u'ᄀ'),
(0x3132, 'M', u'ᄁ'),
(0x3133, 'M', u'ᆪ'),
(0x3134, 'M', u'ᄂ'),
(0x3135, 'M', u'ᆬ'),
(0x3136, 'M', u'ᆭ'),
(0x3137, 'M', u'ᄃ'),
(0x3138, 'M', u'ᄄ'),
(0x3139, 'M', u'ᄅ'),
(0x313A, 'M', u'ᆰ'),
(0x313B, 'M', u'ᆱ'),
(0x313C, 'M', u'ᆲ'),
(0x313D, 'M', u'ᆳ'),
(0x313E, 'M', u'ᆴ'),
(0x313F, 'M', u'ᆵ'),
(0x3140, 'M', u'ᄚ'),
(0x3141, 'M', u'ᄆ'),
(0x3142, 'M', u'ᄇ'),
(0x3143, 'M', u'ᄈ'),
(0x3144, 'M', u'ᄡ'),
(0x3145, 'M', u'ᄉ'),
(0x3146, 'M', u'ᄊ'),
(0x3147, 'M', u'ᄋ'),
(0x3148, 'M', u'ᄌ'),
(0x3149, 'M', u'ᄍ'),
(0x314A, 'M', u'ᄎ'),
(0x314B, 'M', u'ᄏ'),
(0x314C, 'M', u'ᄐ'),
(0x314D, 'M', u'ᄑ'),
(0x314E, 'M', u'ᄒ'),
(0x314F, 'M', u'ᅡ'),
(0x3150, 'M', u'ᅢ'),
(0x3151, 'M', u'ᅣ'),
(0x3152, 'M', u'ᅤ'),
(0x3153, 'M', u'ᅥ'),
(0x3154, 'M', u'ᅦ'),
(0x3155, 'M', u'ᅧ'),
(0x3156, 'M', u'ᅨ'),
(0x3157, 'M', u'ᅩ'),
(0x3158, 'M', u'ᅪ'),
(0x3159, 'M', u'ᅫ'),
(0x315A, 'M', u'ᅬ'),
(0x315B, 'M', u'ᅭ'),
(0x315C, 'M', u'ᅮ'),
(0x315D, 'M', u'ᅯ'),
(0x315E, 'M', u'ᅰ'),
(0x315F, 'M', u'ᅱ'),
(0x3160, 'M', u'ᅲ'),
(0x3161, 'M', u'ᅳ'),
(0x3162, 'M', u'ᅴ'),
(0x3163, 'M', u'ᅵ'),
(0x3164, 'X'),
(0x3165, 'M', u'ᄔ'),
(0x3166, 'M', u'ᄕ'),
(0x3167, 'M', u'ᇇ'),
(0x3168, 'M', u'ᇈ'),
(0x3169, 'M', u'ᇌ'),
(0x316A, 'M', u'ᇎ'),
(0x316B, 'M', u'ᇓ'),
(0x316C, 'M', u'ᇗ'),
(0x316D, 'M', u'ᇙ'),
(0x316E, 'M', u'ᄜ'),
(0x316F, 'M', u'ᇝ'),
(0x3170, 'M', u'ᇟ'),
(0x3171, 'M', u'ᄝ'),
(0x3172, 'M', u'ᄞ'),
(0x3173, 'M', u'ᄠ'),
(0x3174, 'M', u'ᄢ'),
(0x3175, 'M', u'ᄣ'),
(0x3176, 'M', u'ᄧ'),
(0x3177, 'M', u'ᄩ'),
(0x3178, 'M', u'ᄫ'),
(0x3179, 'M', u'ᄬ'),
(0x317A, 'M', u'ᄭ'),
(0x317B, 'M', u'ᄮ'),
(0x317C, 'M', u'ᄯ'),
(0x317D, 'M', u'ᄲ'),
(0x317E, 'M', u'ᄶ'),
(0x317F, 'M', u'ᅀ'),
(0x3180, 'M', u'ᅇ'),
]
def _seg_30():
return [
(0x3181, 'M', u'ᅌ'),
(0x3182, 'M', u'ᇱ'),
(0x3183, 'M', u'ᇲ'),
(0x3184, 'M', u'ᅗ'),
(0x3185, 'M', u'ᅘ'),
(0x3186, 'M', u'ᅙ'),
(0x3187, 'M', u'ᆄ'),
(0x3188, 'M', u'ᆅ'),
(0x3189, 'M', u'ᆈ'),
(0x318A, 'M', u'ᆑ'),
(0x318B, 'M', u'ᆒ'),
(0x318C, 'M', u'ᆔ'),
(0x318D, 'M', u'ᆞ'),
(0x318E, 'M', u'ᆡ'),
(0x318F, 'X'),
(0x3190, 'V'),
(0x3192, 'M', u'一'),
(0x3193, 'M', u'二'),
(0x3194, 'M', u'三'),
(0x3195, 'M', u'四'),
(0x3196, 'M', u'上'),
(0x3197, 'M', u'中'),
(0x3198, 'M', u'下'),
(0x3199, 'M', u'甲'),
(0x319A, 'M', u'乙'),
(0x319B, 'M', u'丙'),
(0x319C, 'M', u'丁'),
(0x319D, 'M', u'天'),
(0x319E, 'M', u'地'),
(0x319F, 'M', u'人'),
(0x31A0, 'V'),
(0x31E4, 'X'),
(0x31F0, 'V'),
(0x3200, '3', u'(ᄀ)'),
(0x3201, '3', u'(ᄂ)'),
(0x3202, '3', u'(ᄃ)'),
(0x3203, '3', u'(ᄅ)'),
(0x3204, '3', u'(ᄆ)'),
(0x3205, '3', u'(ᄇ)'),
(0x3206, '3', u'(ᄉ)'),
(0x3207, '3', u'(ᄋ)'),
(0x3208, '3', u'(ᄌ)'),
(0x3209, '3', u'(ᄎ)'),
(0x320A, '3', u'(ᄏ)'),
(0x320B, '3', u'(ᄐ)'),
(0x320C, '3', u'(ᄑ)'),
(0x320D, '3', u'(ᄒ)'),
(0x320E, '3', u'(가)'),
(0x320F, '3', u'(나)'),
(0x3210, '3', u'(다)'),
(0x3211, '3', u'(라)'),
(0x3212, '3', u'(마)'),
(0x3213, '3', u'(바)'),
(0x3214, '3', u'(사)'),
(0x3215, '3', u'(아)'),
(0x3216, '3', u'(자)'),
(0x3217, '3', u'(차)'),
(0x3218, '3', u'(카)'),
(0x3219, '3', u'(타)'),
(0x321A, '3', u'(파)'),
(0x321B, '3', u'(하)'),
(0x321C, '3', u'(주)'),
(0x321D, '3', u'(오전)'),
(0x321E, '3', u'(오후)'),
(0x321F, 'X'),
(0x3220, '3', u'(一)'),
(0x3221, '3', u'(二)'),
(0x3222, '3', u'(三)'),
(0x3223, '3', u'(四)'),
(0x3224, '3', u'(五)'),
(0x3225, '3', u'(六)'),
(0x3226, '3', u'(七)'),
(0x3227, '3', u'(八)'),
(0x3228, '3', u'(九)'),
(0x3229, '3', u'(十)'),
(0x322A, '3', u'(月)'),
(0x322B, '3', u'(火)'),
(0x322C, '3', u'(水)'),
(0x322D, '3', u'(木)'),
(0x322E, '3', u'(金)'),
(0x322F, '3', u'(土)'),
(0x3230, '3', u'(日)'),
(0x3231, '3', u'(株)'),
(0x3232, '3', u'(有)'),
(0x3233, '3', u'(社)'),
(0x3234, '3', u'(名)'),
(0x3235, '3', u'(特)'),
(0x3236, '3', u'(財)'),
(0x3237, '3', u'(祝)'),
(0x3238, '3', u'(労)'),
(0x3239, '3', u'(代)'),
(0x323A, '3', u'(呼)'),
(0x323B, '3', u'(学)'),
(0x323C, '3', u'(監)'),
(0x323D, '3', u'(企)'),
(0x323E, '3', u'(資)'),
(0x323F, '3', u'(協)'),
(0x3240, '3', u'(祭)'),
(0x3241, '3', u'(休)'),
(0x3242, '3', u'(自)'),
]
def _seg_31():
return [
(0x3243, '3', u'(至)'),
(0x3244, 'M', u'問'),
(0x3245, 'M', u'幼'),
(0x3246, 'M', u'文'),
(0x3247, 'M', u'箏'),
(0x3248, 'V'),
(0x3250, 'M', u'pte'),
(0x3251, 'M', u'21'),
(0x3252, 'M', u'22'),
(0x3253, 'M', u'23'),
(0x3254, 'M', u'24'),
(0x3255, 'M', u'25'),
(0x3256, 'M', u'26'),
(0x3257, 'M', u'27'),
(0x3258, 'M', u'28'),
(0x3259, 'M', u'29'),
(0x325A, 'M', u'30'),
(0x325B, 'M', u'31'),
(0x325C, 'M', u'32'),
(0x325D, 'M', u'33'),
(0x325E, 'M', u'34'),
(0x325F, 'M', u'35'),
(0x3260, 'M', u'ᄀ'),
(0x3261, 'M', u'ᄂ'),
(0x3262, 'M', u'ᄃ'),
(0x3263, 'M', u'ᄅ'),
(0x3264, 'M', u'ᄆ'),
(0x3265, 'M', u'ᄇ'),
(0x3266, 'M', u'ᄉ'),
(0x3267, 'M', u'ᄋ'),
(0x3268, 'M', u'ᄌ'),
(0x3269, 'M', u'ᄎ'),
(0x326A, 'M', u'ᄏ'),
(0x326B, 'M', u'ᄐ'),
(0x326C, 'M', u'ᄑ'),
(0x326D, 'M', u'ᄒ'),
(0x326E, 'M', u'가'),
(0x326F, 'M', u'나'),
(0x3270, 'M', u'다'),
(0x3271, 'M', u'라'),
(0x3272, 'M', u'마'),
(0x3273, 'M', u'바'),
(0x3274, 'M', u'사'),
(0x3275, 'M', u'아'),
(0x3276, 'M', u'자'),
(0x3277, 'M', u'차'),
(0x3278, 'M', u'카'),
(0x3279, 'M', u'타'),
(0x327A, 'M', u'파'),
(0x327B, 'M', u'하'),
(0x327C, 'M', u'참고'),
(0x327D, 'M', u'주의'),
(0x327E, 'M', u'우'),
(0x327F, 'V'),
(0x3280, 'M', u'一'),
(0x3281, 'M', u'二'),
(0x3282, 'M', u'三'),
(0x3283, 'M', u'四'),
(0x3284, 'M', u'五'),
(0x3285, 'M', u'六'),
(0x3286, 'M', u'七'),
(0x3287, 'M', u'八'),
(0x3288, 'M', u'九'),
(0x3289, 'M', u'十'),
(0x328A, 'M', u'月'),
(0x328B, 'M', u'火'),
(0x328C, 'M', u'水'),
(0x328D, 'M', u'木'),
(0x328E, 'M', u'金'),
(0x328F, 'M', u'土'),
(0x3290, 'M', u'日'),
(0x3291, 'M', u'株'),
(0x3292, 'M', u'有'),
(0x3293, 'M', u'社'),
(0x3294, 'M', u'名'),
(0x3295, 'M', u'特'),
(0x3296, 'M', u'財'),
(0x3297, 'M', u'祝'),
(0x3298, 'M', u'労'),
(0x3299, 'M', u'秘'),
(0x329A, 'M', u'男'),
(0x329B, 'M', u'女'),
(0x329C, 'M', u'適'),
(0x329D, 'M', u'優'),
(0x329E, 'M', u'印'),
(0x329F, 'M', u'注'),
(0x32A0, 'M', u'項'),
(0x32A1, 'M', u'休'),
(0x32A2, 'M', u'写'),
(0x32A3, 'M', u'正'),
(0x32A4, 'M', u'上'),
(0x32A5, 'M', u'中'),
(0x32A6, 'M', u'下'),
(0x32A7, 'M', u'左'),
(0x32A8, 'M', u'右'),
(0x32A9, 'M', u'医'),
(0x32AA, 'M', u'宗'),
(0x32AB, 'M', u'学'),
(0x32AC, 'M', u'監'),
(0x32AD, 'M', u'企'),
]
def _seg_32():
return [
(0x32AE, 'M', u'資'),
(0x32AF, 'M', u'協'),
(0x32B0, 'M', u'夜'),
(0x32B1, 'M', u'36'),
(0x32B2, 'M', u'37'),
(0x32B3, 'M', u'38'),
(0x32B4, 'M', u'39'),
(0x32B5, 'M', u'40'),
(0x32B6, 'M', u'41'),
(0x32B7, 'M', u'42'),
(0x32B8, 'M', u'43'),
(0x32B9, 'M', u'44'),
(0x32BA, 'M', u'45'),
(0x32BB, 'M', u'46'),
(0x32BC, 'M', u'47'),
(0x32BD, 'M', u'48'),
(0x32BE, 'M', u'49'),
(0x32BF, 'M', u'50'),
(0x32C0, 'M', u'1月'),
(0x32C1, 'M', u'2月'),
(0x32C2, 'M', u'3月'),
(0x32C3, 'M', u'4月'),
(0x32C4, 'M', u'5月'),
(0x32C5, 'M', u'6月'),
(0x32C6, 'M', u'7月'),
(0x32C7, 'M', u'8月'),
(0x32C8, 'M', u'9月'),
(0x32C9, 'M', u'10月'),
(0x32CA, 'M', u'11月'),
(0x32CB, 'M', u'12月'),
(0x32CC, 'M', u'hg'),
(0x32CD, 'M', u'erg'),
(0x32CE, 'M', u'ev'),
(0x32CF, 'M', u'ltd'),
(0x32D0, 'M', u'ア'),
(0x32D1, 'M', u'イ'),
(0x32D2, 'M', u'ウ'),
(0x32D3, 'M', u'エ'),
(0x32D4, 'M', u'オ'),
(0x32D5, 'M', u'カ'),
(0x32D6, 'M', u'キ'),
(0x32D7, 'M', u'ク'),
(0x32D8, 'M', u'ケ'),
(0x32D9, 'M', u'コ'),
(0x32DA, 'M', u'サ'),
(0x32DB, 'M', u'シ'),
(0x32DC, 'M', u'ス'),
(0x32DD, 'M', u'セ'),
(0x32DE, 'M', u'ソ'),
(0x32DF, 'M', u'タ'),
(0x32E0, 'M', u'チ'),
(0x32E1, 'M', u'ツ'),
(0x32E2, 'M', u'テ'),
(0x32E3, 'M', u'ト'),
(0x32E4, 'M', u'ナ'),
(0x32E5, 'M', u'ニ'),
(0x32E6, 'M', u'ヌ'),
(0x32E7, 'M', u'ネ'),
(0x32E8, 'M', u'ノ'),
(0x32E9, 'M', u'ハ'),
(0x32EA, 'M', u'ヒ'),
(0x32EB, 'M', u'フ'),
(0x32EC, 'M', u'ヘ'),
(0x32ED, 'M', u'ホ'),
(0x32EE, 'M', u'マ'),
(0x32EF, 'M', u'ミ'),
(0x32F0, 'M', u'ム'),
(0x32F1, 'M', u'メ'),
(0x32F2, 'M', u'モ'),
(0x32F3, 'M', u'ヤ'),
(0x32F4, 'M', u'ユ'),
(0x32F5, 'M', u'ヨ'),
(0x32F6, 'M', u'ラ'),
(0x32F7, 'M', u'リ'),
(0x32F8, 'M', u'ル'),
(0x32F9, 'M', u'レ'),
(0x32FA, 'M', u'ロ'),
(0x32FB, 'M', u'ワ'),
(0x32FC, 'M', u'ヰ'),
(0x32FD, 'M', u'ヱ'),
(0x32FE, 'M', u'ヲ'),
(0x32FF, 'M', u'令和'),
(0x3300, 'M', u'アパート'),
(0x3301, 'M', u'アルファ'),
(0x3302, 'M', u'アンペア'),
(0x3303, 'M', u'アール'),
(0x3304, 'M', u'イニング'),
(0x3305, 'M', u'インチ'),
(0x3306, 'M', u'ウォン'),
(0x3307, 'M', u'エスクード'),
(0x3308, 'M', u'エーカー'),
(0x3309, 'M', u'オンス'),
(0x330A, 'M', u'オーム'),
(0x330B, 'M', u'カイリ'),
(0x330C, 'M', u'カラット'),
(0x330D, 'M', u'カロリー'),
(0x330E, 'M', u'ガロン'),
(0x330F, 'M', u'ガンマ'),
(0x3310, 'M', u'ギガ'),
(0x3311, 'M', u'ギニー'),
]
def _seg_33():
return [
(0x3312, 'M', u'キュリー'),
(0x3313, 'M', u'ギルダー'),
(0x3314, 'M', u'キロ'),
(0x3315, 'M', u'キログラム'),
(0x3316, 'M', u'キロメートル'),
(0x3317, 'M', u'キロワット'),
(0x3318, 'M', u'グラム'),
(0x3319, 'M', u'グラムトン'),
(0x331A, 'M', u'クルゼイロ'),
(0x331B, 'M', u'クローネ'),
(0x331C, 'M', u'ケース'),
(0x331D, 'M', u'コルナ'),
(0x331E, 'M', u'コーポ'),
(0x331F, 'M', u'サイクル'),
(0x3320, 'M', u'サンチーム'),
(0x3321, 'M', u'シリング'),
(0x3322, 'M', u'センチ'),
(0x3323, 'M', u'セント'),
(0x3324, 'M', u'ダース'),
(0x3325, 'M', u'デシ'),
(0x3326, 'M', u'ドル'),
(0x3327, 'M', u'トン'),
(0x3328, 'M', u'ナノ'),
(0x3329, 'M', u'ノット'),
(0x332A, 'M', u'ハイツ'),
(0x332B, 'M', u'パーセント'),
(0x332C, 'M', u'パーツ'),
(0x332D, 'M', u'バーレル'),
(0x332E, 'M', u'ピアストル'),
(0x332F, 'M', u'ピクル'),
(0x3330, 'M', u'ピコ'),
(0x3331, 'M', u'ビル'),
(0x3332, 'M', u'ファラッド'),
(0x3333, 'M', u'フィート'),
(0x3334, 'M', u'ブッシェル'),
(0x3335, 'M', u'フラン'),
(0x3336, 'M', u'ヘクタール'),
(0x3337, 'M', u'ペソ'),
(0x3338, 'M', u'ペニヒ'),
(0x3339, 'M', u'ヘルツ'),
(0x333A, 'M', u'ペンス'),
(0x333B, 'M', u'ページ'),
(0x333C, 'M', u'ベータ'),
(0x333D, 'M', u'ポイント'),
(0x333E, 'M', u'ボルト'),
(0x333F, 'M', u'ホン'),
(0x3340, 'M', u'ポンド'),
(0x3341, 'M', u'ホール'),
(0x3342, 'M', u'ホーン'),
(0x3343, 'M', u'マイクロ'),
(0x3344, 'M', u'マイル'),
(0x3345, 'M', u'マッハ'),
(0x3346, 'M', u'マルク'),
(0x3347, 'M', u'マンション'),
(0x3348, 'M', u'ミクロン'),
(0x3349, 'M', u'ミリ'),
(0x334A, 'M', u'ミリバール'),
(0x334B, 'M', u'メガ'),
(0x334C, 'M', u'メガトン'),
(0x334D, 'M', u'メートル'),
(0x334E, 'M', u'ヤード'),
(0x334F, 'M', u'ヤール'),
(0x3350, 'M', u'ユアン'),
(0x3351, 'M', u'リットル'),
(0x3352, 'M', u'リラ'),
(0x3353, 'M', u'ルピー'),
(0x3354, 'M', u'ルーブル'),
(0x3355, 'M', u'レム'),
(0x3356, 'M', u'レントゲン'),
(0x3357, 'M', u'ワット'),
(0x3358, 'M', u'0点'),
(0x3359, 'M', u'1点'),
(0x335A, 'M', u'2点'),
(0x335B, 'M', u'3点'),
(0x335C, 'M', u'4点'),
(0x335D, 'M', u'5点'),
(0x335E, 'M', u'6点'),
(0x335F, 'M', u'7点'),
(0x3360, 'M', u'8点'),
(0x3361, 'M', u'9点'),
(0x3362, 'M', u'10点'),
(0x3363, 'M', u'11点'),
(0x3364, 'M', u'12点'),
(0x3365, 'M', u'13点'),
(0x3366, 'M', u'14点'),
(0x3367, 'M', u'15点'),
(0x3368, 'M', u'16点'),
(0x3369, 'M', u'17点'),
(0x336A, 'M', u'18点'),
(0x336B, 'M', u'19点'),
(0x336C, 'M', u'20点'),
(0x336D, 'M', u'21点'),
(0x336E, 'M', u'22点'),
(0x336F, 'M', u'23点'),
(0x3370, 'M', u'24点'),
(0x3371, 'M', u'hpa'),
(0x3372, 'M', u'da'),
(0x3373, 'M', u'au'),
(0x3374, 'M', u'bar'),
(0x3375, 'M', u'ov'),
]
def _seg_34():
return [
(0x3376, 'M', u'pc'),
(0x3377, 'M', u'dm'),
(0x3378, 'M', u'dm2'),
(0x3379, 'M', u'dm3'),
(0x337A, 'M', u'iu'),
(0x337B, 'M', u'平成'),
(0x337C, 'M', u'昭和'),
(0x337D, 'M', u'大正'),
(0x337E, 'M', u'明治'),
(0x337F, 'M', u'株式会社'),
(0x3380, 'M', u'pa'),
(0x3381, 'M', u'na'),
(0x3382, 'M', u'μa'),
(0x3383, 'M', u'ma'),
(0x3384, 'M', u'ka'),
(0x3385, 'M', u'kb'),
(0x3386, 'M', u'mb'),
(0x3387, 'M', u'gb'),
(0x3388, 'M', u'cal'),
(0x3389, 'M', u'kcal'),
(0x338A, 'M', u'pf'),
(0x338B, 'M', u'nf'),
(0x338C, 'M', u'μf'),
(0x338D, 'M', u'μg'),
(0x338E, 'M', u'mg'),
(0x338F, 'M', u'kg'),
(0x3390, 'M', u'hz'),
(0x3391, 'M', u'khz'),
(0x3392, 'M', u'mhz'),
(0x3393, 'M', u'ghz'),
(0x3394, 'M', u'thz'),
(0x3395, 'M', u'μl'),
(0x3396, 'M', u'ml'),
(0x3397, 'M', u'dl'),
(0x3398, 'M', u'kl'),
(0x3399, 'M', u'fm'),
(0x339A, 'M', u'nm'),
(0x339B, 'M', u'μm'),
(0x339C, 'M', u'mm'),
(0x339D, 'M', u'cm'),
(0x339E, 'M', u'km'),
(0x339F, 'M', u'mm2'),
(0x33A0, 'M', u'cm2'),
(0x33A1, 'M', u'm2'),
(0x33A2, 'M', u'km2'),
(0x33A3, 'M', u'mm3'),
(0x33A4, 'M', u'cm3'),
(0x33A5, 'M', u'm3'),
(0x33A6, 'M', u'km3'),
(0x33A7, 'M', u'm∕s'),
(0x33A8, 'M', u'm∕s2'),
(0x33A9, 'M', u'pa'),
(0x33AA, 'M', u'kpa'),
(0x33AB, 'M', u'mpa'),
(0x33AC, 'M', u'gpa'),
(0x33AD, 'M', u'rad'),
(0x33AE, 'M', u'rad∕s'),
(0x33AF, 'M', u'rad∕s2'),
(0x33B0, 'M', u'ps'),
(0x33B1, 'M', u'ns'),
(0x33B2, 'M', u'μs'),
(0x33B3, 'M', u'ms'),
(0x33B4, 'M', u'pv'),
(0x33B5, 'M', u'nv'),
(0x33B6, 'M', u'μv'),
(0x33B7, 'M', u'mv'),
(0x33B8, 'M', u'kv'),
(0x33B9, 'M', u'mv'),
(0x33BA, 'M', u'pw'),
(0x33BB, 'M', u'nw'),
(0x33BC, 'M', u'μw'),
(0x33BD, 'M', u'mw'),
(0x33BE, 'M', u'kw'),
(0x33BF, 'M', u'mw'),
(0x33C0, 'M', u'kω'),
(0x33C1, 'M', u'mω'),
(0x33C2, 'X'),
(0x33C3, 'M', u'bq'),
(0x33C4, 'M', u'cc'),
(0x33C5, 'M', u'cd'),
(0x33C6, 'M', u'c∕kg'),
(0x33C7, 'X'),
(0x33C8, 'M', u'db'),
(0x33C9, 'M', u'gy'),
(0x33CA, 'M', u'ha'),
(0x33CB, 'M', u'hp'),
(0x33CC, 'M', u'in'),
(0x33CD, 'M', u'kk'),
(0x33CE, 'M', u'km'),
(0x33CF, 'M', u'kt'),
(0x33D0, 'M', u'lm'),
(0x33D1, 'M', u'ln'),
(0x33D2, 'M', u'log'),
(0x33D3, 'M', u'lx'),
(0x33D4, 'M', u'mb'),
(0x33D5, 'M', u'mil'),
(0x33D6, 'M', u'mol'),
(0x33D7, 'M', u'ph'),
(0x33D8, 'X'),
(0x33D9, 'M', u'ppm'),
]
def _seg_35():
return [
(0x33DA, 'M', u'pr'),
(0x33DB, 'M', u'sr'),
(0x33DC, 'M', u'sv'),
(0x33DD, 'M', u'wb'),
(0x33DE, 'M', u'v∕m'),
(0x33DF, 'M', u'a∕m'),
(0x33E0, 'M', u'1日'),
(0x33E1, 'M', u'2日'),
(0x33E2, 'M', u'3日'),
(0x33E3, 'M', u'4日'),
(0x33E4, 'M', u'5日'),
(0x33E5, 'M', u'6日'),
(0x33E6, 'M', u'7日'),
(0x33E7, 'M', u'8日'),
(0x33E8, 'M', u'9日'),
(0x33E9, 'M', u'10日'),
(0x33EA, 'M', u'11日'),
(0x33EB, 'M', u'12日'),
(0x33EC, 'M', u'13日'),
(0x33ED, 'M', u'14日'),
(0x33EE, 'M', u'15日'),
(0x33EF, 'M', u'16日'),
(0x33F0, 'M', u'17日'),
(0x33F1, 'M', u'18日'),
(0x33F2, 'M', u'19日'),
(0x33F3, 'M', u'20日'),
(0x33F4, 'M', u'21日'),
(0x33F5, 'M', u'22日'),
(0x33F6, 'M', u'23日'),
(0x33F7, 'M', u'24日'),
(0x33F8, 'M', u'25日'),
(0x33F9, 'M', u'26日'),
(0x33FA, 'M', u'27日'),
(0x33FB, 'M', u'28日'),
(0x33FC, 'M', u'29日'),
(0x33FD, 'M', u'30日'),
(0x33FE, 'M', u'31日'),
(0x33FF, 'M', u'gal'),
(0x3400, 'V'),
(0x9FFD, 'X'),
(0xA000, 'V'),
(0xA48D, 'X'),
(0xA490, 'V'),
(0xA4C7, 'X'),
(0xA4D0, 'V'),
(0xA62C, 'X'),
(0xA640, 'M', u'ꙁ'),
(0xA641, 'V'),
(0xA642, 'M', u'ꙃ'),
(0xA643, 'V'),
(0xA644, 'M', u'ꙅ'),
(0xA645, 'V'),
(0xA646, 'M', u'ꙇ'),
(0xA647, 'V'),
(0xA648, 'M', u'ꙉ'),
(0xA649, 'V'),
(0xA64A, 'M', u'ꙋ'),
(0xA64B, 'V'),
(0xA64C, 'M', u'ꙍ'),
(0xA64D, 'V'),
(0xA64E, 'M', u'ꙏ'),
(0xA64F, 'V'),
(0xA650, 'M', u'ꙑ'),
(0xA651, 'V'),
(0xA652, 'M', u'ꙓ'),
(0xA653, 'V'),
(0xA654, 'M', u'ꙕ'),
(0xA655, 'V'),
(0xA656, 'M', u'ꙗ'),
(0xA657, 'V'),
(0xA658, 'M', u'ꙙ'),
(0xA659, 'V'),
(0xA65A, 'M', u'ꙛ'),
(0xA65B, 'V'),
(0xA65C, 'M', u'ꙝ'),
(0xA65D, 'V'),
(0xA65E, 'M', u'ꙟ'),
(0xA65F, 'V'),
(0xA660, 'M', u'ꙡ'),
(0xA661, 'V'),
(0xA662, 'M', u'ꙣ'),
(0xA663, 'V'),
(0xA664, 'M', u'ꙥ'),
(0xA665, 'V'),
(0xA666, 'M', u'ꙧ'),
(0xA667, 'V'),
(0xA668, 'M', u'ꙩ'),
(0xA669, 'V'),
(0xA66A, 'M', u'ꙫ'),
(0xA66B, 'V'),
(0xA66C, 'M', u'ꙭ'),
(0xA66D, 'V'),
(0xA680, 'M', u'ꚁ'),
(0xA681, 'V'),
(0xA682, 'M', u'ꚃ'),
(0xA683, 'V'),
(0xA684, 'M', u'ꚅ'),
(0xA685, 'V'),
(0xA686, 'M', u'ꚇ'),
(0xA687, 'V'),
]
def _seg_36():
return [
(0xA688, 'M', u'ꚉ'),
(0xA689, 'V'),
(0xA68A, 'M', u'ꚋ'),
(0xA68B, 'V'),
(0xA68C, 'M', u'ꚍ'),
(0xA68D, 'V'),
(0xA68E, 'M', u'ꚏ'),
(0xA68F, 'V'),
(0xA690, 'M', u'ꚑ'),
(0xA691, 'V'),
(0xA692, 'M', u'ꚓ'),
(0xA693, 'V'),
(0xA694, 'M', u'ꚕ'),
(0xA695, 'V'),
(0xA696, 'M', u'ꚗ'),
(0xA697, 'V'),
(0xA698, 'M', u'ꚙ'),
(0xA699, 'V'),
(0xA69A, 'M', u'ꚛ'),
(0xA69B, 'V'),
(0xA69C, 'M', u'ъ'),
(0xA69D, 'M', u'ь'),
(0xA69E, 'V'),
(0xA6F8, 'X'),
(0xA700, 'V'),
(0xA722, 'M', u'ꜣ'),
(0xA723, 'V'),
(0xA724, 'M', u'ꜥ'),
(0xA725, 'V'),
(0xA726, 'M', u'ꜧ'),
(0xA727, 'V'),
(0xA728, 'M', u'ꜩ'),
(0xA729, 'V'),
(0xA72A, 'M', u'ꜫ'),
(0xA72B, 'V'),
(0xA72C, 'M', u'ꜭ'),
(0xA72D, 'V'),
(0xA72E, 'M', u'ꜯ'),
(0xA72F, 'V'),
(0xA732, 'M', u'ꜳ'),
(0xA733, 'V'),
(0xA734, 'M', u'ꜵ'),
(0xA735, 'V'),
(0xA736, 'M', u'ꜷ'),
(0xA737, 'V'),
(0xA738, 'M', u'ꜹ'),
(0xA739, 'V'),
(0xA73A, 'M', u'ꜻ'),
(0xA73B, 'V'),
(0xA73C, 'M', u'ꜽ'),
(0xA73D, 'V'),
(0xA73E, 'M', u'ꜿ'),
(0xA73F, 'V'),
(0xA740, 'M', u'ꝁ'),
(0xA741, 'V'),
(0xA742, 'M', u'ꝃ'),
(0xA743, 'V'),
(0xA744, 'M', u'ꝅ'),
(0xA745, 'V'),
(0xA746, 'M', u'ꝇ'),
(0xA747, 'V'),
(0xA748, 'M', u'ꝉ'),
(0xA749, 'V'),
(0xA74A, 'M', u'ꝋ'),
(0xA74B, 'V'),
(0xA74C, 'M', u'ꝍ'),
(0xA74D, 'V'),
(0xA74E, 'M', u'ꝏ'),
(0xA74F, 'V'),
(0xA750, 'M', u'ꝑ'),
(0xA751, 'V'),
(0xA752, 'M', u'ꝓ'),
(0xA753, 'V'),
(0xA754, 'M', u'ꝕ'),
(0xA755, 'V'),
(0xA756, 'M', u'ꝗ'),
(0xA757, 'V'),
(0xA758, 'M', u'ꝙ'),
(0xA759, 'V'),
(0xA75A, 'M', u'ꝛ'),
(0xA75B, 'V'),
(0xA75C, 'M', u'ꝝ'),
(0xA75D, 'V'),
(0xA75E, 'M', u'ꝟ'),
(0xA75F, 'V'),
(0xA760, 'M', u'ꝡ'),
(0xA761, 'V'),
(0xA762, 'M', u'ꝣ'),
(0xA763, 'V'),
(0xA764, 'M', u'ꝥ'),
(0xA765, 'V'),
(0xA766, 'M', u'ꝧ'),
(0xA767, 'V'),
(0xA768, 'M', u'ꝩ'),
(0xA769, 'V'),
(0xA76A, 'M', u'ꝫ'),
(0xA76B, 'V'),
(0xA76C, 'M', u'ꝭ'),
(0xA76D, 'V'),
(0xA76E, 'M', u'ꝯ'),
]
def _seg_37():
return [
(0xA76F, 'V'),
(0xA770, 'M', u'ꝯ'),
(0xA771, 'V'),
(0xA779, 'M', u'ꝺ'),
(0xA77A, 'V'),
(0xA77B, 'M', u'ꝼ'),
(0xA77C, 'V'),
(0xA77D, 'M', u'ᵹ'),
(0xA77E, 'M', u'ꝿ'),
(0xA77F, 'V'),
(0xA780, 'M', u'ꞁ'),
(0xA781, 'V'),
(0xA782, 'M', u'ꞃ'),
(0xA783, 'V'),
(0xA784, 'M', u'ꞅ'),
(0xA785, 'V'),
(0xA786, 'M', u'ꞇ'),
(0xA787, 'V'),
(0xA78B, 'M', u'ꞌ'),
(0xA78C, 'V'),
(0xA78D, 'M', u'ɥ'),
(0xA78E, 'V'),
(0xA790, 'M', u'ꞑ'),
(0xA791, 'V'),
(0xA792, 'M', u'ꞓ'),
(0xA793, 'V'),
(0xA796, 'M', u'ꞗ'),
(0xA797, 'V'),
(0xA798, 'M', u'ꞙ'),
(0xA799, 'V'),
(0xA79A, 'M', u'ꞛ'),
(0xA79B, 'V'),
(0xA79C, 'M', u'ꞝ'),
(0xA79D, 'V'),
(0xA79E, 'M', u'ꞟ'),
(0xA79F, 'V'),
(0xA7A0, 'M', u'ꞡ'),
(0xA7A1, 'V'),
(0xA7A2, 'M', u'ꞣ'),
(0xA7A3, 'V'),
(0xA7A4, 'M', u'ꞥ'),
(0xA7A5, 'V'),
(0xA7A6, 'M', u'ꞧ'),
(0xA7A7, 'V'),
(0xA7A8, 'M', u'ꞩ'),
(0xA7A9, 'V'),
(0xA7AA, 'M', u'ɦ'),
(0xA7AB, 'M', u'ɜ'),
(0xA7AC, 'M', u'ɡ'),
(0xA7AD, 'M', u'ɬ'),
(0xA7AE, 'M', u'ɪ'),
(0xA7AF, 'V'),
(0xA7B0, 'M', u'ʞ'),
(0xA7B1, 'M', u'ʇ'),
(0xA7B2, 'M', u'ʝ'),
(0xA7B3, 'M', u'ꭓ'),
(0xA7B4, 'M', u'ꞵ'),
(0xA7B5, 'V'),
(0xA7B6, 'M', u'ꞷ'),
(0xA7B7, 'V'),
(0xA7B8, 'M', u'ꞹ'),
(0xA7B9, 'V'),
(0xA7BA, 'M', u'ꞻ'),
(0xA7BB, 'V'),
(0xA7BC, 'M', u'ꞽ'),
(0xA7BD, 'V'),
(0xA7BE, 'M', u'ꞿ'),
(0xA7BF, 'V'),
(0xA7C0, 'X'),
(0xA7C2, 'M', u'ꟃ'),
(0xA7C3, 'V'),
(0xA7C4, 'M', u'ꞔ'),
(0xA7C5, 'M', u'ʂ'),
(0xA7C6, 'M', u'ᶎ'),
(0xA7C7, 'M', u'ꟈ'),
(0xA7C8, 'V'),
(0xA7C9, 'M', u'ꟊ'),
(0xA7CA, 'V'),
(0xA7CB, 'X'),
(0xA7F5, 'M', u'ꟶ'),
(0xA7F6, 'V'),
(0xA7F8, 'M', u'ħ'),
(0xA7F9, 'M', u'œ'),
(0xA7FA, 'V'),
(0xA82D, 'X'),
(0xA830, 'V'),
(0xA83A, 'X'),
(0xA840, 'V'),
(0xA878, 'X'),
(0xA880, 'V'),
(0xA8C6, 'X'),
(0xA8CE, 'V'),
(0xA8DA, 'X'),
(0xA8E0, 'V'),
(0xA954, 'X'),
(0xA95F, 'V'),
(0xA97D, 'X'),
(0xA980, 'V'),
(0xA9CE, 'X'),
(0xA9CF, 'V'),
]
def _seg_38():
return [
(0xA9DA, 'X'),
(0xA9DE, 'V'),
(0xA9FF, 'X'),
(0xAA00, 'V'),
(0xAA37, 'X'),
(0xAA40, 'V'),
(0xAA4E, 'X'),
(0xAA50, 'V'),
(0xAA5A, 'X'),
(0xAA5C, 'V'),
(0xAAC3, 'X'),
(0xAADB, 'V'),
(0xAAF7, 'X'),
(0xAB01, 'V'),
(0xAB07, 'X'),
(0xAB09, 'V'),
(0xAB0F, 'X'),
(0xAB11, 'V'),
(0xAB17, 'X'),
(0xAB20, 'V'),
(0xAB27, 'X'),
(0xAB28, 'V'),
(0xAB2F, 'X'),
(0xAB30, 'V'),
(0xAB5C, 'M', u'ꜧ'),
(0xAB5D, 'M', u'ꬷ'),
(0xAB5E, 'M', u'ɫ'),
(0xAB5F, 'M', u'ꭒ'),
(0xAB60, 'V'),
(0xAB69, 'M', u'ʍ'),
(0xAB6A, 'V'),
(0xAB6C, 'X'),
(0xAB70, 'M', u'Ꭰ'),
(0xAB71, 'M', u'Ꭱ'),
(0xAB72, 'M', u'Ꭲ'),
(0xAB73, 'M', u'Ꭳ'),
(0xAB74, 'M', u'Ꭴ'),
(0xAB75, 'M', u'Ꭵ'),
(0xAB76, 'M', u'Ꭶ'),
(0xAB77, 'M', u'Ꭷ'),
(0xAB78, 'M', u'Ꭸ'),
(0xAB79, 'M', u'Ꭹ'),
(0xAB7A, 'M', u'Ꭺ'),
(0xAB7B, 'M', u'Ꭻ'),
(0xAB7C, 'M', u'Ꭼ'),
(0xAB7D, 'M', u'Ꭽ'),
(0xAB7E, 'M', u'Ꭾ'),
(0xAB7F, 'M', u'Ꭿ'),
(0xAB80, 'M', u'Ꮀ'),
(0xAB81, 'M', u'Ꮁ'),
(0xAB82, 'M', u'Ꮂ'),
(0xAB83, 'M', u'Ꮃ'),
(0xAB84, 'M', u'Ꮄ'),
(0xAB85, 'M', u'Ꮅ'),
(0xAB86, 'M', u'Ꮆ'),
(0xAB87, 'M', u'Ꮇ'),
(0xAB88, 'M', u'Ꮈ'),
(0xAB89, 'M', u'Ꮉ'),
(0xAB8A, 'M', u'Ꮊ'),
(0xAB8B, 'M', u'Ꮋ'),
(0xAB8C, 'M', u'Ꮌ'),
(0xAB8D, 'M', u'Ꮍ'),
(0xAB8E, 'M', u'Ꮎ'),
(0xAB8F, 'M', u'Ꮏ'),
(0xAB90, 'M', u'Ꮐ'),
(0xAB91, 'M', u'Ꮑ'),
(0xAB92, 'M', u'Ꮒ'),
(0xAB93, 'M', u'Ꮓ'),
(0xAB94, 'M', u'Ꮔ'),
(0xAB95, 'M', u'Ꮕ'),
(0xAB96, 'M', u'Ꮖ'),
(0xAB97, 'M', u'Ꮗ'),
(0xAB98, 'M', u'Ꮘ'),
(0xAB99, 'M', u'Ꮙ'),
(0xAB9A, 'M', u'Ꮚ'),
(0xAB9B, 'M', u'Ꮛ'),
(0xAB9C, 'M', u'Ꮜ'),
(0xAB9D, 'M', u'Ꮝ'),
(0xAB9E, 'M', u'Ꮞ'),
(0xAB9F, 'M', u'Ꮟ'),
(0xABA0, 'M', u'Ꮠ'),
(0xABA1, 'M', u'Ꮡ'),
(0xABA2, 'M', u'Ꮢ'),
(0xABA3, 'M', u'Ꮣ'),
(0xABA4, 'M', u'Ꮤ'),
(0xABA5, 'M', u'Ꮥ'),
(0xABA6, 'M', u'Ꮦ'),
(0xABA7, 'M', u'Ꮧ'),
(0xABA8, 'M', u'Ꮨ'),
(0xABA9, 'M', u'Ꮩ'),
(0xABAA, 'M', u'Ꮪ'),
(0xABAB, 'M', u'Ꮫ'),
(0xABAC, 'M', u'Ꮬ'),
(0xABAD, 'M', u'Ꮭ'),
(0xABAE, 'M', u'Ꮮ'),
(0xABAF, 'M', u'Ꮯ'),
(0xABB0, 'M', u'Ꮰ'),
(0xABB1, 'M', u'Ꮱ'),
(0xABB2, 'M', u'Ꮲ'),
(0xABB3, 'M', u'Ꮳ'),
]
def _seg_39():
return [
(0xABB4, 'M', u'Ꮴ'),
(0xABB5, 'M', u'Ꮵ'),
(0xABB6, 'M', u'Ꮶ'),
(0xABB7, 'M', u'Ꮷ'),
(0xABB8, 'M', u'Ꮸ'),
(0xABB9, 'M', u'Ꮹ'),
(0xABBA, 'M', u'Ꮺ'),
(0xABBB, 'M', u'Ꮻ'),
(0xABBC, 'M', u'Ꮼ'),
(0xABBD, 'M', u'Ꮽ'),
(0xABBE, 'M', u'Ꮾ'),
(0xABBF, 'M', u'Ꮿ'),
(0xABC0, 'V'),
(0xABEE, 'X'),
(0xABF0, 'V'),
(0xABFA, 'X'),
(0xAC00, 'V'),
(0xD7A4, 'X'),
(0xD7B0, 'V'),
(0xD7C7, 'X'),
(0xD7CB, 'V'),
(0xD7FC, 'X'),
(0xF900, 'M', u'豈'),
(0xF901, 'M', u'更'),
(0xF902, 'M', u'車'),
(0xF903, 'M', u'賈'),
(0xF904, 'M', u'滑'),
(0xF905, 'M', u'串'),
(0xF906, 'M', u'句'),
(0xF907, 'M', u'龜'),
(0xF909, 'M', u'契'),
(0xF90A, 'M', u'金'),
(0xF90B, 'M', u'喇'),
(0xF90C, 'M', u'奈'),
(0xF90D, 'M', u'懶'),
(0xF90E, 'M', u'癩'),
(0xF90F, 'M', u'羅'),
(0xF910, 'M', u'蘿'),
(0xF911, 'M', u'螺'),
(0xF912, 'M', u'裸'),
(0xF913, 'M', u'邏'),
(0xF914, 'M', u'樂'),
(0xF915, 'M', u'洛'),
(0xF916, 'M', u'烙'),
(0xF917, 'M', u'珞'),
(0xF918, 'M', u'落'),
(0xF919, 'M', u'酪'),
(0xF91A, 'M', u'駱'),
(0xF91B, 'M', u'亂'),
(0xF91C, 'M', u'卵'),
(0xF91D, 'M', u'欄'),
(0xF91E, 'M', u'爛'),
(0xF91F, 'M', u'蘭'),
(0xF920, 'M', u'鸞'),
(0xF921, 'M', u'嵐'),
(0xF922, 'M', u'濫'),
(0xF923, 'M', u'藍'),
(0xF924, 'M', u'襤'),
(0xF925, 'M', u'拉'),
(0xF926, 'M', u'臘'),
(0xF927, 'M', u'蠟'),
(0xF928, 'M', u'廊'),
(0xF929, 'M', u'朗'),
(0xF92A, 'M', u'浪'),
(0xF92B, 'M', u'狼'),
(0xF92C, 'M', u'郎'),
(0xF92D, 'M', u'來'),
(0xF92E, 'M', u'冷'),
(0xF92F, 'M', u'勞'),
(0xF930, 'M', u'擄'),
(0xF931, 'M', u'櫓'),
(0xF932, 'M', u'爐'),
(0xF933, 'M', u'盧'),
(0xF934, 'M', u'老'),
(0xF935, 'M', u'蘆'),
(0xF936, 'M', u'虜'),
(0xF937, 'M', u'路'),
(0xF938, 'M', u'露'),
(0xF939, 'M', u'魯'),
(0xF93A, 'M', u'鷺'),
(0xF93B, 'M', u'碌'),
(0xF93C, 'M', u'祿'),
(0xF93D, 'M', u'綠'),
(0xF93E, 'M', u'菉'),
(0xF93F, 'M', u'錄'),
(0xF940, 'M', u'鹿'),
(0xF941, 'M', u'論'),
(0xF942, 'M', u'壟'),
(0xF943, 'M', u'弄'),
(0xF944, 'M', u'籠'),
(0xF945, 'M', u'聾'),
(0xF946, 'M', u'牢'),
(0xF947, 'M', u'磊'),
(0xF948, 'M', u'賂'),
(0xF949, 'M', u'雷'),
(0xF94A, 'M', u'壘'),
(0xF94B, 'M', u'屢'),
(0xF94C, 'M', u'樓'),
(0xF94D, 'M', u'淚'),
(0xF94E, 'M', u'漏'),
]
def _seg_40():
return [
(0xF94F, 'M', u'累'),
(0xF950, 'M', u'縷'),
(0xF951, 'M', u'陋'),
(0xF952, 'M', u'勒'),
(0xF953, 'M', u'肋'),
(0xF954, 'M', u'凜'),
(0xF955, 'M', u'凌'),
(0xF956, 'M', u'稜'),
(0xF957, 'M', u'綾'),
(0xF958, 'M', u'菱'),
(0xF959, 'M', u'陵'),
(0xF95A, 'M', u'讀'),
(0xF95B, 'M', u'拏'),
(0xF95C, 'M', u'樂'),
(0xF95D, 'M', u'諾'),
(0xF95E, 'M', u'丹'),
(0xF95F, 'M', u'寧'),
(0xF960, 'M', u'怒'),
(0xF961, 'M', u'率'),
(0xF962, 'M', u'異'),
(0xF963, 'M', u'北'),
(0xF964, 'M', u'磻'),
(0xF965, 'M', u'便'),
(0xF966, 'M', u'復'),
(0xF967, 'M', u'不'),
(0xF968, 'M', u'泌'),
(0xF969, 'M', u'數'),
(0xF96A, 'M', u'索'),
(0xF96B, 'M', u'參'),
(0xF96C, 'M', u'塞'),
(0xF96D, 'M', u'省'),
(0xF96E, 'M', u'葉'),
(0xF96F, 'M', u'說'),
(0xF970, 'M', u'殺'),
(0xF971, 'M', u'辰'),
(0xF972, 'M', u'沈'),
(0xF973, 'M', u'拾'),
(0xF974, 'M', u'若'),
(0xF975, 'M', u'掠'),
(0xF976, 'M', u'略'),
(0xF977, 'M', u'亮'),
(0xF978, 'M', u'兩'),
(0xF979, 'M', u'凉'),
(0xF97A, 'M', u'梁'),
(0xF97B, 'M', u'糧'),
(0xF97C, 'M', u'良'),
(0xF97D, 'M', u'諒'),
(0xF97E, 'M', u'量'),
(0xF97F, 'M', u'勵'),
(0xF980, 'M', u'呂'),
(0xF981, 'M', u'女'),
(0xF982, 'M', u'廬'),
(0xF983, 'M', u'旅'),
(0xF984, 'M', u'濾'),
(0xF985, 'M', u'礪'),
(0xF986, 'M', u'閭'),
(0xF987, 'M', u'驪'),
(0xF988, 'M', u'麗'),
(0xF989, 'M', u'黎'),
(0xF98A, 'M', u'力'),
(0xF98B, 'M', u'曆'),
(0xF98C, 'M', u'歷'),
(0xF98D, 'M', u'轢'),
(0xF98E, 'M', u'年'),
(0xF98F, 'M', u'憐'),
(0xF990, 'M', u'戀'),
(0xF991, 'M', u'撚'),
(0xF992, 'M', u'漣'),
(0xF993, 'M', u'煉'),
(0xF994, 'M', u'璉'),
(0xF995, 'M', u'秊'),
(0xF996, 'M', u'練'),
(0xF997, 'M', u'聯'),
(0xF998, 'M', u'輦'),
(0xF999, 'M', u'蓮'),
(0xF99A, 'M', u'連'),
(0xF99B, 'M', u'鍊'),
(0xF99C, 'M', u'列'),
(0xF99D, 'M', u'劣'),
(0xF99E, 'M', u'咽'),
(0xF99F, 'M', u'烈'),
(0xF9A0, 'M', u'裂'),
(0xF9A1, 'M', u'說'),
(0xF9A2, 'M', u'廉'),
(0xF9A3, 'M', u'念'),
(0xF9A4, 'M', u'捻'),
(0xF9A5, 'M', u'殮'),
(0xF9A6, 'M', u'簾'),
(0xF9A7, 'M', u'獵'),
(0xF9A8, 'M', u'令'),
(0xF9A9, 'M', u'囹'),
(0xF9AA, 'M', u'寧'),
(0xF9AB, 'M', u'嶺'),
(0xF9AC, 'M', u'怜'),
(0xF9AD, 'M', u'玲'),
(0xF9AE, 'M', u'瑩'),
(0xF9AF, 'M', u'羚'),
(0xF9B0, 'M', u'聆'),
(0xF9B1, 'M', u'鈴'),
(0xF9B2, 'M', u'零'),
]
def _seg_41():
return [
(0xF9B3, 'M', u'靈'),
(0xF9B4, 'M', u'領'),
(0xF9B5, 'M', u'例'),
(0xF9B6, 'M', u'禮'),
(0xF9B7, 'M', u'醴'),
(0xF9B8, 'M', u'隸'),
(0xF9B9, 'M', u'惡'),
(0xF9BA, 'M', u'了'),
(0xF9BB, 'M', u'僚'),
(0xF9BC, 'M', u'寮'),
(0xF9BD, 'M', u'尿'),
(0xF9BE, 'M', u'料'),
(0xF9BF, 'M', u'樂'),
(0xF9C0, 'M', u'燎'),
(0xF9C1, 'M', u'療'),
(0xF9C2, 'M', u'蓼'),
(0xF9C3, 'M', u'遼'),
(0xF9C4, 'M', u'龍'),
(0xF9C5, 'M', u'暈'),
(0xF9C6, 'M', u'阮'),
(0xF9C7, 'M', u'劉'),
(0xF9C8, 'M', u'杻'),
(0xF9C9, 'M', u'柳'),
(0xF9CA, 'M', u'流'),
(0xF9CB, 'M', u'溜'),
(0xF9CC, 'M', u'琉'),
(0xF9CD, 'M', u'留'),
(0xF9CE, 'M', u'硫'),
(0xF9CF, 'M', u'紐'),
(0xF9D0, 'M', u'類'),
(0xF9D1, 'M', u'六'),
(0xF9D2, 'M', u'戮'),
(0xF9D3, 'M', u'陸'),
(0xF9D4, 'M', u'倫'),
(0xF9D5, 'M', u'崙'),
(0xF9D6, 'M', u'淪'),
(0xF9D7, 'M', u'輪'),
(0xF9D8, 'M', u'律'),
(0xF9D9, 'M', u'慄'),
(0xF9DA, 'M', u'栗'),
(0xF9DB, 'M', u'率'),
(0xF9DC, 'M', u'隆'),
(0xF9DD, 'M', u'利'),
(0xF9DE, 'M', u'吏'),
(0xF9DF, 'M', u'履'),
(0xF9E0, 'M', u'易'),
(0xF9E1, 'M', u'李'),
(0xF9E2, 'M', u'梨'),
(0xF9E3, 'M', u'泥'),
(0xF9E4, 'M', u'理'),
(0xF9E5, 'M', u'痢'),
(0xF9E6, 'M', u'罹'),
(0xF9E7, 'M', u'裏'),
(0xF9E8, 'M', u'裡'),
(0xF9E9, 'M', u'里'),
(0xF9EA, 'M', u'離'),
(0xF9EB, 'M', u'匿'),
(0xF9EC, 'M', u'溺'),
(0xF9ED, 'M', u'吝'),
(0xF9EE, 'M', u'燐'),
(0xF9EF, 'M', u'璘'),
(0xF9F0, 'M', u'藺'),
(0xF9F1, 'M', u'隣'),
(0xF9F2, 'M', u'鱗'),
(0xF9F3, 'M', u'麟'),
(0xF9F4, 'M', u'林'),
(0xF9F5, 'M', u'淋'),
(0xF9F6, 'M', u'臨'),
(0xF9F7, 'M', u'立'),
(0xF9F8, 'M', u'笠'),
(0xF9F9, 'M', u'粒'),
(0xF9FA, 'M', u'狀'),
(0xF9FB, 'M', u'炙'),
(0xF9FC, 'M', u'識'),
(0xF9FD, 'M', u'什'),
(0xF9FE, 'M', u'茶'),
(0xF9FF, 'M', u'刺'),
(0xFA00, 'M', u'切'),
(0xFA01, 'M', u'度'),
(0xFA02, 'M', u'拓'),
(0xFA03, 'M', u'糖'),
(0xFA04, 'M', u'宅'),
(0xFA05, 'M', u'洞'),
(0xFA06, 'M', u'暴'),
(0xFA07, 'M', u'輻'),
(0xFA08, 'M', u'行'),
(0xFA09, 'M', u'降'),
(0xFA0A, 'M', u'見'),
(0xFA0B, 'M', u'廓'),
(0xFA0C, 'M', u'兀'),
(0xFA0D, 'M', u'嗀'),
(0xFA0E, 'V'),
(0xFA10, 'M', u'塚'),
(0xFA11, 'V'),
(0xFA12, 'M', u'晴'),
(0xFA13, 'V'),
(0xFA15, 'M', u'凞'),
(0xFA16, 'M', u'猪'),
(0xFA17, 'M', u'益'),
(0xFA18, 'M', u'礼'),
]
def _seg_42():
return [
(0xFA19, 'M', u'神'),
(0xFA1A, 'M', u'祥'),
(0xFA1B, 'M', u'福'),
(0xFA1C, 'M', u'靖'),
(0xFA1D, 'M', u'精'),
(0xFA1E, 'M', u'羽'),
(0xFA1F, 'V'),
(0xFA20, 'M', u'蘒'),
(0xFA21, 'V'),
(0xFA22, 'M', u'諸'),
(0xFA23, 'V'),
(0xFA25, 'M', u'逸'),
(0xFA26, 'M', u'都'),
(0xFA27, 'V'),
(0xFA2A, 'M', u'飯'),
(0xFA2B, 'M', u'飼'),
(0xFA2C, 'M', u'館'),
(0xFA2D, 'M', u'鶴'),
(0xFA2E, 'M', u'郞'),
(0xFA2F, 'M', u'隷'),
(0xFA30, 'M', u'侮'),
(0xFA31, 'M', u'僧'),
(0xFA32, 'M', u'免'),
(0xFA33, 'M', u'勉'),
(0xFA34, 'M', u'勤'),
(0xFA35, 'M', u'卑'),
(0xFA36, 'M', u'喝'),
(0xFA37, 'M', u'嘆'),
(0xFA38, 'M', u'器'),
(0xFA39, 'M', u'塀'),
(0xFA3A, 'M', u'墨'),
(0xFA3B, 'M', u'層'),
(0xFA3C, 'M', u'屮'),
(0xFA3D, 'M', u'悔'),
(0xFA3E, 'M', u'慨'),
(0xFA3F, 'M', u'憎'),
(0xFA40, 'M', u'懲'),
(0xFA41, 'M', u'敏'),
(0xFA42, 'M', u'既'),
(0xFA43, 'M', u'暑'),
(0xFA44, 'M', u'梅'),
(0xFA45, 'M', u'海'),
(0xFA46, 'M', u'渚'),
(0xFA47, 'M', u'漢'),
(0xFA48, 'M', u'煮'),
(0xFA49, 'M', u'爫'),
(0xFA4A, 'M', u'琢'),
(0xFA4B, 'M', u'碑'),
(0xFA4C, 'M', u'社'),
(0xFA4D, 'M', u'祉'),
(0xFA4E, 'M', u'祈'),
(0xFA4F, 'M', u'祐'),
(0xFA50, 'M', u'祖'),
(0xFA51, 'M', u'祝'),
(0xFA52, 'M', u'禍'),
(0xFA53, 'M', u'禎'),
(0xFA54, 'M', u'穀'),
(0xFA55, 'M', u'突'),
(0xFA56, 'M', u'節'),
(0xFA57, 'M', u'練'),
(0xFA58, 'M', u'縉'),
(0xFA59, 'M', u'繁'),
(0xFA5A, 'M', u'署'),
(0xFA5B, 'M', u'者'),
(0xFA5C, 'M', u'臭'),
(0xFA5D, 'M', u'艹'),
(0xFA5F, 'M', u'著'),
(0xFA60, 'M', u'褐'),
(0xFA61, 'M', u'視'),
(0xFA62, 'M', u'謁'),
(0xFA63, 'M', u'謹'),
(0xFA64, 'M', u'賓'),
(0xFA65, 'M', u'贈'),
(0xFA66, 'M', u'辶'),
(0xFA67, 'M', u'逸'),
(0xFA68, 'M', u'難'),
(0xFA69, 'M', u'響'),
(0xFA6A, 'M', u'頻'),
(0xFA6B, 'M', u'恵'),
(0xFA6C, 'M', u'𤋮'),
(0xFA6D, 'M', u'舘'),
(0xFA6E, 'X'),
(0xFA70, 'M', u'並'),
(0xFA71, 'M', u'况'),
(0xFA72, 'M', u'全'),
(0xFA73, 'M', u'侀'),
(0xFA74, 'M', u'充'),
(0xFA75, 'M', u'冀'),
(0xFA76, 'M', u'勇'),
(0xFA77, 'M', u'勺'),
(0xFA78, 'M', u'喝'),
(0xFA79, 'M', u'啕'),
(0xFA7A, 'M', u'喙'),
(0xFA7B, 'M', u'嗢'),
(0xFA7C, 'M', u'塚'),
(0xFA7D, 'M', u'墳'),
(0xFA7E, 'M', u'奄'),
(0xFA7F, 'M', u'奔'),
(0xFA80, 'M', u'婢'),
(0xFA81, 'M', u'嬨'),
]
def _seg_43():
return [
(0xFA82, 'M', u'廒'),
(0xFA83, 'M', u'廙'),
(0xFA84, 'M', u'彩'),
(0xFA85, 'M', u'徭'),
(0xFA86, 'M', u'惘'),
(0xFA87, 'M', u'慎'),
(0xFA88, 'M', u'愈'),
(0xFA89, 'M', u'憎'),
(0xFA8A, 'M', u'慠'),
(0xFA8B, 'M', u'懲'),
(0xFA8C, 'M', u'戴'),
(0xFA8D, 'M', u'揄'),
(0xFA8E, 'M', u'搜'),
(0xFA8F, 'M', u'摒'),
(0xFA90, 'M', u'敖'),
(0xFA91, 'M', u'晴'),
(0xFA92, 'M', u'朗'),
(0xFA93, 'M', u'望'),
(0xFA94, 'M', u'杖'),
(0xFA95, 'M', u'歹'),
(0xFA96, 'M', u'殺'),
(0xFA97, 'M', u'流'),
(0xFA98, 'M', u'滛'),
(0xFA99, 'M', u'滋'),
(0xFA9A, 'M', u'漢'),
(0xFA9B, 'M', u'瀞'),
(0xFA9C, 'M', u'煮'),
(0xFA9D, 'M', u'瞧'),
(0xFA9E, 'M', u'爵'),
(0xFA9F, 'M', u'犯'),
(0xFAA0, 'M', u'猪'),
(0xFAA1, 'M', u'瑱'),
(0xFAA2, 'M', u'甆'),
(0xFAA3, 'M', u'画'),
(0xFAA4, 'M', u'瘝'),
(0xFAA5, 'M', u'瘟'),
(0xFAA6, 'M', u'益'),
(0xFAA7, 'M', u'盛'),
(0xFAA8, 'M', u'直'),
(0xFAA9, 'M', u'睊'),
(0xFAAA, 'M', u'着'),
(0xFAAB, 'M', u'磌'),
(0xFAAC, 'M', u'窱'),
(0xFAAD, 'M', u'節'),
(0xFAAE, 'M', u'类'),
(0xFAAF, 'M', u'絛'),
(0xFAB0, 'M', u'練'),
(0xFAB1, 'M', u'缾'),
(0xFAB2, 'M', u'者'),
(0xFAB3, 'M', u'荒'),
(0xFAB4, 'M', u'華'),
(0xFAB5, 'M', u'蝹'),
(0xFAB6, 'M', u'襁'),
(0xFAB7, 'M', u'覆'),
(0xFAB8, 'M', u'視'),
(0xFAB9, 'M', u'調'),
(0xFABA, 'M', u'諸'),
(0xFABB, 'M', u'請'),
(0xFABC, 'M', u'謁'),
(0xFABD, 'M', u'諾'),
(0xFABE, 'M', u'諭'),
(0xFABF, 'M', u'謹'),
(0xFAC0, 'M', u'變'),
(0xFAC1, 'M', u'贈'),
(0xFAC2, 'M', u'輸'),
(0xFAC3, 'M', u'遲'),
(0xFAC4, 'M', u'醙'),
(0xFAC5, 'M', u'鉶'),
(0xFAC6, 'M', u'陼'),
(0xFAC7, 'M', u'難'),
(0xFAC8, 'M', u'靖'),
(0xFAC9, 'M', u'韛'),
(0xFACA, 'M', u'響'),
(0xFACB, 'M', u'頋'),
(0xFACC, 'M', u'頻'),
(0xFACD, 'M', u'鬒'),
(0xFACE, 'M', u'龜'),
(0xFACF, 'M', u'𢡊'),
(0xFAD0, 'M', u'𢡄'),
(0xFAD1, 'M', u'𣏕'),
(0xFAD2, 'M', u'㮝'),
(0xFAD3, 'M', u'䀘'),
(0xFAD4, 'M', u'䀹'),
(0xFAD5, 'M', u'𥉉'),
(0xFAD6, 'M', u'𥳐'),
(0xFAD7, 'M', u'𧻓'),
(0xFAD8, 'M', u'齃'),
(0xFAD9, 'M', u'龎'),
(0xFADA, 'X'),
(0xFB00, 'M', u'ff'),
(0xFB01, 'M', u'fi'),
(0xFB02, 'M', u'fl'),
(0xFB03, 'M', u'ffi'),
(0xFB04, 'M', u'ffl'),
(0xFB05, 'M', u'st'),
(0xFB07, 'X'),
(0xFB13, 'M', u'մն'),
(0xFB14, 'M', u'մե'),
(0xFB15, 'M', u'մի'),
(0xFB16, 'M', u'վն'),
]
def _seg_44():
return [
(0xFB17, 'M', u'մխ'),
(0xFB18, 'X'),
(0xFB1D, 'M', u'יִ'),
(0xFB1E, 'V'),
(0xFB1F, 'M', u'ײַ'),
(0xFB20, 'M', u'ע'),
(0xFB21, 'M', u'א'),
(0xFB22, 'M', u'ד'),
(0xFB23, 'M', u'ה'),
(0xFB24, 'M', u'כ'),
(0xFB25, 'M', u'ל'),
(0xFB26, 'M', u'ם'),
(0xFB27, 'M', u'ר'),
(0xFB28, 'M', u'ת'),
(0xFB29, '3', u'+'),
(0xFB2A, 'M', u'שׁ'),
(0xFB2B, 'M', u'שׂ'),
(0xFB2C, 'M', u'שּׁ'),
(0xFB2D, 'M', u'שּׂ'),
(0xFB2E, 'M', u'אַ'),
(0xFB2F, 'M', u'אָ'),
(0xFB30, 'M', u'אּ'),
(0xFB31, 'M', u'בּ'),
(0xFB32, 'M', u'גּ'),
(0xFB33, 'M', u'דּ'),
(0xFB34, 'M', u'הּ'),
(0xFB35, 'M', u'וּ'),
(0xFB36, 'M', u'זּ'),
(0xFB37, 'X'),
(0xFB38, 'M', u'טּ'),
(0xFB39, 'M', u'יּ'),
(0xFB3A, 'M', u'ךּ'),
(0xFB3B, 'M', u'כּ'),
(0xFB3C, 'M', u'לּ'),
(0xFB3D, 'X'),
(0xFB3E, 'M', u'מּ'),
(0xFB3F, 'X'),
(0xFB40, 'M', u'נּ'),
(0xFB41, 'M', u'סּ'),
(0xFB42, 'X'),
(0xFB43, 'M', u'ףּ'),
(0xFB44, 'M', u'פּ'),
(0xFB45, 'X'),
(0xFB46, 'M', u'צּ'),
(0xFB47, 'M', u'קּ'),
(0xFB48, 'M', u'רּ'),
(0xFB49, 'M', u'שּ'),
(0xFB4A, 'M', u'תּ'),
(0xFB4B, 'M', u'וֹ'),
(0xFB4C, 'M', u'בֿ'),
(0xFB4D, 'M', u'כֿ'),
(0xFB4E, 'M', u'פֿ'),
(0xFB4F, 'M', u'אל'),
(0xFB50, 'M', u'ٱ'),
(0xFB52, 'M', u'ٻ'),
(0xFB56, 'M', u'پ'),
(0xFB5A, 'M', u'ڀ'),
(0xFB5E, 'M', u'ٺ'),
(0xFB62, 'M', u'ٿ'),
(0xFB66, 'M', u'ٹ'),
(0xFB6A, 'M', u'ڤ'),
(0xFB6E, 'M', u'ڦ'),
(0xFB72, 'M', u'ڄ'),
(0xFB76, 'M', u'ڃ'),
(0xFB7A, 'M', u'چ'),
(0xFB7E, 'M', u'ڇ'),
(0xFB82, 'M', u'ڍ'),
(0xFB84, 'M', u'ڌ'),
(0xFB86, 'M', u'ڎ'),
(0xFB88, 'M', u'ڈ'),
(0xFB8A, 'M', u'ژ'),
(0xFB8C, 'M', u'ڑ'),
(0xFB8E, 'M', u'ک'),
(0xFB92, 'M', u'گ'),
(0xFB96, 'M', u'ڳ'),
(0xFB9A, 'M', u'ڱ'),
(0xFB9E, 'M', u'ں'),
(0xFBA0, 'M', u'ڻ'),
(0xFBA4, 'M', u'ۀ'),
(0xFBA6, 'M', u'ہ'),
(0xFBAA, 'M', u'ھ'),
(0xFBAE, 'M', u'ے'),
(0xFBB0, 'M', u'ۓ'),
(0xFBB2, 'V'),
(0xFBC2, 'X'),
(0xFBD3, 'M', u'ڭ'),
(0xFBD7, 'M', u'ۇ'),
(0xFBD9, 'M', u'ۆ'),
(0xFBDB, 'M', u'ۈ'),
(0xFBDD, 'M', u'ۇٴ'),
(0xFBDE, 'M', u'ۋ'),
(0xFBE0, 'M', u'ۅ'),
(0xFBE2, 'M', u'ۉ'),
(0xFBE4, 'M', u'ې'),
(0xFBE8, 'M', u'ى'),
(0xFBEA, 'M', u'ئا'),
(0xFBEC, 'M', u'ئە'),
(0xFBEE, 'M', u'ئو'),
(0xFBF0, 'M', u'ئۇ'),
(0xFBF2, 'M', u'ئۆ'),
]
def _seg_45():
return [
(0xFBF4, 'M', u'ئۈ'),
(0xFBF6, 'M', u'ئې'),
(0xFBF9, 'M', u'ئى'),
(0xFBFC, 'M', u'ی'),
(0xFC00, 'M', u'ئج'),
(0xFC01, 'M', u'ئح'),
(0xFC02, 'M', u'ئم'),
(0xFC03, 'M', u'ئى'),
(0xFC04, 'M', u'ئي'),
(0xFC05, 'M', u'بج'),
(0xFC06, 'M', u'بح'),
(0xFC07, 'M', u'بخ'),
(0xFC08, 'M', u'بم'),
(0xFC09, 'M', u'بى'),
(0xFC0A, 'M', u'بي'),
(0xFC0B, 'M', u'تج'),
(0xFC0C, 'M', u'تح'),
(0xFC0D, 'M', u'تخ'),
(0xFC0E, 'M', u'تم'),
(0xFC0F, 'M', u'تى'),
(0xFC10, 'M', u'تي'),
(0xFC11, 'M', u'ثج'),
(0xFC12, 'M', u'ثم'),
(0xFC13, 'M', u'ثى'),
(0xFC14, 'M', u'ثي'),
(0xFC15, 'M', u'جح'),
(0xFC16, 'M', u'جم'),
(0xFC17, 'M', u'حج'),
(0xFC18, 'M', u'حم'),
(0xFC19, 'M', u'خج'),
(0xFC1A, 'M', u'خح'),
(0xFC1B, 'M', u'خم'),
(0xFC1C, 'M', u'سج'),
(0xFC1D, 'M', u'سح'),
(0xFC1E, 'M', u'سخ'),
(0xFC1F, 'M', u'سم'),
(0xFC20, 'M', u'صح'),
(0xFC21, 'M', u'صم'),
(0xFC22, 'M', u'ضج'),
(0xFC23, 'M', u'ضح'),
(0xFC24, 'M', u'ضخ'),
(0xFC25, 'M', u'ضم'),
(0xFC26, 'M', u'طح'),
(0xFC27, 'M', u'طم'),
(0xFC28, 'M', u'ظم'),
(0xFC29, 'M', u'عج'),
(0xFC2A, 'M', u'عم'),
(0xFC2B, 'M', u'غج'),
(0xFC2C, 'M', u'غم'),
(0xFC2D, 'M', u'فج'),
(0xFC2E, 'M', u'فح'),
(0xFC2F, 'M', u'فخ'),
(0xFC30, 'M', u'فم'),
(0xFC31, 'M', u'فى'),
(0xFC32, 'M', u'في'),
(0xFC33, 'M', u'قح'),
(0xFC34, 'M', u'قم'),
(0xFC35, 'M', u'قى'),
(0xFC36, 'M', u'قي'),
(0xFC37, 'M', u'كا'),
(0xFC38, 'M', u'كج'),
(0xFC39, 'M', u'كح'),
(0xFC3A, 'M', u'كخ'),
(0xFC3B, 'M', u'كل'),
(0xFC3C, 'M', u'كم'),
(0xFC3D, 'M', u'كى'),
(0xFC3E, 'M', u'كي'),
(0xFC3F, 'M', u'لج'),
(0xFC40, 'M', u'لح'),
(0xFC41, 'M', u'لخ'),
(0xFC42, 'M', u'لم'),
(0xFC43, 'M', u'لى'),
(0xFC44, 'M', u'لي'),
(0xFC45, 'M', u'مج'),
(0xFC46, 'M', u'مح'),
(0xFC47, 'M', u'مخ'),
(0xFC48, 'M', u'مم'),
(0xFC49, 'M', u'مى'),
(0xFC4A, 'M', u'مي'),
(0xFC4B, 'M', u'نج'),
(0xFC4C, 'M', u'نح'),
(0xFC4D, 'M', u'نخ'),
(0xFC4E, 'M', u'نم'),
(0xFC4F, 'M', u'نى'),
(0xFC50, 'M', u'ني'),
(0xFC51, 'M', u'هج'),
(0xFC52, 'M', u'هم'),
(0xFC53, 'M', u'هى'),
(0xFC54, 'M', u'هي'),
(0xFC55, 'M', u'يج'),
(0xFC56, 'M', u'يح'),
(0xFC57, 'M', u'يخ'),
(0xFC58, 'M', u'يم'),
(0xFC59, 'M', u'يى'),
(0xFC5A, 'M', u'يي'),
(0xFC5B, 'M', u'ذٰ'),
(0xFC5C, 'M', u'رٰ'),
(0xFC5D, 'M', u'ىٰ'),
(0xFC5E, '3', u' ٌّ'),
(0xFC5F, '3', u' ٍّ'),
]
def _seg_46():
return [
(0xFC60, '3', u' َّ'),
(0xFC61, '3', u' ُّ'),
(0xFC62, '3', u' ِّ'),
(0xFC63, '3', u' ّٰ'),
(0xFC64, 'M', u'ئر'),
(0xFC65, 'M', u'ئز'),
(0xFC66, 'M', u'ئم'),
(0xFC67, 'M', u'ئن'),
(0xFC68, 'M', u'ئى'),
(0xFC69, 'M', u'ئي'),
(0xFC6A, 'M', u'بر'),
(0xFC6B, 'M', u'بز'),
(0xFC6C, 'M', u'بم'),
(0xFC6D, 'M', u'بن'),
(0xFC6E, 'M', u'بى'),
(0xFC6F, 'M', u'بي'),
(0xFC70, 'M', u'تر'),
(0xFC71, 'M', u'تز'),
(0xFC72, 'M', u'تم'),
(0xFC73, 'M', u'تن'),
(0xFC74, 'M', u'تى'),
(0xFC75, 'M', u'تي'),
(0xFC76, 'M', u'ثر'),
(0xFC77, 'M', u'ثز'),
(0xFC78, 'M', u'ثم'),
(0xFC79, 'M', u'ثن'),
(0xFC7A, 'M', u'ثى'),
(0xFC7B, 'M', u'ثي'),
(0xFC7C, 'M', u'فى'),
(0xFC7D, 'M', u'في'),
(0xFC7E, 'M', u'قى'),
(0xFC7F, 'M', u'قي'),
(0xFC80, 'M', u'كا'),
(0xFC81, 'M', u'كل'),
(0xFC82, 'M', u'كم'),
(0xFC83, 'M', u'كى'),
(0xFC84, 'M', u'كي'),
(0xFC85, 'M', u'لم'),
(0xFC86, 'M', u'لى'),
(0xFC87, 'M', u'لي'),
(0xFC88, 'M', u'ما'),
(0xFC89, 'M', u'مم'),
(0xFC8A, 'M', u'نر'),
(0xFC8B, 'M', u'نز'),
(0xFC8C, 'M', u'نم'),
(0xFC8D, 'M', u'نن'),
(0xFC8E, 'M', u'نى'),
(0xFC8F, 'M', u'ني'),
(0xFC90, 'M', u'ىٰ'),
(0xFC91, 'M', u'ير'),
(0xFC92, 'M', u'يز'),
(0xFC93, 'M', u'يم'),
(0xFC94, 'M', u'ين'),
(0xFC95, 'M', u'يى'),
(0xFC96, 'M', u'يي'),
(0xFC97, 'M', u'ئج'),
(0xFC98, 'M', u'ئح'),
(0xFC99, 'M', u'ئخ'),
(0xFC9A, 'M', u'ئم'),
(0xFC9B, 'M', u'ئه'),
(0xFC9C, 'M', u'بج'),
(0xFC9D, 'M', u'بح'),
(0xFC9E, 'M', u'بخ'),
(0xFC9F, 'M', u'بم'),
(0xFCA0, 'M', u'به'),
(0xFCA1, 'M', u'تج'),
(0xFCA2, 'M', u'تح'),
(0xFCA3, 'M', u'تخ'),
(0xFCA4, 'M', u'تم'),
(0xFCA5, 'M', u'ته'),
(0xFCA6, 'M', u'ثم'),
(0xFCA7, 'M', u'جح'),
(0xFCA8, 'M', u'جم'),
(0xFCA9, 'M', u'حج'),
(0xFCAA, 'M', u'حم'),
(0xFCAB, 'M', u'خج'),
(0xFCAC, 'M', u'خم'),
(0xFCAD, 'M', u'سج'),
(0xFCAE, 'M', u'سح'),
(0xFCAF, 'M', u'سخ'),
(0xFCB0, 'M', u'سم'),
(0xFCB1, 'M', u'صح'),
(0xFCB2, 'M', u'صخ'),
(0xFCB3, 'M', u'صم'),
(0xFCB4, 'M', u'ضج'),
(0xFCB5, 'M', u'ضح'),
(0xFCB6, 'M', u'ضخ'),
(0xFCB7, 'M', u'ضم'),
(0xFCB8, 'M', u'طح'),
(0xFCB9, 'M', u'ظم'),
(0xFCBA, 'M', u'عج'),
(0xFCBB, 'M', u'عم'),
(0xFCBC, 'M', u'غج'),
(0xFCBD, 'M', u'غم'),
(0xFCBE, 'M', u'فج'),
(0xFCBF, 'M', u'فح'),
(0xFCC0, 'M', u'فخ'),
(0xFCC1, 'M', u'فم'),
(0xFCC2, 'M', u'قح'),
(0xFCC3, 'M', u'قم'),
]
def _seg_47():
return [
(0xFCC4, 'M', u'كج'),
(0xFCC5, 'M', u'كح'),
(0xFCC6, 'M', u'كخ'),
(0xFCC7, 'M', u'كل'),
(0xFCC8, 'M', u'كم'),
(0xFCC9, 'M', u'لج'),
(0xFCCA, 'M', u'لح'),
(0xFCCB, 'M', u'لخ'),
(0xFCCC, 'M', u'لم'),
(0xFCCD, 'M', u'له'),
(0xFCCE, 'M', u'مج'),
(0xFCCF, 'M', u'مح'),
(0xFCD0, 'M', u'مخ'),
(0xFCD1, 'M', u'مم'),
(0xFCD2, 'M', u'نج'),
(0xFCD3, 'M', u'نح'),
(0xFCD4, 'M', u'نخ'),
(0xFCD5, 'M', u'نم'),
(0xFCD6, 'M', u'نه'),
(0xFCD7, 'M', u'هج'),
(0xFCD8, 'M', u'هم'),
(0xFCD9, 'M', u'هٰ'),
(0xFCDA, 'M', u'يج'),
(0xFCDB, 'M', u'يح'),
(0xFCDC, 'M', u'يخ'),
(0xFCDD, 'M', u'يم'),
(0xFCDE, 'M', u'يه'),
(0xFCDF, 'M', u'ئم'),
(0xFCE0, 'M', u'ئه'),
(0xFCE1, 'M', u'بم'),
(0xFCE2, 'M', u'به'),
(0xFCE3, 'M', u'تم'),
(0xFCE4, 'M', u'ته'),
(0xFCE5, 'M', u'ثم'),
(0xFCE6, 'M', u'ثه'),
(0xFCE7, 'M', u'سم'),
(0xFCE8, 'M', u'سه'),
(0xFCE9, 'M', u'شم'),
(0xFCEA, 'M', u'شه'),
(0xFCEB, 'M', u'كل'),
(0xFCEC, 'M', u'كم'),
(0xFCED, 'M', u'لم'),
(0xFCEE, 'M', u'نم'),
(0xFCEF, 'M', u'نه'),
(0xFCF0, 'M', u'يم'),
(0xFCF1, 'M', u'يه'),
(0xFCF2, 'M', u'ـَّ'),
(0xFCF3, 'M', u'ـُّ'),
(0xFCF4, 'M', u'ـِّ'),
(0xFCF5, 'M', u'طى'),
(0xFCF6, 'M', u'طي'),
(0xFCF7, 'M', u'عى'),
(0xFCF8, 'M', u'عي'),
(0xFCF9, 'M', u'غى'),
(0xFCFA, 'M', u'غي'),
(0xFCFB, 'M', u'سى'),
(0xFCFC, 'M', u'سي'),
(0xFCFD, 'M', u'شى'),
(0xFCFE, 'M', u'شي'),
(0xFCFF, 'M', u'حى'),
(0xFD00, 'M', u'حي'),
(0xFD01, 'M', u'جى'),
(0xFD02, 'M', u'جي'),
(0xFD03, 'M', u'خى'),
(0xFD04, 'M', u'خي'),
(0xFD05, 'M', u'صى'),
(0xFD06, 'M', u'صي'),
(0xFD07, 'M', u'ضى'),
(0xFD08, 'M', u'ضي'),
(0xFD09, 'M', u'شج'),
(0xFD0A, 'M', u'شح'),
(0xFD0B, 'M', u'شخ'),
(0xFD0C, 'M', u'شم'),
(0xFD0D, 'M', u'شر'),
(0xFD0E, 'M', u'سر'),
(0xFD0F, 'M', u'صر'),
(0xFD10, 'M', u'ضر'),
(0xFD11, 'M', u'طى'),
(0xFD12, 'M', u'طي'),
(0xFD13, 'M', u'عى'),
(0xFD14, 'M', u'عي'),
(0xFD15, 'M', u'غى'),
(0xFD16, 'M', u'غي'),
(0xFD17, 'M', u'سى'),
(0xFD18, 'M', u'سي'),
(0xFD19, 'M', u'شى'),
(0xFD1A, 'M', u'شي'),
(0xFD1B, 'M', u'حى'),
(0xFD1C, 'M', u'حي'),
(0xFD1D, 'M', u'جى'),
(0xFD1E, 'M', u'جي'),
(0xFD1F, 'M', u'خى'),
(0xFD20, 'M', u'خي'),
(0xFD21, 'M', u'صى'),
(0xFD22, 'M', u'صي'),
(0xFD23, 'M', u'ضى'),
(0xFD24, 'M', u'ضي'),
(0xFD25, 'M', u'شج'),
(0xFD26, 'M', u'شح'),
(0xFD27, 'M', u'شخ'),
]
def _seg_48():
return [
(0xFD28, 'M', u'شم'),
(0xFD29, 'M', u'شر'),
(0xFD2A, 'M', u'سر'),
(0xFD2B, 'M', u'صر'),
(0xFD2C, 'M', u'ضر'),
(0xFD2D, 'M', u'شج'),
(0xFD2E, 'M', u'شح'),
(0xFD2F, 'M', u'شخ'),
(0xFD30, 'M', u'شم'),
(0xFD31, 'M', u'سه'),
(0xFD32, 'M', u'شه'),
(0xFD33, 'M', u'طم'),
(0xFD34, 'M', u'سج'),
(0xFD35, 'M', u'سح'),
(0xFD36, 'M', u'سخ'),
(0xFD37, 'M', u'شج'),
(0xFD38, 'M', u'شح'),
(0xFD39, 'M', u'شخ'),
(0xFD3A, 'M', u'طم'),
(0xFD3B, 'M', u'ظم'),
(0xFD3C, 'M', u'اً'),
(0xFD3E, 'V'),
(0xFD40, 'X'),
(0xFD50, 'M', u'تجم'),
(0xFD51, 'M', u'تحج'),
(0xFD53, 'M', u'تحم'),
(0xFD54, 'M', u'تخم'),
(0xFD55, 'M', u'تمج'),
(0xFD56, 'M', u'تمح'),
(0xFD57, 'M', u'تمخ'),
(0xFD58, 'M', u'جمح'),
(0xFD5A, 'M', u'حمي'),
(0xFD5B, 'M', u'حمى'),
(0xFD5C, 'M', u'سحج'),
(0xFD5D, 'M', u'سجح'),
(0xFD5E, 'M', u'سجى'),
(0xFD5F, 'M', u'سمح'),
(0xFD61, 'M', u'سمج'),
(0xFD62, 'M', u'سمم'),
(0xFD64, 'M', u'صحح'),
(0xFD66, 'M', u'صمم'),
(0xFD67, 'M', u'شحم'),
(0xFD69, 'M', u'شجي'),
(0xFD6A, 'M', u'شمخ'),
(0xFD6C, 'M', u'شمم'),
(0xFD6E, 'M', u'ضحى'),
(0xFD6F, 'M', u'ضخم'),
(0xFD71, 'M', u'طمح'),
(0xFD73, 'M', u'طمم'),
(0xFD74, 'M', u'طمي'),
(0xFD75, 'M', u'عجم'),
(0xFD76, 'M', u'عمم'),
(0xFD78, 'M', u'عمى'),
(0xFD79, 'M', u'غمم'),
(0xFD7A, 'M', u'غمي'),
(0xFD7B, 'M', u'غمى'),
(0xFD7C, 'M', u'فخم'),
(0xFD7E, 'M', u'قمح'),
(0xFD7F, 'M', u'قمم'),
(0xFD80, 'M', u'لحم'),
(0xFD81, 'M', u'لحي'),
(0xFD82, 'M', u'لحى'),
(0xFD83, 'M', u'لجج'),
(0xFD85, 'M', u'لخم'),
(0xFD87, 'M', u'لمح'),
(0xFD89, 'M', u'محج'),
(0xFD8A, 'M', u'محم'),
(0xFD8B, 'M', u'محي'),
(0xFD8C, 'M', u'مجح'),
(0xFD8D, 'M', u'مجم'),
(0xFD8E, 'M', u'مخج'),
(0xFD8F, 'M', u'مخم'),
(0xFD90, 'X'),
(0xFD92, 'M', u'مجخ'),
(0xFD93, 'M', u'همج'),
(0xFD94, 'M', u'همم'),
(0xFD95, 'M', u'نحم'),
(0xFD96, 'M', u'نحى'),
(0xFD97, 'M', u'نجم'),
(0xFD99, 'M', u'نجى'),
(0xFD9A, 'M', u'نمي'),
(0xFD9B, 'M', u'نمى'),
(0xFD9C, 'M', u'يمم'),
(0xFD9E, 'M', u'بخي'),
(0xFD9F, 'M', u'تجي'),
(0xFDA0, 'M', u'تجى'),
(0xFDA1, 'M', u'تخي'),
(0xFDA2, 'M', u'تخى'),
(0xFDA3, 'M', u'تمي'),
(0xFDA4, 'M', u'تمى'),
(0xFDA5, 'M', u'جمي'),
(0xFDA6, 'M', u'جحى'),
(0xFDA7, 'M', u'جمى'),
(0xFDA8, 'M', u'سخى'),
(0xFDA9, 'M', u'صحي'),
(0xFDAA, 'M', u'شحي'),
(0xFDAB, 'M', u'ضحي'),
(0xFDAC, 'M', u'لجي'),
(0xFDAD, 'M', u'لمي'),
(0xFDAE, 'M', u'يحي'),
]
def _seg_49():
return [
(0xFDAF, 'M', u'يجي'),
(0xFDB0, 'M', u'يمي'),
(0xFDB1, 'M', u'ممي'),
(0xFDB2, 'M', u'قمي'),
(0xFDB3, 'M', u'نحي'),
(0xFDB4, 'M', u'قمح'),
(0xFDB5, 'M', u'لحم'),
(0xFDB6, 'M', u'عمي'),
(0xFDB7, 'M', u'كمي'),
(0xFDB8, 'M', u'نجح'),
(0xFDB9, 'M', u'مخي'),
(0xFDBA, 'M', u'لجم'),
(0xFDBB, 'M', u'كمم'),
(0xFDBC, 'M', u'لجم'),
(0xFDBD, 'M', u'نجح'),
(0xFDBE, 'M', u'جحي'),
(0xFDBF, 'M', u'حجي'),
(0xFDC0, 'M', u'مجي'),
(0xFDC1, 'M', u'فمي'),
(0xFDC2, 'M', u'بحي'),
(0xFDC3, 'M', u'كمم'),
(0xFDC4, 'M', u'عجم'),
(0xFDC5, 'M', u'صمم'),
(0xFDC6, 'M', u'سخي'),
(0xFDC7, 'M', u'نجي'),
(0xFDC8, 'X'),
(0xFDF0, 'M', u'صلے'),
(0xFDF1, 'M', u'قلے'),
(0xFDF2, 'M', u'الله'),
(0xFDF3, 'M', u'اكبر'),
(0xFDF4, 'M', u'محمد'),
(0xFDF5, 'M', u'صلعم'),
(0xFDF6, 'M', u'رسول'),
(0xFDF7, 'M', u'عليه'),
(0xFDF8, 'M', u'وسلم'),
(0xFDF9, 'M', u'صلى'),
(0xFDFA, '3', u'صلى الله عليه وسلم'),
(0xFDFB, '3', u'جل جلاله'),
(0xFDFC, 'M', u'ریال'),
(0xFDFD, 'V'),
(0xFDFE, 'X'),
(0xFE00, 'I'),
(0xFE10, '3', u','),
(0xFE11, 'M', u'、'),
(0xFE12, 'X'),
(0xFE13, '3', u':'),
(0xFE14, '3', u';'),
(0xFE15, '3', u'!'),
(0xFE16, '3', u'?'),
(0xFE17, 'M', u'〖'),
(0xFE18, 'M', u'〗'),
(0xFE19, 'X'),
(0xFE20, 'V'),
(0xFE30, 'X'),
(0xFE31, 'M', u'—'),
(0xFE32, 'M', u'–'),
(0xFE33, '3', u'_'),
(0xFE35, '3', u'('),
(0xFE36, '3', u')'),
(0xFE37, '3', u'{'),
(0xFE38, '3', u'}'),
(0xFE39, 'M', u'〔'),
(0xFE3A, 'M', u'〕'),
(0xFE3B, 'M', u'【'),
(0xFE3C, 'M', u'】'),
(0xFE3D, 'M', u'《'),
(0xFE3E, 'M', u'》'),
(0xFE3F, 'M', u'〈'),
(0xFE40, 'M', u'〉'),
(0xFE41, 'M', u'「'),
(0xFE42, 'M', u'」'),
(0xFE43, 'M', u'『'),
(0xFE44, 'M', u'』'),
(0xFE45, 'V'),
(0xFE47, '3', u'['),
(0xFE48, '3', u']'),
(0xFE49, '3', u' ̅'),
(0xFE4D, '3', u'_'),
(0xFE50, '3', u','),
(0xFE51, 'M', u'、'),
(0xFE52, 'X'),
(0xFE54, '3', u';'),
(0xFE55, '3', u':'),
(0xFE56, '3', u'?'),
(0xFE57, '3', u'!'),
(0xFE58, 'M', u'—'),
(0xFE59, '3', u'('),
(0xFE5A, '3', u')'),
(0xFE5B, '3', u'{'),
(0xFE5C, '3', u'}'),
(0xFE5D, 'M', u'〔'),
(0xFE5E, 'M', u'〕'),
(0xFE5F, '3', u'#'),
(0xFE60, '3', u'&'),
(0xFE61, '3', u'*'),
(0xFE62, '3', u'+'),
(0xFE63, 'M', u'-'),
(0xFE64, '3', u'<'),
(0xFE65, '3', u'>'),
(0xFE66, '3', u'='),
]
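# A hedged sketch of how the one-letter statuses seen in these rows
# drive the remapping of a single character, modelled on the behaviour
# of idna.core.uts46_remap with STD3 rules relaxed: 'V' keeps the
# character, 'M' and '3' substitute the recorded replacement, 'I' drops
# the character, and 'X' rejects the input. _remap_char is a
# hypothetical helper for illustration only (deviation rows and strict
# STD3 handling are deliberately not modelled), not part of the
# generated data.
def _remap_char(row, char):
    status = row[1]
    if status == 'V':
        return char
    if status in ('M', '3'):
        # Mapping rows carry the replacement string as their third field.
        return row[2]
    if status == 'I':
        return u''
    # 'X' (and any status this sketch does not model) is disallowed.
    raise ValueError('disallowed codepoint %r' % (char,))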
def _seg_50():
return [
(0xFE67, 'X'),
(0xFE68, '3', u'\\'),
(0xFE69, '3', u'$'),
(0xFE6A, '3', u'%'),
(0xFE6B, '3', u'@'),
(0xFE6C, 'X'),
(0xFE70, '3', u' ً'),
(0xFE71, 'M', u'ـً'),
(0xFE72, '3', u' ٌ'),
(0xFE73, 'V'),
(0xFE74, '3', u' ٍ'),
(0xFE75, 'X'),
(0xFE76, '3', u' َ'),
(0xFE77, 'M', u'ـَ'),
(0xFE78, '3', u' ُ'),
(0xFE79, 'M', u'ـُ'),
(0xFE7A, '3', u' ِ'),
(0xFE7B, 'M', u'ـِ'),
(0xFE7C, '3', u' ّ'),
(0xFE7D, 'M', u'ـّ'),
(0xFE7E, '3', u' ْ'),
(0xFE7F, 'M', u'ـْ'),
(0xFE80, 'M', u'ء'),
(0xFE81, 'M', u'آ'),
(0xFE83, 'M', u'أ'),
(0xFE85, 'M', u'ؤ'),
(0xFE87, 'M', u'إ'),
(0xFE89, 'M', u'ئ'),
(0xFE8D, 'M', u'ا'),
(0xFE8F, 'M', u'ب'),
(0xFE93, 'M', u'ة'),
(0xFE95, 'M', u'ت'),
(0xFE99, 'M', u'ث'),
(0xFE9D, 'M', u'ج'),
(0xFEA1, 'M', u'ح'),
(0xFEA5, 'M', u'خ'),
(0xFEA9, 'M', u'د'),
(0xFEAB, 'M', u'ذ'),
(0xFEAD, 'M', u'ر'),
(0xFEAF, 'M', u'ز'),
(0xFEB1, 'M', u'س'),
(0xFEB5, 'M', u'ش'),
(0xFEB9, 'M', u'ص'),
(0xFEBD, 'M', u'ض'),
(0xFEC1, 'M', u'ط'),
(0xFEC5, 'M', u'ظ'),
(0xFEC9, 'M', u'ع'),
(0xFECD, 'M', u'غ'),
(0xFED1, 'M', u'ف'),
(0xFED5, 'M', u'ق'),
(0xFED9, 'M', u'ك'),
(0xFEDD, 'M', u'ل'),
(0xFEE1, 'M', u'م'),
(0xFEE5, 'M', u'ن'),
(0xFEE9, 'M', u'ه'),
(0xFEED, 'M', u'و'),
(0xFEEF, 'M', u'ى'),
(0xFEF1, 'M', u'ي'),
(0xFEF5, 'M', u'لآ'),
(0xFEF7, 'M', u'لأ'),
(0xFEF9, 'M', u'لإ'),
(0xFEFB, 'M', u'لا'),
(0xFEFD, 'X'),
(0xFEFF, 'I'),
(0xFF00, 'X'),
(0xFF01, '3', u'!'),
(0xFF02, '3', u'"'),
(0xFF03, '3', u'#'),
(0xFF04, '3', u'$'),
(0xFF05, '3', u'%'),
(0xFF06, '3', u'&'),
(0xFF07, '3', u'\''),
(0xFF08, '3', u'('),
(0xFF09, '3', u')'),
(0xFF0A, '3', u'*'),
(0xFF0B, '3', u'+'),
(0xFF0C, '3', u','),
(0xFF0D, 'M', u'-'),
(0xFF0E, 'M', u'.'),
(0xFF0F, '3', u'/'),
(0xFF10, 'M', u'0'),
(0xFF11, 'M', u'1'),
(0xFF12, 'M', u'2'),
(0xFF13, 'M', u'3'),
(0xFF14, 'M', u'4'),
(0xFF15, 'M', u'5'),
(0xFF16, 'M', u'6'),
(0xFF17, 'M', u'7'),
(0xFF18, 'M', u'8'),
(0xFF19, 'M', u'9'),
(0xFF1A, '3', u':'),
(0xFF1B, '3', u';'),
(0xFF1C, '3', u'<'),
(0xFF1D, '3', u'='),
(0xFF1E, '3', u'>'),
(0xFF1F, '3', u'?'),
(0xFF20, '3', u'@'),
(0xFF21, 'M', u'a'),
(0xFF22, 'M', u'b'),
(0xFF23, 'M', u'c'),
]
def _seg_51():
return [
(0xFF24, 'M', u'd'),
(0xFF25, 'M', u'e'),
(0xFF26, 'M', u'f'),
(0xFF27, 'M', u'g'),
(0xFF28, 'M', u'h'),
(0xFF29, 'M', u'i'),
(0xFF2A, 'M', u'j'),
(0xFF2B, 'M', u'k'),
(0xFF2C, 'M', u'l'),
(0xFF2D, 'M', u'm'),
(0xFF2E, 'M', u'n'),
(0xFF2F, 'M', u'o'),
(0xFF30, 'M', u'p'),
(0xFF31, 'M', u'q'),
(0xFF32, 'M', u'r'),
(0xFF33, 'M', u's'),
(0xFF34, 'M', u't'),
(0xFF35, 'M', u'u'),
(0xFF36, 'M', u'v'),
(0xFF37, 'M', u'w'),
(0xFF38, 'M', u'x'),
(0xFF39, 'M', u'y'),
(0xFF3A, 'M', u'z'),
(0xFF3B, '3', u'['),
(0xFF3C, '3', u'\\'),
(0xFF3D, '3', u']'),
(0xFF3E, '3', u'^'),
(0xFF3F, '3', u'_'),
(0xFF40, '3', u'`'),
(0xFF41, 'M', u'a'),
(0xFF42, 'M', u'b'),
(0xFF43, 'M', u'c'),
(0xFF44, 'M', u'd'),
(0xFF45, 'M', u'e'),
(0xFF46, 'M', u'f'),
(0xFF47, 'M', u'g'),
(0xFF48, 'M', u'h'),
(0xFF49, 'M', u'i'),
(0xFF4A, 'M', u'j'),
(0xFF4B, 'M', u'k'),
(0xFF4C, 'M', u'l'),
(0xFF4D, 'M', u'm'),
(0xFF4E, 'M', u'n'),
(0xFF4F, 'M', u'o'),
(0xFF50, 'M', u'p'),
(0xFF51, 'M', u'q'),
(0xFF52, 'M', u'r'),
(0xFF53, 'M', u's'),
(0xFF54, 'M', u't'),
(0xFF55, 'M', u'u'),
(0xFF56, 'M', u'v'),
(0xFF57, 'M', u'w'),
(0xFF58, 'M', u'x'),
(0xFF59, 'M', u'y'),
(0xFF5A, 'M', u'z'),
(0xFF5B, '3', u'{'),
(0xFF5C, '3', u'|'),
(0xFF5D, '3', u'}'),
(0xFF5E, '3', u'~'),
(0xFF5F, 'M', u'⦅'),
(0xFF60, 'M', u'⦆'),
(0xFF61, 'M', u'.'),
(0xFF62, 'M', u'「'),
(0xFF63, 'M', u'」'),
(0xFF64, 'M', u'、'),
(0xFF65, 'M', u'・'),
(0xFF66, 'M', u'ヲ'),
(0xFF67, 'M', u'ァ'),
(0xFF68, 'M', u'ィ'),
(0xFF69, 'M', u'ゥ'),
(0xFF6A, 'M', u'ェ'),
(0xFF6B, 'M', u'ォ'),
(0xFF6C, 'M', u'ャ'),
(0xFF6D, 'M', u'ュ'),
(0xFF6E, 'M', u'ョ'),
(0xFF6F, 'M', u'ッ'),
(0xFF70, 'M', u'ー'),
(0xFF71, 'M', u'ア'),
(0xFF72, 'M', u'イ'),
(0xFF73, 'M', u'ウ'),
(0xFF74, 'M', u'エ'),
(0xFF75, 'M', u'オ'),
(0xFF76, 'M', u'カ'),
(0xFF77, 'M', u'キ'),
(0xFF78, 'M', u'ク'),
(0xFF79, 'M', u'ケ'),
(0xFF7A, 'M', u'コ'),
(0xFF7B, 'M', u'サ'),
(0xFF7C, 'M', u'シ'),
(0xFF7D, 'M', u'ス'),
(0xFF7E, 'M', u'セ'),
(0xFF7F, 'M', u'ソ'),
(0xFF80, 'M', u'タ'),
(0xFF81, 'M', u'チ'),
(0xFF82, 'M', u'ツ'),
(0xFF83, 'M', u'テ'),
(0xFF84, 'M', u'ト'),
(0xFF85, 'M', u'ナ'),
(0xFF86, 'M', u'ニ'),
(0xFF87, 'M', u'ヌ'),
]
def _seg_52():
return [
(0xFF88, 'M', u'ネ'),
(0xFF89, 'M', u'ノ'),
(0xFF8A, 'M', u'ハ'),
(0xFF8B, 'M', u'ヒ'),
(0xFF8C, 'M', u'フ'),
(0xFF8D, 'M', u'ヘ'),
(0xFF8E, 'M', u'ホ'),
(0xFF8F, 'M', u'マ'),
(0xFF90, 'M', u'ミ'),
(0xFF91, 'M', u'ム'),
(0xFF92, 'M', u'メ'),
(0xFF93, 'M', u'モ'),
(0xFF94, 'M', u'ヤ'),
(0xFF95, 'M', u'ユ'),
(0xFF96, 'M', u'ヨ'),
(0xFF97, 'M', u'ラ'),
(0xFF98, 'M', u'リ'),
(0xFF99, 'M', u'ル'),
(0xFF9A, 'M', u'レ'),
(0xFF9B, 'M', u'ロ'),
(0xFF9C, 'M', u'ワ'),
(0xFF9D, 'M', u'ン'),
(0xFF9E, 'M', u'゙'),
(0xFF9F, 'M', u'゚'),
(0xFFA0, 'X'),
(0xFFA1, 'M', u'ᄀ'),
(0xFFA2, 'M', u'ᄁ'),
(0xFFA3, 'M', u'ᆪ'),
(0xFFA4, 'M', u'ᄂ'),
(0xFFA5, 'M', u'ᆬ'),
(0xFFA6, 'M', u'ᆭ'),
(0xFFA7, 'M', u'ᄃ'),
(0xFFA8, 'M', u'ᄄ'),
(0xFFA9, 'M', u'ᄅ'),
(0xFFAA, 'M', u'ᆰ'),
(0xFFAB, 'M', u'ᆱ'),
(0xFFAC, 'M', u'ᆲ'),
(0xFFAD, 'M', u'ᆳ'),
(0xFFAE, 'M', u'ᆴ'),
(0xFFAF, 'M', u'ᆵ'),
(0xFFB0, 'M', u'ᄚ'),
(0xFFB1, 'M', u'ᄆ'),
(0xFFB2, 'M', u'ᄇ'),
(0xFFB3, 'M', u'ᄈ'),
(0xFFB4, 'M', u'ᄡ'),
(0xFFB5, 'M', u'ᄉ'),
(0xFFB6, 'M', u'ᄊ'),
(0xFFB7, 'M', u'ᄋ'),
(0xFFB8, 'M', u'ᄌ'),
(0xFFB9, 'M', u'ᄍ'),
(0xFFBA, 'M', u'ᄎ'),
(0xFFBB, 'M', u'ᄏ'),
(0xFFBC, 'M', u'ᄐ'),
(0xFFBD, 'M', u'ᄑ'),
(0xFFBE, 'M', u'ᄒ'),
(0xFFBF, 'X'),
(0xFFC2, 'M', u'ᅡ'),
(0xFFC3, 'M', u'ᅢ'),
(0xFFC4, 'M', u'ᅣ'),
(0xFFC5, 'M', u'ᅤ'),
(0xFFC6, 'M', u'ᅥ'),
(0xFFC7, 'M', u'ᅦ'),
(0xFFC8, 'X'),
(0xFFCA, 'M', u'ᅧ'),
(0xFFCB, 'M', u'ᅨ'),
(0xFFCC, 'M', u'ᅩ'),
(0xFFCD, 'M', u'ᅪ'),
(0xFFCE, 'M', u'ᅫ'),
(0xFFCF, 'M', u'ᅬ'),
(0xFFD0, 'X'),
(0xFFD2, 'M', u'ᅭ'),
(0xFFD3, 'M', u'ᅮ'),
(0xFFD4, 'M', u'ᅯ'),
(0xFFD5, 'M', u'ᅰ'),
(0xFFD6, 'M', u'ᅱ'),
(0xFFD7, 'M', u'ᅲ'),
(0xFFD8, 'X'),
(0xFFDA, 'M', u'ᅳ'),
(0xFFDB, 'M', u'ᅴ'),
(0xFFDC, 'M', u'ᅵ'),
(0xFFDD, 'X'),
(0xFFE0, 'M', u'¢'),
(0xFFE1, 'M', u'£'),
(0xFFE2, 'M', u'¬'),
(0xFFE3, '3', u' ̄'),
(0xFFE4, 'M', u'¦'),
(0xFFE5, 'M', u'¥'),
(0xFFE6, 'M', u'₩'),
(0xFFE7, 'X'),
(0xFFE8, 'M', u'│'),
(0xFFE9, 'M', u'←'),
(0xFFEA, 'M', u'↑'),
(0xFFEB, 'M', u'→'),
(0xFFEC, 'M', u'↓'),
(0xFFED, 'M', u'■'),
(0xFFEE, 'M', u'○'),
(0xFFEF, 'X'),
(0x10000, 'V'),
(0x1000C, 'X'),
(0x1000D, 'V'),
]
def _seg_53():
return [
(0x10027, 'X'),
(0x10028, 'V'),
(0x1003B, 'X'),
(0x1003C, 'V'),
(0x1003E, 'X'),
(0x1003F, 'V'),
(0x1004E, 'X'),
(0x10050, 'V'),
(0x1005E, 'X'),
(0x10080, 'V'),
(0x100FB, 'X'),
(0x10100, 'V'),
(0x10103, 'X'),
(0x10107, 'V'),
(0x10134, 'X'),
(0x10137, 'V'),
(0x1018F, 'X'),
(0x10190, 'V'),
(0x1019D, 'X'),
(0x101A0, 'V'),
(0x101A1, 'X'),
(0x101D0, 'V'),
(0x101FE, 'X'),
(0x10280, 'V'),
(0x1029D, 'X'),
(0x102A0, 'V'),
(0x102D1, 'X'),
(0x102E0, 'V'),
(0x102FC, 'X'),
(0x10300, 'V'),
(0x10324, 'X'),
(0x1032D, 'V'),
(0x1034B, 'X'),
(0x10350, 'V'),
(0x1037B, 'X'),
(0x10380, 'V'),
(0x1039E, 'X'),
(0x1039F, 'V'),
(0x103C4, 'X'),
(0x103C8, 'V'),
(0x103D6, 'X'),
(0x10400, 'M', u'𐐨'),
(0x10401, 'M', u'𐐩'),
(0x10402, 'M', u'𐐪'),
(0x10403, 'M', u'𐐫'),
(0x10404, 'M', u'𐐬'),
(0x10405, 'M', u'𐐭'),
(0x10406, 'M', u'𐐮'),
(0x10407, 'M', u'𐐯'),
(0x10408, 'M', u'𐐰'),
(0x10409, 'M', u'𐐱'),
(0x1040A, 'M', u'𐐲'),
(0x1040B, 'M', u'𐐳'),
(0x1040C, 'M', u'𐐴'),
(0x1040D, 'M', u'𐐵'),
(0x1040E, 'M', u'𐐶'),
(0x1040F, 'M', u'𐐷'),
(0x10410, 'M', u'𐐸'),
(0x10411, 'M', u'𐐹'),
(0x10412, 'M', u'𐐺'),
(0x10413, 'M', u'𐐻'),
(0x10414, 'M', u'𐐼'),
(0x10415, 'M', u'𐐽'),
(0x10416, 'M', u'𐐾'),
(0x10417, 'M', u'𐐿'),
(0x10418, 'M', u'𐑀'),
(0x10419, 'M', u'𐑁'),
(0x1041A, 'M', u'𐑂'),
(0x1041B, 'M', u'𐑃'),
(0x1041C, 'M', u'𐑄'),
(0x1041D, 'M', u'𐑅'),
(0x1041E, 'M', u'𐑆'),
(0x1041F, 'M', u'𐑇'),
(0x10420, 'M', u'𐑈'),
(0x10421, 'M', u'𐑉'),
(0x10422, 'M', u'𐑊'),
(0x10423, 'M', u'𐑋'),
(0x10424, 'M', u'𐑌'),
(0x10425, 'M', u'𐑍'),
(0x10426, 'M', u'𐑎'),
(0x10427, 'M', u'𐑏'),
(0x10428, 'V'),
(0x1049E, 'X'),
(0x104A0, 'V'),
(0x104AA, 'X'),
(0x104B0, 'M', u'𐓘'),
(0x104B1, 'M', u'𐓙'),
(0x104B2, 'M', u'𐓚'),
(0x104B3, 'M', u'𐓛'),
(0x104B4, 'M', u'𐓜'),
(0x104B5, 'M', u'𐓝'),
(0x104B6, 'M', u'𐓞'),
(0x104B7, 'M', u'𐓟'),
(0x104B8, 'M', u'𐓠'),
(0x104B9, 'M', u'𐓡'),
(0x104BA, 'M', u'𐓢'),
(0x104BB, 'M', u'𐓣'),
(0x104BC, 'M', u'𐓤'),
(0x104BD, 'M', u'𐓥'),
(0x104BE, 'M', u'𐓦'),
]
def _seg_54():
return [
(0x104BF, 'M', u'𐓧'),
(0x104C0, 'M', u'𐓨'),
(0x104C1, 'M', u'𐓩'),
(0x104C2, 'M', u'𐓪'),
(0x104C3, 'M', u'𐓫'),
(0x104C4, 'M', u'𐓬'),
(0x104C5, 'M', u'𐓭'),
(0x104C6, 'M', u'𐓮'),
(0x104C7, 'M', u'𐓯'),
(0x104C8, 'M', u'𐓰'),
(0x104C9, 'M', u'𐓱'),
(0x104CA, 'M', u'𐓲'),
(0x104CB, 'M', u'𐓳'),
(0x104CC, 'M', u'𐓴'),
(0x104CD, 'M', u'𐓵'),
(0x104CE, 'M', u'𐓶'),
(0x104CF, 'M', u'𐓷'),
(0x104D0, 'M', u'𐓸'),
(0x104D1, 'M', u'𐓹'),
(0x104D2, 'M', u'𐓺'),
(0x104D3, 'M', u'𐓻'),
(0x104D4, 'X'),
(0x104D8, 'V'),
(0x104FC, 'X'),
(0x10500, 'V'),
(0x10528, 'X'),
(0x10530, 'V'),
(0x10564, 'X'),
(0x1056F, 'V'),
(0x10570, 'X'),
(0x10600, 'V'),
(0x10737, 'X'),
(0x10740, 'V'),
(0x10756, 'X'),
(0x10760, 'V'),
(0x10768, 'X'),
(0x10800, 'V'),
(0x10806, 'X'),
(0x10808, 'V'),
(0x10809, 'X'),
(0x1080A, 'V'),
(0x10836, 'X'),
(0x10837, 'V'),
(0x10839, 'X'),
(0x1083C, 'V'),
(0x1083D, 'X'),
(0x1083F, 'V'),
(0x10856, 'X'),
(0x10857, 'V'),
(0x1089F, 'X'),
(0x108A7, 'V'),
(0x108B0, 'X'),
(0x108E0, 'V'),
(0x108F3, 'X'),
(0x108F4, 'V'),
(0x108F6, 'X'),
(0x108FB, 'V'),
(0x1091C, 'X'),
(0x1091F, 'V'),
(0x1093A, 'X'),
(0x1093F, 'V'),
(0x10940, 'X'),
(0x10980, 'V'),
(0x109B8, 'X'),
(0x109BC, 'V'),
(0x109D0, 'X'),
(0x109D2, 'V'),
(0x10A04, 'X'),
(0x10A05, 'V'),
(0x10A07, 'X'),
(0x10A0C, 'V'),
(0x10A14, 'X'),
(0x10A15, 'V'),
(0x10A18, 'X'),
(0x10A19, 'V'),
(0x10A36, 'X'),
(0x10A38, 'V'),
(0x10A3B, 'X'),
(0x10A3F, 'V'),
(0x10A49, 'X'),
(0x10A50, 'V'),
(0x10A59, 'X'),
(0x10A60, 'V'),
(0x10AA0, 'X'),
(0x10AC0, 'V'),
(0x10AE7, 'X'),
(0x10AEB, 'V'),
(0x10AF7, 'X'),
(0x10B00, 'V'),
(0x10B36, 'X'),
(0x10B39, 'V'),
(0x10B56, 'X'),
(0x10B58, 'V'),
(0x10B73, 'X'),
(0x10B78, 'V'),
(0x10B92, 'X'),
(0x10B99, 'V'),
(0x10B9D, 'X'),
(0x10BA9, 'V'),
(0x10BB0, 'X'),
]
def _seg_55():
return [
(0x10C00, 'V'),
(0x10C49, 'X'),
(0x10C80, 'M', u'𐳀'),
(0x10C81, 'M', u'𐳁'),
(0x10C82, 'M', u'𐳂'),
(0x10C83, 'M', u'𐳃'),
(0x10C84, 'M', u'𐳄'),
(0x10C85, 'M', u'𐳅'),
(0x10C86, 'M', u'𐳆'),
(0x10C87, 'M', u'𐳇'),
(0x10C88, 'M', u'𐳈'),
(0x10C89, 'M', u'𐳉'),
(0x10C8A, 'M', u'𐳊'),
(0x10C8B, 'M', u'𐳋'),
(0x10C8C, 'M', u'𐳌'),
(0x10C8D, 'M', u'𐳍'),
(0x10C8E, 'M', u'𐳎'),
(0x10C8F, 'M', u'𐳏'),
(0x10C90, 'M', u'𐳐'),
(0x10C91, 'M', u'𐳑'),
(0x10C92, 'M', u'𐳒'),
(0x10C93, 'M', u'𐳓'),
(0x10C94, 'M', u'𐳔'),
(0x10C95, 'M', u'𐳕'),
(0x10C96, 'M', u'𐳖'),
(0x10C97, 'M', u'𐳗'),
(0x10C98, 'M', u'𐳘'),
(0x10C99, 'M', u'𐳙'),
(0x10C9A, 'M', u'𐳚'),
(0x10C9B, 'M', u'𐳛'),
(0x10C9C, 'M', u'𐳜'),
(0x10C9D, 'M', u'𐳝'),
(0x10C9E, 'M', u'𐳞'),
(0x10C9F, 'M', u'𐳟'),
(0x10CA0, 'M', u'𐳠'),
(0x10CA1, 'M', u'𐳡'),
(0x10CA2, 'M', u'𐳢'),
(0x10CA3, 'M', u'𐳣'),
(0x10CA4, 'M', u'𐳤'),
(0x10CA5, 'M', u'𐳥'),
(0x10CA6, 'M', u'𐳦'),
(0x10CA7, 'M', u'𐳧'),
(0x10CA8, 'M', u'𐳨'),
(0x10CA9, 'M', u'𐳩'),
(0x10CAA, 'M', u'𐳪'),
(0x10CAB, 'M', u'𐳫'),
(0x10CAC, 'M', u'𐳬'),
(0x10CAD, 'M', u'𐳭'),
(0x10CAE, 'M', u'𐳮'),
(0x10CAF, 'M', u'𐳯'),
(0x10CB0, 'M', u'𐳰'),
(0x10CB1, 'M', u'𐳱'),
(0x10CB2, 'M', u'𐳲'),
(0x10CB3, 'X'),
(0x10CC0, 'V'),
(0x10CF3, 'X'),
(0x10CFA, 'V'),
(0x10D28, 'X'),
(0x10D30, 'V'),
(0x10D3A, 'X'),
(0x10E60, 'V'),
(0x10E7F, 'X'),
(0x10E80, 'V'),
(0x10EAA, 'X'),
(0x10EAB, 'V'),
(0x10EAE, 'X'),
(0x10EB0, 'V'),
(0x10EB2, 'X'),
(0x10F00, 'V'),
(0x10F28, 'X'),
(0x10F30, 'V'),
(0x10F5A, 'X'),
(0x10FB0, 'V'),
(0x10FCC, 'X'),
(0x10FE0, 'V'),
(0x10FF7, 'X'),
(0x11000, 'V'),
(0x1104E, 'X'),
(0x11052, 'V'),
(0x11070, 'X'),
(0x1107F, 'V'),
(0x110BD, 'X'),
(0x110BE, 'V'),
(0x110C2, 'X'),
(0x110D0, 'V'),
(0x110E9, 'X'),
(0x110F0, 'V'),
(0x110FA, 'X'),
(0x11100, 'V'),
(0x11135, 'X'),
(0x11136, 'V'),
(0x11148, 'X'),
(0x11150, 'V'),
(0x11177, 'X'),
(0x11180, 'V'),
(0x111E0, 'X'),
(0x111E1, 'V'),
(0x111F5, 'X'),
(0x11200, 'V'),
(0x11212, 'X'),
]
def _seg_56():
return [
(0x11213, 'V'),
(0x1123F, 'X'),
(0x11280, 'V'),
(0x11287, 'X'),
(0x11288, 'V'),
(0x11289, 'X'),
(0x1128A, 'V'),
(0x1128E, 'X'),
(0x1128F, 'V'),
(0x1129E, 'X'),
(0x1129F, 'V'),
(0x112AA, 'X'),
(0x112B0, 'V'),
(0x112EB, 'X'),
(0x112F0, 'V'),
(0x112FA, 'X'),
(0x11300, 'V'),
(0x11304, 'X'),
(0x11305, 'V'),
(0x1130D, 'X'),
(0x1130F, 'V'),
(0x11311, 'X'),
(0x11313, 'V'),
(0x11329, 'X'),
(0x1132A, 'V'),
(0x11331, 'X'),
(0x11332, 'V'),
(0x11334, 'X'),
(0x11335, 'V'),
(0x1133A, 'X'),
(0x1133B, 'V'),
(0x11345, 'X'),
(0x11347, 'V'),
(0x11349, 'X'),
(0x1134B, 'V'),
(0x1134E, 'X'),
(0x11350, 'V'),
(0x11351, 'X'),
(0x11357, 'V'),
(0x11358, 'X'),
(0x1135D, 'V'),
(0x11364, 'X'),
(0x11366, 'V'),
(0x1136D, 'X'),
(0x11370, 'V'),
(0x11375, 'X'),
(0x11400, 'V'),
(0x1145C, 'X'),
(0x1145D, 'V'),
(0x11462, 'X'),
(0x11480, 'V'),
(0x114C8, 'X'),
(0x114D0, 'V'),
(0x114DA, 'X'),
(0x11580, 'V'),
(0x115B6, 'X'),
(0x115B8, 'V'),
(0x115DE, 'X'),
(0x11600, 'V'),
(0x11645, 'X'),
(0x11650, 'V'),
(0x1165A, 'X'),
(0x11660, 'V'),
(0x1166D, 'X'),
(0x11680, 'V'),
(0x116B9, 'X'),
(0x116C0, 'V'),
(0x116CA, 'X'),
(0x11700, 'V'),
(0x1171B, 'X'),
(0x1171D, 'V'),
(0x1172C, 'X'),
(0x11730, 'V'),
(0x11740, 'X'),
(0x11800, 'V'),
(0x1183C, 'X'),
(0x118A0, 'M', u'𑣀'),
(0x118A1, 'M', u'𑣁'),
(0x118A2, 'M', u'𑣂'),
(0x118A3, 'M', u'𑣃'),
(0x118A4, 'M', u'𑣄'),
(0x118A5, 'M', u'𑣅'),
(0x118A6, 'M', u'𑣆'),
(0x118A7, 'M', u'𑣇'),
(0x118A8, 'M', u'𑣈'),
(0x118A9, 'M', u'𑣉'),
(0x118AA, 'M', u'𑣊'),
(0x118AB, 'M', u'𑣋'),
(0x118AC, 'M', u'𑣌'),
(0x118AD, 'M', u'𑣍'),
(0x118AE, 'M', u'𑣎'),
(0x118AF, 'M', u'𑣏'),
(0x118B0, 'M', u'𑣐'),
(0x118B1, 'M', u'𑣑'),
(0x118B2, 'M', u'𑣒'),
(0x118B3, 'M', u'𑣓'),
(0x118B4, 'M', u'𑣔'),
(0x118B5, 'M', u'𑣕'),
(0x118B6, 'M', u'𑣖'),
(0x118B7, 'M', u'𑣗'),
]
def _seg_57():
return [
(0x118B8, 'M', u'𑣘'),
(0x118B9, 'M', u'𑣙'),
(0x118BA, 'M', u'𑣚'),
(0x118BB, 'M', u'𑣛'),
(0x118BC, 'M', u'𑣜'),
(0x118BD, 'M', u'𑣝'),
(0x118BE, 'M', u'𑣞'),
(0x118BF, 'M', u'𑣟'),
(0x118C0, 'V'),
(0x118F3, 'X'),
(0x118FF, 'V'),
(0x11907, 'X'),
(0x11909, 'V'),
(0x1190A, 'X'),
(0x1190C, 'V'),
(0x11914, 'X'),
(0x11915, 'V'),
(0x11917, 'X'),
(0x11918, 'V'),
(0x11936, 'X'),
(0x11937, 'V'),
(0x11939, 'X'),
(0x1193B, 'V'),
(0x11947, 'X'),
(0x11950, 'V'),
(0x1195A, 'X'),
(0x119A0, 'V'),
(0x119A8, 'X'),
(0x119AA, 'V'),
(0x119D8, 'X'),
(0x119DA, 'V'),
(0x119E5, 'X'),
(0x11A00, 'V'),
(0x11A48, 'X'),
(0x11A50, 'V'),
(0x11AA3, 'X'),
(0x11AC0, 'V'),
(0x11AF9, 'X'),
(0x11C00, 'V'),
(0x11C09, 'X'),
(0x11C0A, 'V'),
(0x11C37, 'X'),
(0x11C38, 'V'),
(0x11C46, 'X'),
(0x11C50, 'V'),
(0x11C6D, 'X'),
(0x11C70, 'V'),
(0x11C90, 'X'),
(0x11C92, 'V'),
(0x11CA8, 'X'),
(0x11CA9, 'V'),
(0x11CB7, 'X'),
(0x11D00, 'V'),
(0x11D07, 'X'),
(0x11D08, 'V'),
(0x11D0A, 'X'),
(0x11D0B, 'V'),
(0x11D37, 'X'),
(0x11D3A, 'V'),
(0x11D3B, 'X'),
(0x11D3C, 'V'),
(0x11D3E, 'X'),
(0x11D3F, 'V'),
(0x11D48, 'X'),
(0x11D50, 'V'),
(0x11D5A, 'X'),
(0x11D60, 'V'),
(0x11D66, 'X'),
(0x11D67, 'V'),
(0x11D69, 'X'),
(0x11D6A, 'V'),
(0x11D8F, 'X'),
(0x11D90, 'V'),
(0x11D92, 'X'),
(0x11D93, 'V'),
(0x11D99, 'X'),
(0x11DA0, 'V'),
(0x11DAA, 'X'),
(0x11EE0, 'V'),
(0x11EF9, 'X'),
(0x11FB0, 'V'),
(0x11FB1, 'X'),
(0x11FC0, 'V'),
(0x11FF2, 'X'),
(0x11FFF, 'V'),
(0x1239A, 'X'),
(0x12400, 'V'),
(0x1246F, 'X'),
(0x12470, 'V'),
(0x12475, 'X'),
(0x12480, 'V'),
(0x12544, 'X'),
(0x13000, 'V'),
(0x1342F, 'X'),
(0x14400, 'V'),
(0x14647, 'X'),
(0x16800, 'V'),
(0x16A39, 'X'),
(0x16A40, 'V'),
(0x16A5F, 'X'),
]
def _seg_58():
return [
(0x16A60, 'V'),
(0x16A6A, 'X'),
(0x16A6E, 'V'),
(0x16A70, 'X'),
(0x16AD0, 'V'),
(0x16AEE, 'X'),
(0x16AF0, 'V'),
(0x16AF6, 'X'),
(0x16B00, 'V'),
(0x16B46, 'X'),
(0x16B50, 'V'),
(0x16B5A, 'X'),
(0x16B5B, 'V'),
(0x16B62, 'X'),
(0x16B63, 'V'),
(0x16B78, 'X'),
(0x16B7D, 'V'),
(0x16B90, 'X'),
(0x16E40, 'M', u'𖹠'),
(0x16E41, 'M', u'𖹡'),
(0x16E42, 'M', u'𖹢'),
(0x16E43, 'M', u'𖹣'),
(0x16E44, 'M', u'𖹤'),
(0x16E45, 'M', u'𖹥'),
(0x16E46, 'M', u'𖹦'),
(0x16E47, 'M', u'𖹧'),
(0x16E48, 'M', u'𖹨'),
(0x16E49, 'M', u'𖹩'),
(0x16E4A, 'M', u'𖹪'),
(0x16E4B, 'M', u'𖹫'),
(0x16E4C, 'M', u'𖹬'),
(0x16E4D, 'M', u'𖹭'),
(0x16E4E, 'M', u'𖹮'),
(0x16E4F, 'M', u'𖹯'),
(0x16E50, 'M', u'𖹰'),
(0x16E51, 'M', u'𖹱'),
(0x16E52, 'M', u'𖹲'),
(0x16E53, 'M', u'𖹳'),
(0x16E54, 'M', u'𖹴'),
(0x16E55, 'M', u'𖹵'),
(0x16E56, 'M', u'𖹶'),
(0x16E57, 'M', u'𖹷'),
(0x16E58, 'M', u'𖹸'),
(0x16E59, 'M', u'𖹹'),
(0x16E5A, 'M', u'𖹺'),
(0x16E5B, 'M', u'𖹻'),
(0x16E5C, 'M', u'𖹼'),
(0x16E5D, 'M', u'𖹽'),
(0x16E5E, 'M', u'𖹾'),
(0x16E5F, 'M', u'𖹿'),
(0x16E60, 'V'),
(0x16E9B, 'X'),
(0x16F00, 'V'),
(0x16F4B, 'X'),
(0x16F4F, 'V'),
(0x16F88, 'X'),
(0x16F8F, 'V'),
(0x16FA0, 'X'),
(0x16FE0, 'V'),
(0x16FE5, 'X'),
(0x16FF0, 'V'),
(0x16FF2, 'X'),
(0x17000, 'V'),
(0x187F8, 'X'),
(0x18800, 'V'),
(0x18CD6, 'X'),
(0x18D00, 'V'),
(0x18D09, 'X'),
(0x1B000, 'V'),
(0x1B11F, 'X'),
(0x1B150, 'V'),
(0x1B153, 'X'),
(0x1B164, 'V'),
(0x1B168, 'X'),
(0x1B170, 'V'),
(0x1B2FC, 'X'),
(0x1BC00, 'V'),
(0x1BC6B, 'X'),
(0x1BC70, 'V'),
(0x1BC7D, 'X'),
(0x1BC80, 'V'),
(0x1BC89, 'X'),
(0x1BC90, 'V'),
(0x1BC9A, 'X'),
(0x1BC9C, 'V'),
(0x1BCA0, 'I'),
(0x1BCA4, 'X'),
(0x1D000, 'V'),
(0x1D0F6, 'X'),
(0x1D100, 'V'),
(0x1D127, 'X'),
(0x1D129, 'V'),
(0x1D15E, 'M', u'𝅗𝅥'),
(0x1D15F, 'M', u'𝅘𝅥'),
(0x1D160, 'M', u'𝅘𝅥𝅮'),
(0x1D161, 'M', u'𝅘𝅥𝅯'),
(0x1D162, 'M', u'𝅘𝅥𝅰'),
(0x1D163, 'M', u'𝅘𝅥𝅱'),
(0x1D164, 'M', u'𝅘𝅥𝅲'),
(0x1D165, 'V'),
]
def _seg_59():
return [
(0x1D173, 'X'),
(0x1D17B, 'V'),
(0x1D1BB, 'M', u'𝆹𝅥'),
(0x1D1BC, 'M', u'𝆺𝅥'),
(0x1D1BD, 'M', u'𝆹𝅥𝅮'),
(0x1D1BE, 'M', u'𝆺𝅥𝅮'),
(0x1D1BF, 'M', u'𝆹𝅥𝅯'),
(0x1D1C0, 'M', u'𝆺𝅥𝅯'),
(0x1D1C1, 'V'),
(0x1D1E9, 'X'),
(0x1D200, 'V'),
(0x1D246, 'X'),
(0x1D2E0, 'V'),
(0x1D2F4, 'X'),
(0x1D300, 'V'),
(0x1D357, 'X'),
(0x1D360, 'V'),
(0x1D379, 'X'),
(0x1D400, 'M', u'a'),
(0x1D401, 'M', u'b'),
(0x1D402, 'M', u'c'),
(0x1D403, 'M', u'd'),
(0x1D404, 'M', u'e'),
(0x1D405, 'M', u'f'),
(0x1D406, 'M', u'g'),
(0x1D407, 'M', u'h'),
(0x1D408, 'M', u'i'),
(0x1D409, 'M', u'j'),
(0x1D40A, 'M', u'k'),
(0x1D40B, 'M', u'l'),
(0x1D40C, 'M', u'm'),
(0x1D40D, 'M', u'n'),
(0x1D40E, 'M', u'o'),
(0x1D40F, 'M', u'p'),
(0x1D410, 'M', u'q'),
(0x1D411, 'M', u'r'),
(0x1D412, 'M', u's'),
(0x1D413, 'M', u't'),
(0x1D414, 'M', u'u'),
(0x1D415, 'M', u'v'),
(0x1D416, 'M', u'w'),
(0x1D417, 'M', u'x'),
(0x1D418, 'M', u'y'),
(0x1D419, 'M', u'z'),
(0x1D41A, 'M', u'a'),
(0x1D41B, 'M', u'b'),
(0x1D41C, 'M', u'c'),
(0x1D41D, 'M', u'd'),
(0x1D41E, 'M', u'e'),
(0x1D41F, 'M', u'f'),
(0x1D420, 'M', u'g'),
(0x1D421, 'M', u'h'),
(0x1D422, 'M', u'i'),
(0x1D423, 'M', u'j'),
(0x1D424, 'M', u'k'),
(0x1D425, 'M', u'l'),
(0x1D426, 'M', u'm'),
(0x1D427, 'M', u'n'),
(0x1D428, 'M', u'o'),
(0x1D429, 'M', u'p'),
(0x1D42A, 'M', u'q'),
(0x1D42B, 'M', u'r'),
(0x1D42C, 'M', u's'),
(0x1D42D, 'M', u't'),
(0x1D42E, 'M', u'u'),
(0x1D42F, 'M', u'v'),
(0x1D430, 'M', u'w'),
(0x1D431, 'M', u'x'),
(0x1D432, 'M', u'y'),
(0x1D433, 'M', u'z'),
(0x1D434, 'M', u'a'),
(0x1D435, 'M', u'b'),
(0x1D436, 'M', u'c'),
(0x1D437, 'M', u'd'),
(0x1D438, 'M', u'e'),
(0x1D439, 'M', u'f'),
(0x1D43A, 'M', u'g'),
(0x1D43B, 'M', u'h'),
(0x1D43C, 'M', u'i'),
(0x1D43D, 'M', u'j'),
(0x1D43E, 'M', u'k'),
(0x1D43F, 'M', u'l'),
(0x1D440, 'M', u'm'),
(0x1D441, 'M', u'n'),
(0x1D442, 'M', u'o'),
(0x1D443, 'M', u'p'),
(0x1D444, 'M', u'q'),
(0x1D445, 'M', u'r'),
(0x1D446, 'M', u's'),
(0x1D447, 'M', u't'),
(0x1D448, 'M', u'u'),
(0x1D449, 'M', u'v'),
(0x1D44A, 'M', u'w'),
(0x1D44B, 'M', u'x'),
(0x1D44C, 'M', u'y'),
(0x1D44D, 'M', u'z'),
(0x1D44E, 'M', u'a'),
(0x1D44F, 'M', u'b'),
(0x1D450, 'M', u'c'),
(0x1D451, 'M', u'd'),
]
def _seg_60():
return [
(0x1D452, 'M', u'e'),
(0x1D453, 'M', u'f'),
(0x1D454, 'M', u'g'),
(0x1D455, 'X'),
(0x1D456, 'M', u'i'),
(0x1D457, 'M', u'j'),
(0x1D458, 'M', u'k'),
(0x1D459, 'M', u'l'),
(0x1D45A, 'M', u'm'),
(0x1D45B, 'M', u'n'),
(0x1D45C, 'M', u'o'),
(0x1D45D, 'M', u'p'),
(0x1D45E, 'M', u'q'),
(0x1D45F, 'M', u'r'),
(0x1D460, 'M', u's'),
(0x1D461, 'M', u't'),
(0x1D462, 'M', u'u'),
(0x1D463, 'M', u'v'),
(0x1D464, 'M', u'w'),
(0x1D465, 'M', u'x'),
(0x1D466, 'M', u'y'),
(0x1D467, 'M', u'z'),
(0x1D468, 'M', u'a'),
(0x1D469, 'M', u'b'),
(0x1D46A, 'M', u'c'),
(0x1D46B, 'M', u'd'),
(0x1D46C, 'M', u'e'),
(0x1D46D, 'M', u'f'),
(0x1D46E, 'M', u'g'),
(0x1D46F, 'M', u'h'),
(0x1D470, 'M', u'i'),
(0x1D471, 'M', u'j'),
(0x1D472, 'M', u'k'),
(0x1D473, 'M', u'l'),
(0x1D474, 'M', u'm'),
(0x1D475, 'M', u'n'),
(0x1D476, 'M', u'o'),
(0x1D477, 'M', u'p'),
(0x1D478, 'M', u'q'),
(0x1D479, 'M', u'r'),
(0x1D47A, 'M', u's'),
(0x1D47B, 'M', u't'),
(0x1D47C, 'M', u'u'),
(0x1D47D, 'M', u'v'),
(0x1D47E, 'M', u'w'),
(0x1D47F, 'M', u'x'),
(0x1D480, 'M', u'y'),
(0x1D481, 'M', u'z'),
(0x1D482, 'M', u'a'),
(0x1D483, 'M', u'b'),
(0x1D484, 'M', u'c'),
(0x1D485, 'M', u'd'),
(0x1D486, 'M', u'e'),
(0x1D487, 'M', u'f'),
(0x1D488, 'M', u'g'),
(0x1D489, 'M', u'h'),
(0x1D48A, 'M', u'i'),
(0x1D48B, 'M', u'j'),
(0x1D48C, 'M', u'k'),
(0x1D48D, 'M', u'l'),
(0x1D48E, 'M', u'm'),
(0x1D48F, 'M', u'n'),
(0x1D490, 'M', u'o'),
(0x1D491, 'M', u'p'),
(0x1D492, 'M', u'q'),
(0x1D493, 'M', u'r'),
(0x1D494, 'M', u's'),
(0x1D495, 'M', u't'),
(0x1D496, 'M', u'u'),
(0x1D497, 'M', u'v'),
(0x1D498, 'M', u'w'),
(0x1D499, 'M', u'x'),
(0x1D49A, 'M', u'y'),
(0x1D49B, 'M', u'z'),
(0x1D49C, 'M', u'a'),
(0x1D49D, 'X'),
(0x1D49E, 'M', u'c'),
(0x1D49F, 'M', u'd'),
(0x1D4A0, 'X'),
(0x1D4A2, 'M', u'g'),
(0x1D4A3, 'X'),
(0x1D4A5, 'M', u'j'),
(0x1D4A6, 'M', u'k'),
(0x1D4A7, 'X'),
(0x1D4A9, 'M', u'n'),
(0x1D4AA, 'M', u'o'),
(0x1D4AB, 'M', u'p'),
(0x1D4AC, 'M', u'q'),
(0x1D4AD, 'X'),
(0x1D4AE, 'M', u's'),
(0x1D4AF, 'M', u't'),
(0x1D4B0, 'M', u'u'),
(0x1D4B1, 'M', u'v'),
(0x1D4B2, 'M', u'w'),
(0x1D4B3, 'M', u'x'),
(0x1D4B4, 'M', u'y'),
(0x1D4B5, 'M', u'z'),
(0x1D4B6, 'M', u'a'),
(0x1D4B7, 'M', u'b'),
(0x1D4B8, 'M', u'c'),
]
def _seg_61():
return [
(0x1D4B9, 'M', u'd'),
(0x1D4BA, 'X'),
(0x1D4BB, 'M', u'f'),
(0x1D4BC, 'X'),
(0x1D4BD, 'M', u'h'),
(0x1D4BE, 'M', u'i'),
(0x1D4BF, 'M', u'j'),
(0x1D4C0, 'M', u'k'),
(0x1D4C1, 'M', u'l'),
(0x1D4C2, 'M', u'm'),
(0x1D4C3, 'M', u'n'),
(0x1D4C4, 'X'),
(0x1D4C5, 'M', u'p'),
(0x1D4C6, 'M', u'q'),
(0x1D4C7, 'M', u'r'),
(0x1D4C8, 'M', u's'),
(0x1D4C9, 'M', u't'),
(0x1D4CA, 'M', u'u'),
(0x1D4CB, 'M', u'v'),
(0x1D4CC, 'M', u'w'),
(0x1D4CD, 'M', u'x'),
(0x1D4CE, 'M', u'y'),
(0x1D4CF, 'M', u'z'),
(0x1D4D0, 'M', u'a'),
(0x1D4D1, 'M', u'b'),
(0x1D4D2, 'M', u'c'),
(0x1D4D3, 'M', u'd'),
(0x1D4D4, 'M', u'e'),
(0x1D4D5, 'M', u'f'),
(0x1D4D6, 'M', u'g'),
(0x1D4D7, 'M', u'h'),
(0x1D4D8, 'M', u'i'),
(0x1D4D9, 'M', u'j'),
(0x1D4DA, 'M', u'k'),
(0x1D4DB, 'M', u'l'),
(0x1D4DC, 'M', u'm'),
(0x1D4DD, 'M', u'n'),
(0x1D4DE, 'M', u'o'),
(0x1D4DF, 'M', u'p'),
(0x1D4E0, 'M', u'q'),
(0x1D4E1, 'M', u'r'),
(0x1D4E2, 'M', u's'),
(0x1D4E3, 'M', u't'),
(0x1D4E4, 'M', u'u'),
(0x1D4E5, 'M', u'v'),
(0x1D4E6, 'M', u'w'),
(0x1D4E7, 'M', u'x'),
(0x1D4E8, 'M', u'y'),
(0x1D4E9, 'M', u'z'),
(0x1D4EA, 'M', u'a'),
(0x1D4EB, 'M', u'b'),
(0x1D4EC, 'M', u'c'),
(0x1D4ED, 'M', u'd'),
(0x1D4EE, 'M', u'e'),
(0x1D4EF, 'M', u'f'),
(0x1D4F0, 'M', u'g'),
(0x1D4F1, 'M', u'h'),
(0x1D4F2, 'M', u'i'),
(0x1D4F3, 'M', u'j'),
(0x1D4F4, 'M', u'k'),
(0x1D4F5, 'M', u'l'),
(0x1D4F6, 'M', u'm'),
(0x1D4F7, 'M', u'n'),
(0x1D4F8, 'M', u'o'),
(0x1D4F9, 'M', u'p'),
(0x1D4FA, 'M', u'q'),
(0x1D4FB, 'M', u'r'),
(0x1D4FC, 'M', u's'),
(0x1D4FD, 'M', u't'),
(0x1D4FE, 'M', u'u'),
(0x1D4FF, 'M', u'v'),
(0x1D500, 'M', u'w'),
(0x1D501, 'M', u'x'),
(0x1D502, 'M', u'y'),
(0x1D503, 'M', u'z'),
(0x1D504, 'M', u'a'),
(0x1D505, 'M', u'b'),
(0x1D506, 'X'),
(0x1D507, 'M', u'd'),
(0x1D508, 'M', u'e'),
(0x1D509, 'M', u'f'),
(0x1D50A, 'M', u'g'),
(0x1D50B, 'X'),
(0x1D50D, 'M', u'j'),
(0x1D50E, 'M', u'k'),
(0x1D50F, 'M', u'l'),
(0x1D510, 'M', u'm'),
(0x1D511, 'M', u'n'),
(0x1D512, 'M', u'o'),
(0x1D513, 'M', u'p'),
(0x1D514, 'M', u'q'),
(0x1D515, 'X'),
(0x1D516, 'M', u's'),
(0x1D517, 'M', u't'),
(0x1D518, 'M', u'u'),
(0x1D519, 'M', u'v'),
(0x1D51A, 'M', u'w'),
(0x1D51B, 'M', u'x'),
(0x1D51C, 'M', u'y'),
(0x1D51D, 'X'),
]
def _seg_62():
return [
(0x1D51E, 'M', u'a'),
(0x1D51F, 'M', u'b'),
(0x1D520, 'M', u'c'),
(0x1D521, 'M', u'd'),
(0x1D522, 'M', u'e'),
(0x1D523, 'M', u'f'),
(0x1D524, 'M', u'g'),
(0x1D525, 'M', u'h'),
(0x1D526, 'M', u'i'),
(0x1D527, 'M', u'j'),
(0x1D528, 'M', u'k'),
(0x1D529, 'M', u'l'),
(0x1D52A, 'M', u'm'),
(0x1D52B, 'M', u'n'),
(0x1D52C, 'M', u'o'),
(0x1D52D, 'M', u'p'),
(0x1D52E, 'M', u'q'),
(0x1D52F, 'M', u'r'),
(0x1D530, 'M', u's'),
(0x1D531, 'M', u't'),
(0x1D532, 'M', u'u'),
(0x1D533, 'M', u'v'),
(0x1D534, 'M', u'w'),
(0x1D535, 'M', u'x'),
(0x1D536, 'M', u'y'),
(0x1D537, 'M', u'z'),
(0x1D538, 'M', u'a'),
(0x1D539, 'M', u'b'),
(0x1D53A, 'X'),
(0x1D53B, 'M', u'd'),
(0x1D53C, 'M', u'e'),
(0x1D53D, 'M', u'f'),
(0x1D53E, 'M', u'g'),
(0x1D53F, 'X'),
(0x1D540, 'M', u'i'),
(0x1D541, 'M', u'j'),
(0x1D542, 'M', u'k'),
(0x1D543, 'M', u'l'),
(0x1D544, 'M', u'm'),
(0x1D545, 'X'),
(0x1D546, 'M', u'o'),
(0x1D547, 'X'),
(0x1D54A, 'M', u's'),
(0x1D54B, 'M', u't'),
(0x1D54C, 'M', u'u'),
(0x1D54D, 'M', u'v'),
(0x1D54E, 'M', u'w'),
(0x1D54F, 'M', u'x'),
(0x1D550, 'M', u'y'),
(0x1D551, 'X'),
(0x1D552, 'M', u'a'),
(0x1D553, 'M', u'b'),
(0x1D554, 'M', u'c'),
(0x1D555, 'M', u'd'),
(0x1D556, 'M', u'e'),
(0x1D557, 'M', u'f'),
(0x1D558, 'M', u'g'),
(0x1D559, 'M', u'h'),
(0x1D55A, 'M', u'i'),
(0x1D55B, 'M', u'j'),
(0x1D55C, 'M', u'k'),
(0x1D55D, 'M', u'l'),
(0x1D55E, 'M', u'm'),
(0x1D55F, 'M', u'n'),
(0x1D560, 'M', u'o'),
(0x1D561, 'M', u'p'),
(0x1D562, 'M', u'q'),
(0x1D563, 'M', u'r'),
(0x1D564, 'M', u's'),
(0x1D565, 'M', u't'),
(0x1D566, 'M', u'u'),
(0x1D567, 'M', u'v'),
(0x1D568, 'M', u'w'),
(0x1D569, 'M', u'x'),
(0x1D56A, 'M', u'y'),
(0x1D56B, 'M', u'z'),
(0x1D56C, 'M', u'a'),
(0x1D56D, 'M', u'b'),
(0x1D56E, 'M', u'c'),
(0x1D56F, 'M', u'd'),
(0x1D570, 'M', u'e'),
(0x1D571, 'M', u'f'),
(0x1D572, 'M', u'g'),
(0x1D573, 'M', u'h'),
(0x1D574, 'M', u'i'),
(0x1D575, 'M', u'j'),
(0x1D576, 'M', u'k'),
(0x1D577, 'M', u'l'),
(0x1D578, 'M', u'm'),
(0x1D579, 'M', u'n'),
(0x1D57A, 'M', u'o'),
(0x1D57B, 'M', u'p'),
(0x1D57C, 'M', u'q'),
(0x1D57D, 'M', u'r'),
(0x1D57E, 'M', u's'),
(0x1D57F, 'M', u't'),
(0x1D580, 'M', u'u'),
(0x1D581, 'M', u'v'),
(0x1D582, 'M', u'w'),
(0x1D583, 'M', u'x'),
]
def _seg_63():
return [
(0x1D584, 'M', u'y'),
(0x1D585, 'M', u'z'),
(0x1D586, 'M', u'a'),
(0x1D587, 'M', u'b'),
(0x1D588, 'M', u'c'),
(0x1D589, 'M', u'd'),
(0x1D58A, 'M', u'e'),
(0x1D58B, 'M', u'f'),
(0x1D58C, 'M', u'g'),
(0x1D58D, 'M', u'h'),
(0x1D58E, 'M', u'i'),
(0x1D58F, 'M', u'j'),
(0x1D590, 'M', u'k'),
(0x1D591, 'M', u'l'),
(0x1D592, 'M', u'm'),
(0x1D593, 'M', u'n'),
(0x1D594, 'M', u'o'),
(0x1D595, 'M', u'p'),
(0x1D596, 'M', u'q'),
(0x1D597, 'M', u'r'),
(0x1D598, 'M', u's'),
(0x1D599, 'M', u't'),
(0x1D59A, 'M', u'u'),
(0x1D59B, 'M', u'v'),
(0x1D59C, 'M', u'w'),
(0x1D59D, 'M', u'x'),
(0x1D59E, 'M', u'y'),
(0x1D59F, 'M', u'z'),
(0x1D5A0, 'M', u'a'),
(0x1D5A1, 'M', u'b'),
(0x1D5A2, 'M', u'c'),
(0x1D5A3, 'M', u'd'),
(0x1D5A4, 'M', u'e'),
(0x1D5A5, 'M', u'f'),
(0x1D5A6, 'M', u'g'),
(0x1D5A7, 'M', u'h'),
(0x1D5A8, 'M', u'i'),
(0x1D5A9, 'M', u'j'),
(0x1D5AA, 'M', u'k'),
(0x1D5AB, 'M', u'l'),
(0x1D5AC, 'M', u'm'),
(0x1D5AD, 'M', u'n'),
(0x1D5AE, 'M', u'o'),
(0x1D5AF, 'M', u'p'),
(0x1D5B0, 'M', u'q'),
(0x1D5B1, 'M', u'r'),
(0x1D5B2, 'M', u's'),
(0x1D5B3, 'M', u't'),
(0x1D5B4, 'M', u'u'),
(0x1D5B5, 'M', u'v'),
(0x1D5B6, 'M', u'w'),
(0x1D5B7, 'M', u'x'),
(0x1D5B8, 'M', u'y'),
(0x1D5B9, 'M', u'z'),
(0x1D5BA, 'M', u'a'),
(0x1D5BB, 'M', u'b'),
(0x1D5BC, 'M', u'c'),
(0x1D5BD, 'M', u'd'),
(0x1D5BE, 'M', u'e'),
(0x1D5BF, 'M', u'f'),
(0x1D5C0, 'M', u'g'),
(0x1D5C1, 'M', u'h'),
(0x1D5C2, 'M', u'i'),
(0x1D5C3, 'M', u'j'),
(0x1D5C4, 'M', u'k'),
(0x1D5C5, 'M', u'l'),
(0x1D5C6, 'M', u'm'),
(0x1D5C7, 'M', u'n'),
(0x1D5C8, 'M', u'o'),
(0x1D5C9, 'M', u'p'),
(0x1D5CA, 'M', u'q'),
(0x1D5CB, 'M', u'r'),
(0x1D5CC, 'M', u's'),
(0x1D5CD, 'M', u't'),
(0x1D5CE, 'M', u'u'),
(0x1D5CF, 'M', u'v'),
(0x1D5D0, 'M', u'w'),
(0x1D5D1, 'M', u'x'),
(0x1D5D2, 'M', u'y'),
(0x1D5D3, 'M', u'z'),
(0x1D5D4, 'M', u'a'),
(0x1D5D5, 'M', u'b'),
(0x1D5D6, 'M', u'c'),
(0x1D5D7, 'M', u'd'),
(0x1D5D8, 'M', u'e'),
(0x1D5D9, 'M', u'f'),
(0x1D5DA, 'M', u'g'),
(0x1D5DB, 'M', u'h'),
(0x1D5DC, 'M', u'i'),
(0x1D5DD, 'M', u'j'),
(0x1D5DE, 'M', u'k'),
(0x1D5DF, 'M', u'l'),
(0x1D5E0, 'M', u'm'),
(0x1D5E1, 'M', u'n'),
(0x1D5E2, 'M', u'o'),
(0x1D5E3, 'M', u'p'),
(0x1D5E4, 'M', u'q'),
(0x1D5E5, 'M', u'r'),
(0x1D5E6, 'M', u's'),
(0x1D5E7, 'M', u't'),
]
def _seg_64():
return [
(0x1D5E8, 'M', u'u'),
(0x1D5E9, 'M', u'v'),
(0x1D5EA, 'M', u'w'),
(0x1D5EB, 'M', u'x'),
(0x1D5EC, 'M', u'y'),
(0x1D5ED, 'M', u'z'),
(0x1D5EE, 'M', u'a'),
(0x1D5EF, 'M', u'b'),
(0x1D5F0, 'M', u'c'),
(0x1D5F1, 'M', u'd'),
(0x1D5F2, 'M', u'e'),
(0x1D5F3, 'M', u'f'),
(0x1D5F4, 'M', u'g'),
(0x1D5F5, 'M', u'h'),
(0x1D5F6, 'M', u'i'),
(0x1D5F7, 'M', u'j'),
(0x1D5F8, 'M', u'k'),
(0x1D5F9, 'M', u'l'),
(0x1D5FA, 'M', u'm'),
(0x1D5FB, 'M', u'n'),
(0x1D5FC, 'M', u'o'),
(0x1D5FD, 'M', u'p'),
(0x1D5FE, 'M', u'q'),
(0x1D5FF, 'M', u'r'),
(0x1D600, 'M', u's'),
(0x1D601, 'M', u't'),
(0x1D602, 'M', u'u'),
(0x1D603, 'M', u'v'),
(0x1D604, 'M', u'w'),
(0x1D605, 'M', u'x'),
(0x1D606, 'M', u'y'),
(0x1D607, 'M', u'z'),
(0x1D608, 'M', u'a'),
(0x1D609, 'M', u'b'),
(0x1D60A, 'M', u'c'),
(0x1D60B, 'M', u'd'),
(0x1D60C, 'M', u'e'),
(0x1D60D, 'M', u'f'),
(0x1D60E, 'M', u'g'),
(0x1D60F, 'M', u'h'),
(0x1D610, 'M', u'i'),
(0x1D611, 'M', u'j'),
(0x1D612, 'M', u'k'),
(0x1D613, 'M', u'l'),
(0x1D614, 'M', u'm'),
(0x1D615, 'M', u'n'),
(0x1D616, 'M', u'o'),
(0x1D617, 'M', u'p'),
(0x1D618, 'M', u'q'),
(0x1D619, 'M', u'r'),
(0x1D61A, 'M', u's'),
(0x1D61B, 'M', u't'),
(0x1D61C, 'M', u'u'),
(0x1D61D, 'M', u'v'),
(0x1D61E, 'M', u'w'),
(0x1D61F, 'M', u'x'),
(0x1D620, 'M', u'y'),
(0x1D621, 'M', u'z'),
(0x1D622, 'M', u'a'),
(0x1D623, 'M', u'b'),
(0x1D624, 'M', u'c'),
(0x1D625, 'M', u'd'),
(0x1D626, 'M', u'e'),
(0x1D627, 'M', u'f'),
(0x1D628, 'M', u'g'),
(0x1D629, 'M', u'h'),
(0x1D62A, 'M', u'i'),
(0x1D62B, 'M', u'j'),
(0x1D62C, 'M', u'k'),
(0x1D62D, 'M', u'l'),
(0x1D62E, 'M', u'm'),
(0x1D62F, 'M', u'n'),
(0x1D630, 'M', u'o'),
(0x1D631, 'M', u'p'),
(0x1D632, 'M', u'q'),
(0x1D633, 'M', u'r'),
(0x1D634, 'M', u's'),
(0x1D635, 'M', u't'),
(0x1D636, 'M', u'u'),
(0x1D637, 'M', u'v'),
(0x1D638, 'M', u'w'),
(0x1D639, 'M', u'x'),
(0x1D63A, 'M', u'y'),
(0x1D63B, 'M', u'z'),
(0x1D63C, 'M', u'a'),
(0x1D63D, 'M', u'b'),
(0x1D63E, 'M', u'c'),
(0x1D63F, 'M', u'd'),
(0x1D640, 'M', u'e'),
(0x1D641, 'M', u'f'),
(0x1D642, 'M', u'g'),
(0x1D643, 'M', u'h'),
(0x1D644, 'M', u'i'),
(0x1D645, 'M', u'j'),
(0x1D646, 'M', u'k'),
(0x1D647, 'M', u'l'),
(0x1D648, 'M', u'm'),
(0x1D649, 'M', u'n'),
(0x1D64A, 'M', u'o'),
(0x1D64B, 'M', u'p'),
]
def _seg_65():
return [
(0x1D64C, 'M', u'q'),
(0x1D64D, 'M', u'r'),
(0x1D64E, 'M', u's'),
(0x1D64F, 'M', u't'),
(0x1D650, 'M', u'u'),
(0x1D651, 'M', u'v'),
(0x1D652, 'M', u'w'),
(0x1D653, 'M', u'x'),
(0x1D654, 'M', u'y'),
(0x1D655, 'M', u'z'),
(0x1D656, 'M', u'a'),
(0x1D657, 'M', u'b'),
(0x1D658, 'M', u'c'),
(0x1D659, 'M', u'd'),
(0x1D65A, 'M', u'e'),
(0x1D65B, 'M', u'f'),
(0x1D65C, 'M', u'g'),
(0x1D65D, 'M', u'h'),
(0x1D65E, 'M', u'i'),
(0x1D65F, 'M', u'j'),
(0x1D660, 'M', u'k'),
(0x1D661, 'M', u'l'),
(0x1D662, 'M', u'm'),
(0x1D663, 'M', u'n'),
(0x1D664, 'M', u'o'),
(0x1D665, 'M', u'p'),
(0x1D666, 'M', u'q'),
(0x1D667, 'M', u'r'),
(0x1D668, 'M', u's'),
(0x1D669, 'M', u't'),
(0x1D66A, 'M', u'u'),
(0x1D66B, 'M', u'v'),
(0x1D66C, 'M', u'w'),
(0x1D66D, 'M', u'x'),
(0x1D66E, 'M', u'y'),
(0x1D66F, 'M', u'z'),
(0x1D670, 'M', u'a'),
(0x1D671, 'M', u'b'),
(0x1D672, 'M', u'c'),
(0x1D673, 'M', u'd'),
(0x1D674, 'M', u'e'),
(0x1D675, 'M', u'f'),
(0x1D676, 'M', u'g'),
(0x1D677, 'M', u'h'),
(0x1D678, 'M', u'i'),
(0x1D679, 'M', u'j'),
(0x1D67A, 'M', u'k'),
(0x1D67B, 'M', u'l'),
(0x1D67C, 'M', u'm'),
(0x1D67D, 'M', u'n'),
(0x1D67E, 'M', u'o'),
(0x1D67F, 'M', u'p'),
(0x1D680, 'M', u'q'),
(0x1D681, 'M', u'r'),
(0x1D682, 'M', u's'),
(0x1D683, 'M', u't'),
(0x1D684, 'M', u'u'),
(0x1D685, 'M', u'v'),
(0x1D686, 'M', u'w'),
(0x1D687, 'M', u'x'),
(0x1D688, 'M', u'y'),
(0x1D689, 'M', u'z'),
(0x1D68A, 'M', u'a'),
(0x1D68B, 'M', u'b'),
(0x1D68C, 'M', u'c'),
(0x1D68D, 'M', u'd'),
(0x1D68E, 'M', u'e'),
(0x1D68F, 'M', u'f'),
(0x1D690, 'M', u'g'),
(0x1D691, 'M', u'h'),
(0x1D692, 'M', u'i'),
(0x1D693, 'M', u'j'),
(0x1D694, 'M', u'k'),
(0x1D695, 'M', u'l'),
(0x1D696, 'M', u'm'),
(0x1D697, 'M', u'n'),
(0x1D698, 'M', u'o'),
(0x1D699, 'M', u'p'),
(0x1D69A, 'M', u'q'),
(0x1D69B, 'M', u'r'),
(0x1D69C, 'M', u's'),
(0x1D69D, 'M', u't'),
(0x1D69E, 'M', u'u'),
(0x1D69F, 'M', u'v'),
(0x1D6A0, 'M', u'w'),
(0x1D6A1, 'M', u'x'),
(0x1D6A2, 'M', u'y'),
(0x1D6A3, 'M', u'z'),
(0x1D6A4, 'M', u'ı'),
(0x1D6A5, 'M', u'ȷ'),
(0x1D6A6, 'X'),
(0x1D6A8, 'M', u'α'),
(0x1D6A9, 'M', u'β'),
(0x1D6AA, 'M', u'γ'),
(0x1D6AB, 'M', u'δ'),
(0x1D6AC, 'M', u'ε'),
(0x1D6AD, 'M', u'ζ'),
(0x1D6AE, 'M', u'η'),
(0x1D6AF, 'M', u'θ'),
(0x1D6B0, 'M', u'ι'),
]
def _seg_66():
return [
(0x1D6B1, 'M', u'κ'),
(0x1D6B2, 'M', u'λ'),
(0x1D6B3, 'M', u'μ'),
(0x1D6B4, 'M', u'ν'),
(0x1D6B5, 'M', u'ξ'),
(0x1D6B6, 'M', u'ο'),
(0x1D6B7, 'M', u'π'),
(0x1D6B8, 'M', u'ρ'),
(0x1D6B9, 'M', u'θ'),
(0x1D6BA, 'M', u'σ'),
(0x1D6BB, 'M', u'τ'),
(0x1D6BC, 'M', u'υ'),
(0x1D6BD, 'M', u'φ'),
(0x1D6BE, 'M', u'χ'),
(0x1D6BF, 'M', u'ψ'),
(0x1D6C0, 'M', u'ω'),
(0x1D6C1, 'M', u'∇'),
(0x1D6C2, 'M', u'α'),
(0x1D6C3, 'M', u'β'),
(0x1D6C4, 'M', u'γ'),
(0x1D6C5, 'M', u'δ'),
(0x1D6C6, 'M', u'ε'),
(0x1D6C7, 'M', u'ζ'),
(0x1D6C8, 'M', u'η'),
(0x1D6C9, 'M', u'θ'),
(0x1D6CA, 'M', u'ι'),
(0x1D6CB, 'M', u'κ'),
(0x1D6CC, 'M', u'λ'),
(0x1D6CD, 'M', u'μ'),
(0x1D6CE, 'M', u'ν'),
(0x1D6CF, 'M', u'ξ'),
(0x1D6D0, 'M', u'ο'),
(0x1D6D1, 'M', u'π'),
(0x1D6D2, 'M', u'ρ'),
(0x1D6D3, 'M', u'σ'),
(0x1D6D5, 'M', u'τ'),
(0x1D6D6, 'M', u'υ'),
(0x1D6D7, 'M', u'φ'),
(0x1D6D8, 'M', u'χ'),
(0x1D6D9, 'M', u'ψ'),
(0x1D6DA, 'M', u'ω'),
(0x1D6DB, 'M', u'∂'),
(0x1D6DC, 'M', u'ε'),
(0x1D6DD, 'M', u'θ'),
(0x1D6DE, 'M', u'κ'),
(0x1D6DF, 'M', u'φ'),
(0x1D6E0, 'M', u'ρ'),
(0x1D6E1, 'M', u'π'),
(0x1D6E2, 'M', u'α'),
(0x1D6E3, 'M', u'β'),
(0x1D6E4, 'M', u'γ'),
(0x1D6E5, 'M', u'δ'),
(0x1D6E6, 'M', u'ε'),
(0x1D6E7, 'M', u'ζ'),
(0x1D6E8, 'M', u'η'),
(0x1D6E9, 'M', u'θ'),
(0x1D6EA, 'M', u'ι'),
(0x1D6EB, 'M', u'κ'),
(0x1D6EC, 'M', u'λ'),
(0x1D6ED, 'M', u'μ'),
(0x1D6EE, 'M', u'ν'),
(0x1D6EF, 'M', u'ξ'),
(0x1D6F0, 'M', u'ο'),
(0x1D6F1, 'M', u'π'),
(0x1D6F2, 'M', u'ρ'),
(0x1D6F3, 'M', u'θ'),
(0x1D6F4, 'M', u'σ'),
(0x1D6F5, 'M', u'τ'),
(0x1D6F6, 'M', u'υ'),
(0x1D6F7, 'M', u'φ'),
(0x1D6F8, 'M', u'χ'),
(0x1D6F9, 'M', u'ψ'),
(0x1D6FA, 'M', u'ω'),
(0x1D6FB, 'M', u'∇'),
(0x1D6FC, 'M', u'α'),
(0x1D6FD, 'M', u'β'),
(0x1D6FE, 'M', u'γ'),
(0x1D6FF, 'M', u'δ'),
(0x1D700, 'M', u'ε'),
(0x1D701, 'M', u'ζ'),
(0x1D702, 'M', u'η'),
(0x1D703, 'M', u'θ'),
(0x1D704, 'M', u'ι'),
(0x1D705, 'M', u'κ'),
(0x1D706, 'M', u'λ'),
(0x1D707, 'M', u'μ'),
(0x1D708, 'M', u'ν'),
(0x1D709, 'M', u'ξ'),
(0x1D70A, 'M', u'ο'),
(0x1D70B, 'M', u'π'),
(0x1D70C, 'M', u'ρ'),
(0x1D70D, 'M', u'σ'),
(0x1D70F, 'M', u'τ'),
(0x1D710, 'M', u'υ'),
(0x1D711, 'M', u'φ'),
(0x1D712, 'M', u'χ'),
(0x1D713, 'M', u'ψ'),
(0x1D714, 'M', u'ω'),
(0x1D715, 'M', u'∂'),
(0x1D716, 'M', u'ε'),
]
def _seg_67():
return [
(0x1D717, 'M', u'θ'),
(0x1D718, 'M', u'κ'),
(0x1D719, 'M', u'φ'),
(0x1D71A, 'M', u'ρ'),
(0x1D71B, 'M', u'π'),
(0x1D71C, 'M', u'α'),
(0x1D71D, 'M', u'β'),
(0x1D71E, 'M', u'γ'),
(0x1D71F, 'M', u'δ'),
(0x1D720, 'M', u'ε'),
(0x1D721, 'M', u'ζ'),
(0x1D722, 'M', u'η'),
(0x1D723, 'M', u'θ'),
(0x1D724, 'M', u'ι'),
(0x1D725, 'M', u'κ'),
(0x1D726, 'M', u'λ'),
(0x1D727, 'M', u'μ'),
(0x1D728, 'M', u'ν'),
(0x1D729, 'M', u'ξ'),
(0x1D72A, 'M', u'ο'),
(0x1D72B, 'M', u'π'),
(0x1D72C, 'M', u'ρ'),
(0x1D72D, 'M', u'θ'),
(0x1D72E, 'M', u'σ'),
(0x1D72F, 'M', u'τ'),
(0x1D730, 'M', u'υ'),
(0x1D731, 'M', u'φ'),
(0x1D732, 'M', u'χ'),
(0x1D733, 'M', u'ψ'),
(0x1D734, 'M', u'ω'),
(0x1D735, 'M', u'∇'),
(0x1D736, 'M', u'α'),
(0x1D737, 'M', u'β'),
(0x1D738, 'M', u'γ'),
(0x1D739, 'M', u'δ'),
(0x1D73A, 'M', u'ε'),
(0x1D73B, 'M', u'ζ'),
(0x1D73C, 'M', u'η'),
(0x1D73D, 'M', u'θ'),
(0x1D73E, 'M', u'ι'),
(0x1D73F, 'M', u'κ'),
(0x1D740, 'M', u'λ'),
(0x1D741, 'M', u'μ'),
(0x1D742, 'M', u'ν'),
(0x1D743, 'M', u'ξ'),
(0x1D744, 'M', u'ο'),
(0x1D745, 'M', u'π'),
(0x1D746, 'M', u'ρ'),
(0x1D747, 'M', u'σ'),
(0x1D749, 'M', u'τ'),
(0x1D74A, 'M', u'υ'),
(0x1D74B, 'M', u'φ'),
(0x1D74C, 'M', u'χ'),
(0x1D74D, 'M', u'ψ'),
(0x1D74E, 'M', u'ω'),
(0x1D74F, 'M', u'∂'),
(0x1D750, 'M', u'ε'),
(0x1D751, 'M', u'θ'),
(0x1D752, 'M', u'κ'),
(0x1D753, 'M', u'φ'),
(0x1D754, 'M', u'ρ'),
(0x1D755, 'M', u'π'),
(0x1D756, 'M', u'α'),
(0x1D757, 'M', u'β'),
(0x1D758, 'M', u'γ'),
(0x1D759, 'M', u'δ'),
(0x1D75A, 'M', u'ε'),
(0x1D75B, 'M', u'ζ'),
(0x1D75C, 'M', u'η'),
(0x1D75D, 'M', u'θ'),
(0x1D75E, 'M', u'ι'),
(0x1D75F, 'M', u'κ'),
(0x1D760, 'M', u'λ'),
(0x1D761, 'M', u'μ'),
(0x1D762, 'M', u'ν'),
(0x1D763, 'M', u'ξ'),
(0x1D764, 'M', u'ο'),
(0x1D765, 'M', u'π'),
(0x1D766, 'M', u'ρ'),
(0x1D767, 'M', u'θ'),
(0x1D768, 'M', u'σ'),
(0x1D769, 'M', u'τ'),
(0x1D76A, 'M', u'υ'),
(0x1D76B, 'M', u'φ'),
(0x1D76C, 'M', u'χ'),
(0x1D76D, 'M', u'ψ'),
(0x1D76E, 'M', u'ω'),
(0x1D76F, 'M', u'∇'),
(0x1D770, 'M', u'α'),
(0x1D771, 'M', u'β'),
(0x1D772, 'M', u'γ'),
(0x1D773, 'M', u'δ'),
(0x1D774, 'M', u'ε'),
(0x1D775, 'M', u'ζ'),
(0x1D776, 'M', u'η'),
(0x1D777, 'M', u'θ'),
(0x1D778, 'M', u'ι'),
(0x1D779, 'M', u'κ'),
(0x1D77A, 'M', u'λ'),
(0x1D77B, 'M', u'μ'),
]
def _seg_68():
return [
(0x1D77C, 'M', u'ν'),
(0x1D77D, 'M', u'ξ'),
(0x1D77E, 'M', u'ο'),
(0x1D77F, 'M', u'π'),
(0x1D780, 'M', u'ρ'),
(0x1D781, 'M', u'σ'),
(0x1D783, 'M', u'τ'),
(0x1D784, 'M', u'υ'),
(0x1D785, 'M', u'φ'),
(0x1D786, 'M', u'χ'),
(0x1D787, 'M', u'ψ'),
(0x1D788, 'M', u'ω'),
(0x1D789, 'M', u'∂'),
(0x1D78A, 'M', u'ε'),
(0x1D78B, 'M', u'θ'),
(0x1D78C, 'M', u'κ'),
(0x1D78D, 'M', u'φ'),
(0x1D78E, 'M', u'ρ'),
(0x1D78F, 'M', u'π'),
(0x1D790, 'M', u'α'),
(0x1D791, 'M', u'β'),
(0x1D792, 'M', u'γ'),
(0x1D793, 'M', u'δ'),
(0x1D794, 'M', u'ε'),
(0x1D795, 'M', u'ζ'),
(0x1D796, 'M', u'η'),
(0x1D797, 'M', u'θ'),
(0x1D798, 'M', u'ι'),
(0x1D799, 'M', u'κ'),
(0x1D79A, 'M', u'λ'),
(0x1D79B, 'M', u'μ'),
(0x1D79C, 'M', u'ν'),
(0x1D79D, 'M', u'ξ'),
(0x1D79E, 'M', u'ο'),
(0x1D79F, 'M', u'π'),
(0x1D7A0, 'M', u'ρ'),
(0x1D7A1, 'M', u'θ'),
(0x1D7A2, 'M', u'σ'),
(0x1D7A3, 'M', u'τ'),
(0x1D7A4, 'M', u'υ'),
(0x1D7A5, 'M', u'φ'),
(0x1D7A6, 'M', u'χ'),
(0x1D7A7, 'M', u'ψ'),
(0x1D7A8, 'M', u'ω'),
(0x1D7A9, 'M', u'∇'),
(0x1D7AA, 'M', u'α'),
(0x1D7AB, 'M', u'β'),
(0x1D7AC, 'M', u'γ'),
(0x1D7AD, 'M', u'δ'),
(0x1D7AE, 'M', u'ε'),
(0x1D7AF, 'M', u'ζ'),
(0x1D7B0, 'M', u'η'),
(0x1D7B1, 'M', u'θ'),
(0x1D7B2, 'M', u'ι'),
(0x1D7B3, 'M', u'κ'),
(0x1D7B4, 'M', u'λ'),
(0x1D7B5, 'M', u'μ'),
(0x1D7B6, 'M', u'ν'),
(0x1D7B7, 'M', u'ξ'),
(0x1D7B8, 'M', u'ο'),
(0x1D7B9, 'M', u'π'),
(0x1D7BA, 'M', u'ρ'),
(0x1D7BB, 'M', u'σ'),
(0x1D7BD, 'M', u'τ'),
(0x1D7BE, 'M', u'υ'),
(0x1D7BF, 'M', u'φ'),
(0x1D7C0, 'M', u'χ'),
(0x1D7C1, 'M', u'ψ'),
(0x1D7C2, 'M', u'ω'),
(0x1D7C3, 'M', u'∂'),
(0x1D7C4, 'M', u'ε'),
(0x1D7C5, 'M', u'θ'),
(0x1D7C6, 'M', u'κ'),
(0x1D7C7, 'M', u'φ'),
(0x1D7C8, 'M', u'ρ'),
(0x1D7C9, 'M', u'π'),
(0x1D7CA, 'M', u'ϝ'),
(0x1D7CC, 'X'),
(0x1D7CE, 'M', u'0'),
(0x1D7CF, 'M', u'1'),
(0x1D7D0, 'M', u'2'),
(0x1D7D1, 'M', u'3'),
(0x1D7D2, 'M', u'4'),
(0x1D7D3, 'M', u'5'),
(0x1D7D4, 'M', u'6'),
(0x1D7D5, 'M', u'7'),
(0x1D7D6, 'M', u'8'),
(0x1D7D7, 'M', u'9'),
(0x1D7D8, 'M', u'0'),
(0x1D7D9, 'M', u'1'),
(0x1D7DA, 'M', u'2'),
(0x1D7DB, 'M', u'3'),
(0x1D7DC, 'M', u'4'),
(0x1D7DD, 'M', u'5'),
(0x1D7DE, 'M', u'6'),
(0x1D7DF, 'M', u'7'),
(0x1D7E0, 'M', u'8'),
(0x1D7E1, 'M', u'9'),
(0x1D7E2, 'M', u'0'),
(0x1D7E3, 'M', u'1'),
]
def _seg_69():
return [
(0x1D7E4, 'M', u'2'),
(0x1D7E5, 'M', u'3'),
(0x1D7E6, 'M', u'4'),
(0x1D7E7, 'M', u'5'),
(0x1D7E8, 'M', u'6'),
(0x1D7E9, 'M', u'7'),
(0x1D7EA, 'M', u'8'),
(0x1D7EB, 'M', u'9'),
(0x1D7EC, 'M', u'0'),
(0x1D7ED, 'M', u'1'),
(0x1D7EE, 'M', u'2'),
(0x1D7EF, 'M', u'3'),
(0x1D7F0, 'M', u'4'),
(0x1D7F1, 'M', u'5'),
(0x1D7F2, 'M', u'6'),
(0x1D7F3, 'M', u'7'),
(0x1D7F4, 'M', u'8'),
(0x1D7F5, 'M', u'9'),
(0x1D7F6, 'M', u'0'),
(0x1D7F7, 'M', u'1'),
(0x1D7F8, 'M', u'2'),
(0x1D7F9, 'M', u'3'),
(0x1D7FA, 'M', u'4'),
(0x1D7FB, 'M', u'5'),
(0x1D7FC, 'M', u'6'),
(0x1D7FD, 'M', u'7'),
(0x1D7FE, 'M', u'8'),
(0x1D7FF, 'M', u'9'),
(0x1D800, 'V'),
(0x1DA8C, 'X'),
(0x1DA9B, 'V'),
(0x1DAA0, 'X'),
(0x1DAA1, 'V'),
(0x1DAB0, 'X'),
(0x1E000, 'V'),
(0x1E007, 'X'),
(0x1E008, 'V'),
(0x1E019, 'X'),
(0x1E01B, 'V'),
(0x1E022, 'X'),
(0x1E023, 'V'),
(0x1E025, 'X'),
(0x1E026, 'V'),
(0x1E02B, 'X'),
(0x1E100, 'V'),
(0x1E12D, 'X'),
(0x1E130, 'V'),
(0x1E13E, 'X'),
(0x1E140, 'V'),
(0x1E14A, 'X'),
(0x1E14E, 'V'),
(0x1E150, 'X'),
(0x1E2C0, 'V'),
(0x1E2FA, 'X'),
(0x1E2FF, 'V'),
(0x1E300, 'X'),
(0x1E800, 'V'),
(0x1E8C5, 'X'),
(0x1E8C7, 'V'),
(0x1E8D7, 'X'),
(0x1E900, 'M', u'𞤢'),
(0x1E901, 'M', u'𞤣'),
(0x1E902, 'M', u'𞤤'),
(0x1E903, 'M', u'𞤥'),
(0x1E904, 'M', u'𞤦'),
(0x1E905, 'M', u'𞤧'),
(0x1E906, 'M', u'𞤨'),
(0x1E907, 'M', u'𞤩'),
(0x1E908, 'M', u'𞤪'),
(0x1E909, 'M', u'𞤫'),
(0x1E90A, 'M', u'𞤬'),
(0x1E90B, 'M', u'𞤭'),
(0x1E90C, 'M', u'𞤮'),
(0x1E90D, 'M', u'𞤯'),
(0x1E90E, 'M', u'𞤰'),
(0x1E90F, 'M', u'𞤱'),
(0x1E910, 'M', u'𞤲'),
(0x1E911, 'M', u'𞤳'),
(0x1E912, 'M', u'𞤴'),
(0x1E913, 'M', u'𞤵'),
(0x1E914, 'M', u'𞤶'),
(0x1E915, 'M', u'𞤷'),
(0x1E916, 'M', u'𞤸'),
(0x1E917, 'M', u'𞤹'),
(0x1E918, 'M', u'𞤺'),
(0x1E919, 'M', u'𞤻'),
(0x1E91A, 'M', u'𞤼'),
(0x1E91B, 'M', u'𞤽'),
(0x1E91C, 'M', u'𞤾'),
(0x1E91D, 'M', u'𞤿'),
(0x1E91E, 'M', u'𞥀'),
(0x1E91F, 'M', u'𞥁'),
(0x1E920, 'M', u'𞥂'),
(0x1E921, 'M', u'𞥃'),
(0x1E922, 'V'),
(0x1E94C, 'X'),
(0x1E950, 'V'),
(0x1E95A, 'X'),
(0x1E95E, 'V'),
(0x1E960, 'X'),
]
def _seg_70():
return [
(0x1EC71, 'V'),
(0x1ECB5, 'X'),
(0x1ED01, 'V'),
(0x1ED3E, 'X'),
(0x1EE00, 'M', u'ا'),
(0x1EE01, 'M', u'ب'),
(0x1EE02, 'M', u'ج'),
(0x1EE03, 'M', u'د'),
(0x1EE04, 'X'),
(0x1EE05, 'M', u'و'),
(0x1EE06, 'M', u'ز'),
(0x1EE07, 'M', u'ح'),
(0x1EE08, 'M', u'ط'),
(0x1EE09, 'M', u'ي'),
(0x1EE0A, 'M', u'ك'),
(0x1EE0B, 'M', u'ل'),
(0x1EE0C, 'M', u'م'),
(0x1EE0D, 'M', u'ن'),
(0x1EE0E, 'M', u'س'),
(0x1EE0F, 'M', u'ع'),
(0x1EE10, 'M', u'ف'),
(0x1EE11, 'M', u'ص'),
(0x1EE12, 'M', u'ق'),
(0x1EE13, 'M', u'ر'),
(0x1EE14, 'M', u'ش'),
(0x1EE15, 'M', u'ت'),
(0x1EE16, 'M', u'ث'),
(0x1EE17, 'M', u'خ'),
(0x1EE18, 'M', u'ذ'),
(0x1EE19, 'M', u'ض'),
(0x1EE1A, 'M', u'ظ'),
(0x1EE1B, 'M', u'غ'),
(0x1EE1C, 'M', u'ٮ'),
(0x1EE1D, 'M', u'ں'),
(0x1EE1E, 'M', u'ڡ'),
(0x1EE1F, 'M', u'ٯ'),
(0x1EE20, 'X'),
(0x1EE21, 'M', u'ب'),
(0x1EE22, 'M', u'ج'),
(0x1EE23, 'X'),
(0x1EE24, 'M', u'ه'),
(0x1EE25, 'X'),
(0x1EE27, 'M', u'ح'),
(0x1EE28, 'X'),
(0x1EE29, 'M', u'ي'),
(0x1EE2A, 'M', u'ك'),
(0x1EE2B, 'M', u'ل'),
(0x1EE2C, 'M', u'م'),
(0x1EE2D, 'M', u'ن'),
(0x1EE2E, 'M', u'س'),
(0x1EE2F, 'M', u'ع'),
(0x1EE30, 'M', u'ف'),
(0x1EE31, 'M', u'ص'),
(0x1EE32, 'M', u'ق'),
(0x1EE33, 'X'),
(0x1EE34, 'M', u'ش'),
(0x1EE35, 'M', u'ت'),
(0x1EE36, 'M', u'ث'),
(0x1EE37, 'M', u'خ'),
(0x1EE38, 'X'),
(0x1EE39, 'M', u'ض'),
(0x1EE3A, 'X'),
(0x1EE3B, 'M', u'غ'),
(0x1EE3C, 'X'),
(0x1EE42, 'M', u'ج'),
(0x1EE43, 'X'),
(0x1EE47, 'M', u'ح'),
(0x1EE48, 'X'),
(0x1EE49, 'M', u'ي'),
(0x1EE4A, 'X'),
(0x1EE4B, 'M', u'ل'),
(0x1EE4C, 'X'),
(0x1EE4D, 'M', u'ن'),
(0x1EE4E, 'M', u'س'),
(0x1EE4F, 'M', u'ع'),
(0x1EE50, 'X'),
(0x1EE51, 'M', u'ص'),
(0x1EE52, 'M', u'ق'),
(0x1EE53, 'X'),
(0x1EE54, 'M', u'ش'),
(0x1EE55, 'X'),
(0x1EE57, 'M', u'خ'),
(0x1EE58, 'X'),
(0x1EE59, 'M', u'ض'),
(0x1EE5A, 'X'),
(0x1EE5B, 'M', u'غ'),
(0x1EE5C, 'X'),
(0x1EE5D, 'M', u'ں'),
(0x1EE5E, 'X'),
(0x1EE5F, 'M', u'ٯ'),
(0x1EE60, 'X'),
(0x1EE61, 'M', u'ب'),
(0x1EE62, 'M', u'ج'),
(0x1EE63, 'X'),
(0x1EE64, 'M', u'ه'),
(0x1EE65, 'X'),
(0x1EE67, 'M', u'ح'),
(0x1EE68, 'M', u'ط'),
(0x1EE69, 'M', u'ي'),
(0x1EE6A, 'M', u'ك'),
]
def _seg_71():
return [
(0x1EE6B, 'X'),
(0x1EE6C, 'M', u'م'),
(0x1EE6D, 'M', u'ن'),
(0x1EE6E, 'M', u'س'),
(0x1EE6F, 'M', u'ع'),
(0x1EE70, 'M', u'ف'),
(0x1EE71, 'M', u'ص'),
(0x1EE72, 'M', u'ق'),
(0x1EE73, 'X'),
(0x1EE74, 'M', u'ش'),
(0x1EE75, 'M', u'ت'),
(0x1EE76, 'M', u'ث'),
(0x1EE77, 'M', u'خ'),
(0x1EE78, 'X'),
(0x1EE79, 'M', u'ض'),
(0x1EE7A, 'M', u'ظ'),
(0x1EE7B, 'M', u'غ'),
(0x1EE7C, 'M', u'ٮ'),
(0x1EE7D, 'X'),
(0x1EE7E, 'M', u'ڡ'),
(0x1EE7F, 'X'),
(0x1EE80, 'M', u'ا'),
(0x1EE81, 'M', u'ب'),
(0x1EE82, 'M', u'ج'),
(0x1EE83, 'M', u'د'),
(0x1EE84, 'M', u'ه'),
(0x1EE85, 'M', u'و'),
(0x1EE86, 'M', u'ز'),
(0x1EE87, 'M', u'ح'),
(0x1EE88, 'M', u'ط'),
(0x1EE89, 'M', u'ي'),
(0x1EE8A, 'X'),
(0x1EE8B, 'M', u'ل'),
(0x1EE8C, 'M', u'م'),
(0x1EE8D, 'M', u'ن'),
(0x1EE8E, 'M', u'س'),
(0x1EE8F, 'M', u'ع'),
(0x1EE90, 'M', u'ف'),
(0x1EE91, 'M', u'ص'),
(0x1EE92, 'M', u'ق'),
(0x1EE93, 'M', u'ر'),
(0x1EE94, 'M', u'ش'),
(0x1EE95, 'M', u'ت'),
(0x1EE96, 'M', u'ث'),
(0x1EE97, 'M', u'خ'),
(0x1EE98, 'M', u'ذ'),
(0x1EE99, 'M', u'ض'),
(0x1EE9A, 'M', u'ظ'),
(0x1EE9B, 'M', u'غ'),
(0x1EE9C, 'X'),
(0x1EEA1, 'M', u'ب'),
(0x1EEA2, 'M', u'ج'),
(0x1EEA3, 'M', u'د'),
(0x1EEA4, 'X'),
(0x1EEA5, 'M', u'و'),
(0x1EEA6, 'M', u'ز'),
(0x1EEA7, 'M', u'ح'),
(0x1EEA8, 'M', u'ط'),
(0x1EEA9, 'M', u'ي'),
(0x1EEAA, 'X'),
(0x1EEAB, 'M', u'ل'),
(0x1EEAC, 'M', u'م'),
(0x1EEAD, 'M', u'ن'),
(0x1EEAE, 'M', u'س'),
(0x1EEAF, 'M', u'ع'),
(0x1EEB0, 'M', u'ف'),
(0x1EEB1, 'M', u'ص'),
(0x1EEB2, 'M', u'ق'),
(0x1EEB3, 'M', u'ر'),
(0x1EEB4, 'M', u'ش'),
(0x1EEB5, 'M', u'ت'),
(0x1EEB6, 'M', u'ث'),
(0x1EEB7, 'M', u'خ'),
(0x1EEB8, 'M', u'ذ'),
(0x1EEB9, 'M', u'ض'),
(0x1EEBA, 'M', u'ظ'),
(0x1EEBB, 'M', u'غ'),
(0x1EEBC, 'X'),
(0x1EEF0, 'V'),
(0x1EEF2, 'X'),
(0x1F000, 'V'),
(0x1F02C, 'X'),
(0x1F030, 'V'),
(0x1F094, 'X'),
(0x1F0A0, 'V'),
(0x1F0AF, 'X'),
(0x1F0B1, 'V'),
(0x1F0C0, 'X'),
(0x1F0C1, 'V'),
(0x1F0D0, 'X'),
(0x1F0D1, 'V'),
(0x1F0F6, 'X'),
(0x1F101, '3', u'0,'),
(0x1F102, '3', u'1,'),
(0x1F103, '3', u'2,'),
(0x1F104, '3', u'3,'),
(0x1F105, '3', u'4,'),
(0x1F106, '3', u'5,'),
(0x1F107, '3', u'6,'),
(0x1F108, '3', u'7,'),
]
def _seg_72():
return [
(0x1F109, '3', u'8,'),
(0x1F10A, '3', u'9,'),
(0x1F10B, 'V'),
(0x1F110, '3', u'(a)'),
(0x1F111, '3', u'(b)'),
(0x1F112, '3', u'(c)'),
(0x1F113, '3', u'(d)'),
(0x1F114, '3', u'(e)'),
(0x1F115, '3', u'(f)'),
(0x1F116, '3', u'(g)'),
(0x1F117, '3', u'(h)'),
(0x1F118, '3', u'(i)'),
(0x1F119, '3', u'(j)'),
(0x1F11A, '3', u'(k)'),
(0x1F11B, '3', u'(l)'),
(0x1F11C, '3', u'(m)'),
(0x1F11D, '3', u'(n)'),
(0x1F11E, '3', u'(o)'),
(0x1F11F, '3', u'(p)'),
(0x1F120, '3', u'(q)'),
(0x1F121, '3', u'(r)'),
(0x1F122, '3', u'(s)'),
(0x1F123, '3', u'(t)'),
(0x1F124, '3', u'(u)'),
(0x1F125, '3', u'(v)'),
(0x1F126, '3', u'(w)'),
(0x1F127, '3', u'(x)'),
(0x1F128, '3', u'(y)'),
(0x1F129, '3', u'(z)'),
(0x1F12A, 'M', u'〔s〕'),
(0x1F12B, 'M', u'c'),
(0x1F12C, 'M', u'r'),
(0x1F12D, 'M', u'cd'),
(0x1F12E, 'M', u'wz'),
(0x1F12F, 'V'),
(0x1F130, 'M', u'a'),
(0x1F131, 'M', u'b'),
(0x1F132, 'M', u'c'),
(0x1F133, 'M', u'd'),
(0x1F134, 'M', u'e'),
(0x1F135, 'M', u'f'),
(0x1F136, 'M', u'g'),
(0x1F137, 'M', u'h'),
(0x1F138, 'M', u'i'),
(0x1F139, 'M', u'j'),
(0x1F13A, 'M', u'k'),
(0x1F13B, 'M', u'l'),
(0x1F13C, 'M', u'm'),
(0x1F13D, 'M', u'n'),
(0x1F13E, 'M', u'o'),
(0x1F13F, 'M', u'p'),
(0x1F140, 'M', u'q'),
(0x1F141, 'M', u'r'),
(0x1F142, 'M', u's'),
(0x1F143, 'M', u't'),
(0x1F144, 'M', u'u'),
(0x1F145, 'M', u'v'),
(0x1F146, 'M', u'w'),
(0x1F147, 'M', u'x'),
(0x1F148, 'M', u'y'),
(0x1F149, 'M', u'z'),
(0x1F14A, 'M', u'hv'),
(0x1F14B, 'M', u'mv'),
(0x1F14C, 'M', u'sd'),
(0x1F14D, 'M', u'ss'),
(0x1F14E, 'M', u'ppv'),
(0x1F14F, 'M', u'wc'),
(0x1F150, 'V'),
(0x1F16A, 'M', u'mc'),
(0x1F16B, 'M', u'md'),
(0x1F16C, 'M', u'mr'),
(0x1F16D, 'V'),
(0x1F190, 'M', u'dj'),
(0x1F191, 'V'),
(0x1F1AE, 'X'),
(0x1F1E6, 'V'),
(0x1F200, 'M', u'ほか'),
(0x1F201, 'M', u'ココ'),
(0x1F202, 'M', u'サ'),
(0x1F203, 'X'),
(0x1F210, 'M', u'手'),
(0x1F211, 'M', u'字'),
(0x1F212, 'M', u'双'),
(0x1F213, 'M', u'デ'),
(0x1F214, 'M', u'二'),
(0x1F215, 'M', u'多'),
(0x1F216, 'M', u'解'),
(0x1F217, 'M', u'天'),
(0x1F218, 'M', u'交'),
(0x1F219, 'M', u'映'),
(0x1F21A, 'M', u'無'),
(0x1F21B, 'M', u'料'),
(0x1F21C, 'M', u'前'),
(0x1F21D, 'M', u'後'),
(0x1F21E, 'M', u'再'),
(0x1F21F, 'M', u'新'),
(0x1F220, 'M', u'初'),
(0x1F221, 'M', u'終'),
(0x1F222, 'M', u'生'),
(0x1F223, 'M', u'販'),
]
def _seg_73():
return [
(0x1F224, 'M', u'声'),
(0x1F225, 'M', u'吹'),
(0x1F226, 'M', u'演'),
(0x1F227, 'M', u'投'),
(0x1F228, 'M', u'捕'),
(0x1F229, 'M', u'一'),
(0x1F22A, 'M', u'三'),
(0x1F22B, 'M', u'遊'),
(0x1F22C, 'M', u'左'),
(0x1F22D, 'M', u'中'),
(0x1F22E, 'M', u'右'),
(0x1F22F, 'M', u'指'),
(0x1F230, 'M', u'走'),
(0x1F231, 'M', u'打'),
(0x1F232, 'M', u'禁'),
(0x1F233, 'M', u'空'),
(0x1F234, 'M', u'合'),
(0x1F235, 'M', u'満'),
(0x1F236, 'M', u'有'),
(0x1F237, 'M', u'月'),
(0x1F238, 'M', u'申'),
(0x1F239, 'M', u'割'),
(0x1F23A, 'M', u'営'),
(0x1F23B, 'M', u'配'),
(0x1F23C, 'X'),
(0x1F240, 'M', u'〔本〕'),
(0x1F241, 'M', u'〔三〕'),
(0x1F242, 'M', u'〔二〕'),
(0x1F243, 'M', u'〔安〕'),
(0x1F244, 'M', u'〔点〕'),
(0x1F245, 'M', u'〔打〕'),
(0x1F246, 'M', u'〔盗〕'),
(0x1F247, 'M', u'〔勝〕'),
(0x1F248, 'M', u'〔敗〕'),
(0x1F249, 'X'),
(0x1F250, 'M', u'得'),
(0x1F251, 'M', u'可'),
(0x1F252, 'X'),
(0x1F260, 'V'),
(0x1F266, 'X'),
(0x1F300, 'V'),
(0x1F6D8, 'X'),
(0x1F6E0, 'V'),
(0x1F6ED, 'X'),
(0x1F6F0, 'V'),
(0x1F6FD, 'X'),
(0x1F700, 'V'),
(0x1F774, 'X'),
(0x1F780, 'V'),
(0x1F7D9, 'X'),
(0x1F7E0, 'V'),
(0x1F7EC, 'X'),
(0x1F800, 'V'),
(0x1F80C, 'X'),
(0x1F810, 'V'),
(0x1F848, 'X'),
(0x1F850, 'V'),
(0x1F85A, 'X'),
(0x1F860, 'V'),
(0x1F888, 'X'),
(0x1F890, 'V'),
(0x1F8AE, 'X'),
(0x1F8B0, 'V'),
(0x1F8B2, 'X'),
(0x1F900, 'V'),
(0x1F979, 'X'),
(0x1F97A, 'V'),
(0x1F9CC, 'X'),
(0x1F9CD, 'V'),
(0x1FA54, 'X'),
(0x1FA60, 'V'),
(0x1FA6E, 'X'),
(0x1FA70, 'V'),
(0x1FA75, 'X'),
(0x1FA78, 'V'),
(0x1FA7B, 'X'),
(0x1FA80, 'V'),
(0x1FA87, 'X'),
(0x1FA90, 'V'),
(0x1FAA9, 'X'),
(0x1FAB0, 'V'),
(0x1FAB7, 'X'),
(0x1FAC0, 'V'),
(0x1FAC3, 'X'),
(0x1FAD0, 'V'),
(0x1FAD7, 'X'),
(0x1FB00, 'V'),
(0x1FB93, 'X'),
(0x1FB94, 'V'),
(0x1FBCB, 'X'),
(0x1FBF0, 'M', u'0'),
(0x1FBF1, 'M', u'1'),
(0x1FBF2, 'M', u'2'),
(0x1FBF3, 'M', u'3'),
(0x1FBF4, 'M', u'4'),
(0x1FBF5, 'M', u'5'),
(0x1FBF6, 'M', u'6'),
(0x1FBF7, 'M', u'7'),
(0x1FBF8, 'M', u'8'),
(0x1FBF9, 'M', u'9'),
]
def _seg_74():
return [
(0x1FBFA, 'X'),
(0x20000, 'V'),
(0x2A6DE, 'X'),
(0x2A700, 'V'),
(0x2B735, 'X'),
(0x2B740, 'V'),
(0x2B81E, 'X'),
(0x2B820, 'V'),
(0x2CEA2, 'X'),
(0x2CEB0, 'V'),
(0x2EBE1, 'X'),
(0x2F800, 'M', u'丽'),
(0x2F801, 'M', u'丸'),
(0x2F802, 'M', u'乁'),
(0x2F803, 'M', u'𠄢'),
(0x2F804, 'M', u'你'),
(0x2F805, 'M', u'侮'),
(0x2F806, 'M', u'侻'),
(0x2F807, 'M', u'倂'),
(0x2F808, 'M', u'偺'),
(0x2F809, 'M', u'備'),
(0x2F80A, 'M', u'僧'),
(0x2F80B, 'M', u'像'),
(0x2F80C, 'M', u'㒞'),
(0x2F80D, 'M', u'𠘺'),
(0x2F80E, 'M', u'免'),
(0x2F80F, 'M', u'兔'),
(0x2F810, 'M', u'兤'),
(0x2F811, 'M', u'具'),
(0x2F812, 'M', u'𠔜'),
(0x2F813, 'M', u'㒹'),
(0x2F814, 'M', u'內'),
(0x2F815, 'M', u'再'),
(0x2F816, 'M', u'𠕋'),
(0x2F817, 'M', u'冗'),
(0x2F818, 'M', u'冤'),
(0x2F819, 'M', u'仌'),
(0x2F81A, 'M', u'冬'),
(0x2F81B, 'M', u'况'),
(0x2F81C, 'M', u'𩇟'),
(0x2F81D, 'M', u'凵'),
(0x2F81E, 'M', u'刃'),
(0x2F81F, 'M', u'㓟'),
(0x2F820, 'M', u'刻'),
(0x2F821, 'M', u'剆'),
(0x2F822, 'M', u'割'),
(0x2F823, 'M', u'剷'),
(0x2F824, 'M', u'㔕'),
(0x2F825, 'M', u'勇'),
(0x2F826, 'M', u'勉'),
(0x2F827, 'M', u'勤'),
(0x2F828, 'M', u'勺'),
(0x2F829, 'M', u'包'),
(0x2F82A, 'M', u'匆'),
(0x2F82B, 'M', u'北'),
(0x2F82C, 'M', u'卉'),
(0x2F82D, 'M', u'卑'),
(0x2F82E, 'M', u'博'),
(0x2F82F, 'M', u'即'),
(0x2F830, 'M', u'卽'),
(0x2F831, 'M', u'卿'),
(0x2F834, 'M', u'𠨬'),
(0x2F835, 'M', u'灰'),
(0x2F836, 'M', u'及'),
(0x2F837, 'M', u'叟'),
(0x2F838, 'M', u'𠭣'),
(0x2F839, 'M', u'叫'),
(0x2F83A, 'M', u'叱'),
(0x2F83B, 'M', u'吆'),
(0x2F83C, 'M', u'咞'),
(0x2F83D, 'M', u'吸'),
(0x2F83E, 'M', u'呈'),
(0x2F83F, 'M', u'周'),
(0x2F840, 'M', u'咢'),
(0x2F841, 'M', u'哶'),
(0x2F842, 'M', u'唐'),
(0x2F843, 'M', u'啓'),
(0x2F844, 'M', u'啣'),
(0x2F845, 'M', u'善'),
(0x2F847, 'M', u'喙'),
(0x2F848, 'M', u'喫'),
(0x2F849, 'M', u'喳'),
(0x2F84A, 'M', u'嗂'),
(0x2F84B, 'M', u'圖'),
(0x2F84C, 'M', u'嘆'),
(0x2F84D, 'M', u'圗'),
(0x2F84E, 'M', u'噑'),
(0x2F84F, 'M', u'噴'),
(0x2F850, 'M', u'切'),
(0x2F851, 'M', u'壮'),
(0x2F852, 'M', u'城'),
(0x2F853, 'M', u'埴'),
(0x2F854, 'M', u'堍'),
(0x2F855, 'M', u'型'),
(0x2F856, 'M', u'堲'),
(0x2F857, 'M', u'報'),
(0x2F858, 'M', u'墬'),
(0x2F859, 'M', u'𡓤'),
(0x2F85A, 'M', u'売'),
(0x2F85B, 'M', u'壷'),
]
def _seg_75():
return [
(0x2F85C, 'M', u'夆'),
(0x2F85D, 'M', u'多'),
(0x2F85E, 'M', u'夢'),
(0x2F85F, 'M', u'奢'),
(0x2F860, 'M', u'𡚨'),
(0x2F861, 'M', u'𡛪'),
(0x2F862, 'M', u'姬'),
(0x2F863, 'M', u'娛'),
(0x2F864, 'M', u'娧'),
(0x2F865, 'M', u'姘'),
(0x2F866, 'M', u'婦'),
(0x2F867, 'M', u'㛮'),
(0x2F868, 'X'),
(0x2F869, 'M', u'嬈'),
(0x2F86A, 'M', u'嬾'),
(0x2F86C, 'M', u'𡧈'),
(0x2F86D, 'M', u'寃'),
(0x2F86E, 'M', u'寘'),
(0x2F86F, 'M', u'寧'),
(0x2F870, 'M', u'寳'),
(0x2F871, 'M', u'𡬘'),
(0x2F872, 'M', u'寿'),
(0x2F873, 'M', u'将'),
(0x2F874, 'X'),
(0x2F875, 'M', u'尢'),
(0x2F876, 'M', u'㞁'),
(0x2F877, 'M', u'屠'),
(0x2F878, 'M', u'屮'),
(0x2F879, 'M', u'峀'),
(0x2F87A, 'M', u'岍'),
(0x2F87B, 'M', u'𡷤'),
(0x2F87C, 'M', u'嵃'),
(0x2F87D, 'M', u'𡷦'),
(0x2F87E, 'M', u'嵮'),
(0x2F87F, 'M', u'嵫'),
(0x2F880, 'M', u'嵼'),
(0x2F881, 'M', u'巡'),
(0x2F882, 'M', u'巢'),
(0x2F883, 'M', u'㠯'),
(0x2F884, 'M', u'巽'),
(0x2F885, 'M', u'帨'),
(0x2F886, 'M', u'帽'),
(0x2F887, 'M', u'幩'),
(0x2F888, 'M', u'㡢'),
(0x2F889, 'M', u'𢆃'),
(0x2F88A, 'M', u'㡼'),
(0x2F88B, 'M', u'庰'),
(0x2F88C, 'M', u'庳'),
(0x2F88D, 'M', u'庶'),
(0x2F88E, 'M', u'廊'),
(0x2F88F, 'M', u'𪎒'),
(0x2F890, 'M', u'廾'),
(0x2F891, 'M', u'𢌱'),
(0x2F893, 'M', u'舁'),
(0x2F894, 'M', u'弢'),
(0x2F896, 'M', u'㣇'),
(0x2F897, 'M', u'𣊸'),
(0x2F898, 'M', u'𦇚'),
(0x2F899, 'M', u'形'),
(0x2F89A, 'M', u'彫'),
(0x2F89B, 'M', u'㣣'),
(0x2F89C, 'M', u'徚'),
(0x2F89D, 'M', u'忍'),
(0x2F89E, 'M', u'志'),
(0x2F89F, 'M', u'忹'),
(0x2F8A0, 'M', u'悁'),
(0x2F8A1, 'M', u'㤺'),
(0x2F8A2, 'M', u'㤜'),
(0x2F8A3, 'M', u'悔'),
(0x2F8A4, 'M', u'𢛔'),
(0x2F8A5, 'M', u'惇'),
(0x2F8A6, 'M', u'慈'),
(0x2F8A7, 'M', u'慌'),
(0x2F8A8, 'M', u'慎'),
(0x2F8A9, 'M', u'慌'),
(0x2F8AA, 'M', u'慺'),
(0x2F8AB, 'M', u'憎'),
(0x2F8AC, 'M', u'憲'),
(0x2F8AD, 'M', u'憤'),
(0x2F8AE, 'M', u'憯'),
(0x2F8AF, 'M', u'懞'),
(0x2F8B0, 'M', u'懲'),
(0x2F8B1, 'M', u'懶'),
(0x2F8B2, 'M', u'成'),
(0x2F8B3, 'M', u'戛'),
(0x2F8B4, 'M', u'扝'),
(0x2F8B5, 'M', u'抱'),
(0x2F8B6, 'M', u'拔'),
(0x2F8B7, 'M', u'捐'),
(0x2F8B8, 'M', u'𢬌'),
(0x2F8B9, 'M', u'挽'),
(0x2F8BA, 'M', u'拼'),
(0x2F8BB, 'M', u'捨'),
(0x2F8BC, 'M', u'掃'),
(0x2F8BD, 'M', u'揤'),
(0x2F8BE, 'M', u'𢯱'),
(0x2F8BF, 'M', u'搢'),
(0x2F8C0, 'M', u'揅'),
(0x2F8C1, 'M', u'掩'),
(0x2F8C2, 'M', u'㨮'),
]
def _seg_76():
return [
(0x2F8C3, 'M', u'摩'),
(0x2F8C4, 'M', u'摾'),
(0x2F8C5, 'M', u'撝'),
(0x2F8C6, 'M', u'摷'),
(0x2F8C7, 'M', u'㩬'),
(0x2F8C8, 'M', u'敏'),
(0x2F8C9, 'M', u'敬'),
(0x2F8CA, 'M', u'𣀊'),
(0x2F8CB, 'M', u'旣'),
(0x2F8CC, 'M', u'書'),
(0x2F8CD, 'M', u'晉'),
(0x2F8CE, 'M', u'㬙'),
(0x2F8CF, 'M', u'暑'),
(0x2F8D0, 'M', u'㬈'),
(0x2F8D1, 'M', u'㫤'),
(0x2F8D2, 'M', u'冒'),
(0x2F8D3, 'M', u'冕'),
(0x2F8D4, 'M', u'最'),
(0x2F8D5, 'M', u'暜'),
(0x2F8D6, 'M', u'肭'),
(0x2F8D7, 'M', u'䏙'),
(0x2F8D8, 'M', u'朗'),
(0x2F8D9, 'M', u'望'),
(0x2F8DA, 'M', u'朡'),
(0x2F8DB, 'M', u'杞'),
(0x2F8DC, 'M', u'杓'),
(0x2F8DD, 'M', u'𣏃'),
(0x2F8DE, 'M', u'㭉'),
(0x2F8DF, 'M', u'柺'),
(0x2F8E0, 'M', u'枅'),
(0x2F8E1, 'M', u'桒'),
(0x2F8E2, 'M', u'梅'),
(0x2F8E3, 'M', u'𣑭'),
(0x2F8E4, 'M', u'梎'),
(0x2F8E5, 'M', u'栟'),
(0x2F8E6, 'M', u'椔'),
(0x2F8E7, 'M', u'㮝'),
(0x2F8E8, 'M', u'楂'),
(0x2F8E9, 'M', u'榣'),
(0x2F8EA, 'M', u'槪'),
(0x2F8EB, 'M', u'檨'),
(0x2F8EC, 'M', u'𣚣'),
(0x2F8ED, 'M', u'櫛'),
(0x2F8EE, 'M', u'㰘'),
(0x2F8EF, 'M', u'次'),
(0x2F8F0, 'M', u'𣢧'),
(0x2F8F1, 'M', u'歔'),
(0x2F8F2, 'M', u'㱎'),
(0x2F8F3, 'M', u'歲'),
(0x2F8F4, 'M', u'殟'),
(0x2F8F5, 'M', u'殺'),
(0x2F8F6, 'M', u'殻'),
(0x2F8F7, 'M', u'𣪍'),
(0x2F8F8, 'M', u'𡴋'),
(0x2F8F9, 'M', u'𣫺'),
(0x2F8FA, 'M', u'汎'),
(0x2F8FB, 'M', u'𣲼'),
(0x2F8FC, 'M', u'沿'),
(0x2F8FD, 'M', u'泍'),
(0x2F8FE, 'M', u'汧'),
(0x2F8FF, 'M', u'洖'),
(0x2F900, 'M', u'派'),
(0x2F901, 'M', u'海'),
(0x2F902, 'M', u'流'),
(0x2F903, 'M', u'浩'),
(0x2F904, 'M', u'浸'),
(0x2F905, 'M', u'涅'),
(0x2F906, 'M', u'𣴞'),
(0x2F907, 'M', u'洴'),
(0x2F908, 'M', u'港'),
(0x2F909, 'M', u'湮'),
(0x2F90A, 'M', u'㴳'),
(0x2F90B, 'M', u'滋'),
(0x2F90C, 'M', u'滇'),
(0x2F90D, 'M', u'𣻑'),
(0x2F90E, 'M', u'淹'),
(0x2F90F, 'M', u'潮'),
(0x2F910, 'M', u'𣽞'),
(0x2F911, 'M', u'𣾎'),
(0x2F912, 'M', u'濆'),
(0x2F913, 'M', u'瀹'),
(0x2F914, 'M', u'瀞'),
(0x2F915, 'M', u'瀛'),
(0x2F916, 'M', u'㶖'),
(0x2F917, 'M', u'灊'),
(0x2F918, 'M', u'災'),
(0x2F919, 'M', u'灷'),
(0x2F91A, 'M', u'炭'),
(0x2F91B, 'M', u'𠔥'),
(0x2F91C, 'M', u'煅'),
(0x2F91D, 'M', u'𤉣'),
(0x2F91E, 'M', u'熜'),
(0x2F91F, 'X'),
(0x2F920, 'M', u'爨'),
(0x2F921, 'M', u'爵'),
(0x2F922, 'M', u'牐'),
(0x2F923, 'M', u'𤘈'),
(0x2F924, 'M', u'犀'),
(0x2F925, 'M', u'犕'),
(0x2F926, 'M', u'𤜵'),
]
def _seg_77():
return [
(0x2F927, 'M', u'𤠔'),
(0x2F928, 'M', u'獺'),
(0x2F929, 'M', u'王'),
(0x2F92A, 'M', u'㺬'),
(0x2F92B, 'M', u'玥'),
(0x2F92C, 'M', u'㺸'),
(0x2F92E, 'M', u'瑇'),
(0x2F92F, 'M', u'瑜'),
(0x2F930, 'M', u'瑱'),
(0x2F931, 'M', u'璅'),
(0x2F932, 'M', u'瓊'),
(0x2F933, 'M', u'㼛'),
(0x2F934, 'M', u'甤'),
(0x2F935, 'M', u'𤰶'),
(0x2F936, 'M', u'甾'),
(0x2F937, 'M', u'𤲒'),
(0x2F938, 'M', u'異'),
(0x2F939, 'M', u'𢆟'),
(0x2F93A, 'M', u'瘐'),
(0x2F93B, 'M', u'𤾡'),
(0x2F93C, 'M', u'𤾸'),
(0x2F93D, 'M', u'𥁄'),
(0x2F93E, 'M', u'㿼'),
(0x2F93F, 'M', u'䀈'),
(0x2F940, 'M', u'直'),
(0x2F941, 'M', u'𥃳'),
(0x2F942, 'M', u'𥃲'),
(0x2F943, 'M', u'𥄙'),
(0x2F944, 'M', u'𥄳'),
(0x2F945, 'M', u'眞'),
(0x2F946, 'M', u'真'),
(0x2F948, 'M', u'睊'),
(0x2F949, 'M', u'䀹'),
(0x2F94A, 'M', u'瞋'),
(0x2F94B, 'M', u'䁆'),
(0x2F94C, 'M', u'䂖'),
(0x2F94D, 'M', u'𥐝'),
(0x2F94E, 'M', u'硎'),
(0x2F94F, 'M', u'碌'),
(0x2F950, 'M', u'磌'),
(0x2F951, 'M', u'䃣'),
(0x2F952, 'M', u'𥘦'),
(0x2F953, 'M', u'祖'),
(0x2F954, 'M', u'𥚚'),
(0x2F955, 'M', u'𥛅'),
(0x2F956, 'M', u'福'),
(0x2F957, 'M', u'秫'),
(0x2F958, 'M', u'䄯'),
(0x2F959, 'M', u'穀'),
(0x2F95A, 'M', u'穊'),
(0x2F95B, 'M', u'穏'),
(0x2F95C, 'M', u'𥥼'),
(0x2F95D, 'M', u'𥪧'),
(0x2F95F, 'X'),
(0x2F960, 'M', u'䈂'),
(0x2F961, 'M', u'𥮫'),
(0x2F962, 'M', u'篆'),
(0x2F963, 'M', u'築'),
(0x2F964, 'M', u'䈧'),
(0x2F965, 'M', u'𥲀'),
(0x2F966, 'M', u'糒'),
(0x2F967, 'M', u'䊠'),
(0x2F968, 'M', u'糨'),
(0x2F969, 'M', u'糣'),
(0x2F96A, 'M', u'紀'),
(0x2F96B, 'M', u'𥾆'),
(0x2F96C, 'M', u'絣'),
(0x2F96D, 'M', u'䌁'),
(0x2F96E, 'M', u'緇'),
(0x2F96F, 'M', u'縂'),
(0x2F970, 'M', u'繅'),
(0x2F971, 'M', u'䌴'),
(0x2F972, 'M', u'𦈨'),
(0x2F973, 'M', u'𦉇'),
(0x2F974, 'M', u'䍙'),
(0x2F975, 'M', u'𦋙'),
(0x2F976, 'M', u'罺'),
(0x2F977, 'M', u'𦌾'),
(0x2F978, 'M', u'羕'),
(0x2F979, 'M', u'翺'),
(0x2F97A, 'M', u'者'),
(0x2F97B, 'M', u'𦓚'),
(0x2F97C, 'M', u'𦔣'),
(0x2F97D, 'M', u'聠'),
(0x2F97E, 'M', u'𦖨'),
(0x2F97F, 'M', u'聰'),
(0x2F980, 'M', u'𣍟'),
(0x2F981, 'M', u'䏕'),
(0x2F982, 'M', u'育'),
(0x2F983, 'M', u'脃'),
(0x2F984, 'M', u'䐋'),
(0x2F985, 'M', u'脾'),
(0x2F986, 'M', u'媵'),
(0x2F987, 'M', u'𦞧'),
(0x2F988, 'M', u'𦞵'),
(0x2F989, 'M', u'𣎓'),
(0x2F98A, 'M', u'𣎜'),
(0x2F98B, 'M', u'舁'),
(0x2F98C, 'M', u'舄'),
(0x2F98D, 'M', u'辞'),
]
def _seg_78():
return [
(0x2F98E, 'M', u'䑫'),
(0x2F98F, 'M', u'芑'),
(0x2F990, 'M', u'芋'),
(0x2F991, 'M', u'芝'),
(0x2F992, 'M', u'劳'),
(0x2F993, 'M', u'花'),
(0x2F994, 'M', u'芳'),
(0x2F995, 'M', u'芽'),
(0x2F996, 'M', u'苦'),
(0x2F997, 'M', u'𦬼'),
(0x2F998, 'M', u'若'),
(0x2F999, 'M', u'茝'),
(0x2F99A, 'M', u'荣'),
(0x2F99B, 'M', u'莭'),
(0x2F99C, 'M', u'茣'),
(0x2F99D, 'M', u'莽'),
(0x2F99E, 'M', u'菧'),
(0x2F99F, 'M', u'著'),
(0x2F9A0, 'M', u'荓'),
(0x2F9A1, 'M', u'菊'),
(0x2F9A2, 'M', u'菌'),
(0x2F9A3, 'M', u'菜'),
(0x2F9A4, 'M', u'𦰶'),
(0x2F9A5, 'M', u'𦵫'),
(0x2F9A6, 'M', u'𦳕'),
(0x2F9A7, 'M', u'䔫'),
(0x2F9A8, 'M', u'蓱'),
(0x2F9A9, 'M', u'蓳'),
(0x2F9AA, 'M', u'蔖'),
(0x2F9AB, 'M', u'𧏊'),
(0x2F9AC, 'M', u'蕤'),
(0x2F9AD, 'M', u'𦼬'),
(0x2F9AE, 'M', u'䕝'),
(0x2F9AF, 'M', u'䕡'),
(0x2F9B0, 'M', u'𦾱'),
(0x2F9B1, 'M', u'𧃒'),
(0x2F9B2, 'M', u'䕫'),
(0x2F9B3, 'M', u'虐'),
(0x2F9B4, 'M', u'虜'),
(0x2F9B5, 'M', u'虧'),
(0x2F9B6, 'M', u'虩'),
(0x2F9B7, 'M', u'蚩'),
(0x2F9B8, 'M', u'蚈'),
(0x2F9B9, 'M', u'蜎'),
(0x2F9BA, 'M', u'蛢'),
(0x2F9BB, 'M', u'蝹'),
(0x2F9BC, 'M', u'蜨'),
(0x2F9BD, 'M', u'蝫'),
(0x2F9BE, 'M', u'螆'),
(0x2F9BF, 'X'),
(0x2F9C0, 'M', u'蟡'),
(0x2F9C1, 'M', u'蠁'),
(0x2F9C2, 'M', u'䗹'),
(0x2F9C3, 'M', u'衠'),
(0x2F9C4, 'M', u'衣'),
(0x2F9C5, 'M', u'𧙧'),
(0x2F9C6, 'M', u'裗'),
(0x2F9C7, 'M', u'裞'),
(0x2F9C8, 'M', u'䘵'),
(0x2F9C9, 'M', u'裺'),
(0x2F9CA, 'M', u'㒻'),
(0x2F9CB, 'M', u'𧢮'),
(0x2F9CC, 'M', u'𧥦'),
(0x2F9CD, 'M', u'䚾'),
(0x2F9CE, 'M', u'䛇'),
(0x2F9CF, 'M', u'誠'),
(0x2F9D0, 'M', u'諭'),
(0x2F9D1, 'M', u'變'),
(0x2F9D2, 'M', u'豕'),
(0x2F9D3, 'M', u'𧲨'),
(0x2F9D4, 'M', u'貫'),
(0x2F9D5, 'M', u'賁'),
(0x2F9D6, 'M', u'贛'),
(0x2F9D7, 'M', u'起'),
(0x2F9D8, 'M', u'𧼯'),
(0x2F9D9, 'M', u'𠠄'),
(0x2F9DA, 'M', u'跋'),
(0x2F9DB, 'M', u'趼'),
(0x2F9DC, 'M', u'跰'),
(0x2F9DD, 'M', u'𠣞'),
(0x2F9DE, 'M', u'軔'),
(0x2F9DF, 'M', u'輸'),
(0x2F9E0, 'M', u'𨗒'),
(0x2F9E1, 'M', u'𨗭'),
(0x2F9E2, 'M', u'邔'),
(0x2F9E3, 'M', u'郱'),
(0x2F9E4, 'M', u'鄑'),
(0x2F9E5, 'M', u'𨜮'),
(0x2F9E6, 'M', u'鄛'),
(0x2F9E7, 'M', u'鈸'),
(0x2F9E8, 'M', u'鋗'),
(0x2F9E9, 'M', u'鋘'),
(0x2F9EA, 'M', u'鉼'),
(0x2F9EB, 'M', u'鏹'),
(0x2F9EC, 'M', u'鐕'),
(0x2F9ED, 'M', u'𨯺'),
(0x2F9EE, 'M', u'開'),
(0x2F9EF, 'M', u'䦕'),
(0x2F9F0, 'M', u'閷'),
(0x2F9F1, 'M', u'𨵷'),
]
def _seg_79():
return [
(0x2F9F2, 'M', u'䧦'),
(0x2F9F3, 'M', u'雃'),
(0x2F9F4, 'M', u'嶲'),
(0x2F9F5, 'M', u'霣'),
(0x2F9F6, 'M', u'𩅅'),
(0x2F9F7, 'M', u'𩈚'),
(0x2F9F8, 'M', u'䩮'),
(0x2F9F9, 'M', u'䩶'),
(0x2F9FA, 'M', u'韠'),
(0x2F9FB, 'M', u'𩐊'),
(0x2F9FC, 'M', u'䪲'),
(0x2F9FD, 'M', u'𩒖'),
(0x2F9FE, 'M', u'頋'),
(0x2FA00, 'M', u'頩'),
(0x2FA01, 'M', u'𩖶'),
(0x2FA02, 'M', u'飢'),
(0x2FA03, 'M', u'䬳'),
(0x2FA04, 'M', u'餩'),
(0x2FA05, 'M', u'馧'),
(0x2FA06, 'M', u'駂'),
(0x2FA07, 'M', u'駾'),
(0x2FA08, 'M', u'䯎'),
(0x2FA09, 'M', u'𩬰'),
(0x2FA0A, 'M', u'鬒'),
(0x2FA0B, 'M', u'鱀'),
(0x2FA0C, 'M', u'鳽'),
(0x2FA0D, 'M', u'䳎'),
(0x2FA0E, 'M', u'䳭'),
(0x2FA0F, 'M', u'鵧'),
(0x2FA10, 'M', u'𪃎'),
(0x2FA11, 'M', u'䳸'),
(0x2FA12, 'M', u'𪄅'),
(0x2FA13, 'M', u'𪈎'),
(0x2FA14, 'M', u'𪊑'),
(0x2FA15, 'M', u'麻'),
(0x2FA16, 'M', u'䵖'),
(0x2FA17, 'M', u'黹'),
(0x2FA18, 'M', u'黾'),
(0x2FA19, 'M', u'鼅'),
(0x2FA1A, 'M', u'鼏'),
(0x2FA1B, 'M', u'鼖'),
(0x2FA1C, 'M', u'鼻'),
(0x2FA1D, 'M', u'𪘀'),
(0x2FA1E, 'X'),
(0x30000, 'V'),
(0x3134B, 'X'),
(0xE0100, 'I'),
(0xE01F0, 'X'),
]
uts46data = tuple(
_seg_0()
+ _seg_1()
+ _seg_2()
+ _seg_3()
+ _seg_4()
+ _seg_5()
+ _seg_6()
+ _seg_7()
+ _seg_8()
+ _seg_9()
+ _seg_10()
+ _seg_11()
+ _seg_12()
+ _seg_13()
+ _seg_14()
+ _seg_15()
+ _seg_16()
+ _seg_17()
+ _seg_18()
+ _seg_19()
+ _seg_20()
+ _seg_21()
+ _seg_22()
+ _seg_23()
+ _seg_24()
+ _seg_25()
+ _seg_26()
+ _seg_27()
+ _seg_28()
+ _seg_29()
+ _seg_30()
+ _seg_31()
+ _seg_32()
+ _seg_33()
+ _seg_34()
+ _seg_35()
+ _seg_36()
+ _seg_37()
+ _seg_38()
+ _seg_39()
+ _seg_40()
+ _seg_41()
+ _seg_42()
+ _seg_43()
+ _seg_44()
+ _seg_45()
+ _seg_46()
+ _seg_47()
+ _seg_48()
+ _seg_49()
+ _seg_50()
+ _seg_51()
+ _seg_52()
+ _seg_53()
+ _seg_54()
+ _seg_55()
+ _seg_56()
+ _seg_57()
+ _seg_58()
+ _seg_59()
+ _seg_60()
+ _seg_61()
+ _seg_62()
+ _seg_63()
+ _seg_64()
+ _seg_65()
+ _seg_66()
+ _seg_67()
+ _seg_68()
+ _seg_69()
+ _seg_70()
+ _seg_71()
+ _seg_72()
+ _seg_73()
+ _seg_74()
+ _seg_75()
+ _seg_76()
+ _seg_77()
+ _seg_78()
+ _seg_79()
)
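# --- Editor's sketch (not part of the generated data or idna's API) ---
# Rows above are (start_codepoint, status[, mapping]) tuples sorted by their
# first element; a codepoint is governed by the last row whose start is <= it.
# The real consumer is ``idna.core.uts46_remap``; this helper exists only to
# illustrate the table layout.
def _example_lookup(codepoint):
    """Return the table row covering ``codepoint``."""
    import bisect
    # Building the key list on every call is O(n); fine for an illustration.
    starts = [row[0] for row in uts46data]
    return uts46data[bisect.bisect_right(starts, codepoint) - 1]
# e.g. _example_lookup(0x1D7CE) == (0x1D7CE, 'M', u'0')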
| Django-locallibrary/env/Lib/site-packages/pip/_vendor/idna/uts46data.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/idna/uts46data.py",
"repo_id": "Django-locallibrary",
"token_count": 143458
} | 20 |
"""Nicer log formatting with colours.
Code copied from Tornado, Apache licensed.
"""
# Copyright 2012 Facebook
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import logging
import sys
try:
import curses
except ImportError:
curses = None
def _stderr_supports_color():
color = False
if curses and hasattr(sys.stderr, 'isatty') and sys.stderr.isatty():
try:
curses.setupterm()
if curses.tigetnum("colors") > 0:
color = True
except Exception:
pass
return color
class LogFormatter(logging.Formatter):
"""Log formatter with colour support
"""
DEFAULT_COLORS = {
logging.INFO: 2, # Green
logging.WARNING: 3, # Yellow
logging.ERROR: 1, # Red
logging.CRITICAL: 1,
}
def __init__(self, color=True, datefmt=None):
r"""
:arg bool color: Enables color support.
:arg string fmt: Log message format.
It will be applied to the attributes dict of log records. The
text between ``%(color)s`` and ``%(end_color)s`` will be colored
depending on the level if color support is on.
:arg dict colors: color mappings from logging level to terminal color
code
:arg string datefmt: Datetime format.
Used for formatting ``(asctime)`` placeholder in ``prefix_fmt``.
.. versionchanged:: 3.2
Added ``fmt`` and ``datefmt`` arguments.
"""
logging.Formatter.__init__(self, datefmt=datefmt)
self._colors = {}
if color and _stderr_supports_color():
            # The curses module has some str/bytes confusion in
            # python3. Until version 3.2.3, most methods return
            # bytes, but only accept strings. In addition, we want to
            # output these strings with the logging module, which
            # works with unicode strings. The explicit calls to
            # str(..., "ascii") below decode those bytes correctly
            # on python 3.
fg_color = (curses.tigetstr("setaf") or
curses.tigetstr("setf") or "")
if (3, 0) < sys.version_info < (3, 2, 3):
fg_color = str(fg_color, "ascii")
for levelno, code in self.DEFAULT_COLORS.items():
self._colors[levelno] = str(
curses.tparm(fg_color, code), "ascii")
self._normal = str(curses.tigetstr("sgr0"), "ascii")
            # Measure the terminal width via curses; initscr() briefly
            # switches the terminal into curses mode, so tear the window
            # down again immediately afterwards.
            scr = curses.initscr()
            self.termwidth = scr.getmaxyx()[1]
            curses.endwin()
else:
self._normal = ''
# Default width is usually 80, but too wide is
# worse than too narrow
self.termwidth = 70
def formatMessage(self, record):
mlen = len(record.message)
right_text = '{initial}-{name}'.format(initial=record.levelname[0],
name=record.name)
if mlen + len(right_text) < self.termwidth:
space = ' ' * (self.termwidth - (mlen + len(right_text)))
else:
space = ' '
if record.levelno in self._colors:
start_color = self._colors[record.levelno]
end_color = self._normal
else:
start_color = end_color = ''
return record.message + space + start_color + right_text + end_color
def enable_colourful_output(level=logging.INFO):
handler = logging.StreamHandler()
handler.setFormatter(LogFormatter())
logging.root.addHandler(handler)
logging.root.setLevel(level)
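# --- Editor's usage sketch (not part of the vendored module) ---
# Install the formatter once at startup, then log normally; each record gets
# a right-aligned, optionally coloured ``<level-initial>-<logger name>``
# suffix from ``LogFormatter.formatMessage``.
if __name__ == '__main__':
    enable_colourful_output(logging.DEBUG)
    logging.getLogger('demo').info('stderr colour support: %s',
                                   _stderr_supports_color())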
| Django-locallibrary/env/Lib/site-packages/pip/_vendor/pep517/colorlog.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/pep517/colorlog.py",
"repo_id": "Django-locallibrary",
"token_count": 1736
} | 21 |
# -*- coding: utf-8 -*-
# Copyright (c) 2012 Giorgos Verigakis <[email protected]>
#
# Permission to use, copy, modify, and distribute this software for any
# purpose with or without fee is hereby granted, provided that the above
# copyright notice and this permission notice appear in all copies.
#
# THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
# WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
# ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
# WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
# ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
# OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
from __future__ import unicode_literals
from . import Infinite, Progress
class Counter(Infinite):
def update(self):
self.write(str(self.index))
class Countdown(Progress):
def update(self):
self.write(str(self.remaining))
class Stack(Progress):
phases = (' ', '▁', '▂', '▃', '▄', '▅', '▆', '▇', '█')
def update(self):
nphases = len(self.phases)
i = min(nphases - 1, int(self.progress * nphases))
self.write(self.phases[i])
class Pie(Stack):
phases = ('○', '◔', '◑', '◕', '●')
| Django-locallibrary/env/Lib/site-packages/pip/_vendor/progress/counter.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/progress/counter.py",
"repo_id": "Django-locallibrary",
"token_count": 479
} | 22 |
# -*- coding: utf-8 -*-
"""
requests.hooks
~~~~~~~~~~~~~~
This module provides the capabilities for the Requests hooks system.
Available hooks:
``response``:
The response generated from a Request.
"""
HOOKS = ['response']
def default_hooks():
return {event: [] for event in HOOKS}
# TODO: "response" is currently the only hook event.
def dispatch_hook(key, hooks, hook_data, **kwargs):
"""Dispatches a hook dictionary on a given piece of data."""
hooks = hooks or {}
hooks = hooks.get(key)
    if hooks:
        if hasattr(hooks, '__call__'):
            # A bare callable may be registered instead of a list of hooks.
            hooks = [hooks]
for hook in hooks:
_hook_data = hook(hook_data, **kwargs)
if _hook_data is not None:
hook_data = _hook_data
return hook_data
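# --- Editor's usage sketch (not part of requests' public surface) ---
# ``dispatch_hook`` threads the data through every registered callable; a
# hook that returns None leaves the data unchanged.
if __name__ == '__main__':
    hooks = default_hooks()
    hooks['response'].append(lambda data, **kwargs: data.upper())
    print(dispatch_hook('response', hooks, 'ok'))  # -> 'OK'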
| Django-locallibrary/env/Lib/site-packages/pip/_vendor/requests/hooks.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/requests/hooks.py",
"repo_id": "Django-locallibrary",
"token_count": 306
} | 23 |
__all__ = ["Sequence"]
try:
from collections.abc import Sequence
except ImportError:
from collections import Sequence
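# --- Editor's usage sketch ---
# The shim lets callers run abc checks uniformly across Python versions;
# on Python 3.10+ only the ``collections.abc`` import exists.
if __name__ == '__main__':
    assert isinstance([1, 2], Sequence)
    assert not isinstance({'a': 1}, Sequence)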
| Django-locallibrary/env/Lib/site-packages/pip/_vendor/resolvelib/compat/collections_abc.py/0 | {
"file_path": "Django-locallibrary/env/Lib/site-packages/pip/_vendor/resolvelib/compat/collections_abc.py",
"repo_id": "Django-locallibrary",
"token_count": 36
} | 24 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.