repo_name (string, length 5–100) | path (string, length 4–375) | copies (991 classes) | size (string, length 4–7) | content (string, length 666–1M) | license (15 classes)
---|---|---|---|---|---
OpenUpgrade-dev/OpenUpgrade | addons/crm_helpdesk/report/__init__.py | 442 | 1083 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import crm_helpdesk_report
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
OpenSourcePolicyCenter/PolicyBrain | webapp/apps/register/tests.py | 2 | 2406 | from django.test import TestCase
import pytest
from django.test import Client
from django.core.urlresolvers import reverse
from webapp.apps.register.models import Subscriber
# run this with `py.test --pdb -s -m register`
# actually no. `py.test -q webapp/apps/register/tests.py`
@pytest.mark.register
class RegisterTestCase(TestCase):
    """Tests for newsletter subscription: the Subscriber model and the
    e-mail sign-up form posted to the 'about' page."""
    def test_initial_subscribe(self):
        # A freshly created subscriber starts out inactive; activation
        # happens only after e-mail confirmation.
        subscriber = Subscriber.objects.create(
            email='[email protected]',
        )
        self.assertFalse(subscriber.active)
    def test_post_registration_email(self):
        # Posting an address to the 'about' page must create an
        # inactive Subscriber record for that address.
        c = Client()
        response = c.post(reverse('about'),
                          {'email': '[email protected]'})
        subscriber = Subscriber.objects.get(
            email='[email protected]')
        self.assertFalse(subscriber.active)
    def test_confirm_link_correct(self):
        # The confirmation URL embeds the subscriber's confirm_key as
        # the ?k= query parameter under /register/.
        subscriber = Subscriber.objects.create(
            email='[email protected]',
        )
        self.assertEqual(subscriber.confirm_url("http://ospc-taxes.org"),
                         ("http://ospc-taxes.org/register/?k={}"
                          .format(subscriber.confirm_key)))
# Tests to write:
# User enters in a username that already exists, then changes it and
# resubmits
# User enters an email that is the same as an existing email.
# def test_patching_works(self):
# with patch('django.core.mail.send_mail') as mocked_send_mail:
# from django.core.mail import send_mail
# send_mail(subject="foo", message="bar",
# from_email="andrew <[email protected]>",
# recipient_list = ['farrell <[email protected]>',])
# self.assertTrue(mocked_send_mail.called)
#
# def test_mail_is_sent(self):
# with patch('django.core.mail.send_mail') as mocked_send_mail:
# from webapp.apps.register.models import Subscriber
# subscriber = Subscriber.objects.create(
# email = '[email protected]',
# )
# subscriber.save()
# self.assertFalse(subscriber.active)
# self.assertTrue(mocked_send_mail.called)
# self.assertEqual(mocked_send_mail.call_args['recipient_list'],
# '[email protected]')
# self.assertIn(mocked_send_mail.call_args['message'],
# subscriber.confirm_key)
| mit |
stormi/tsunami | src/secondaires/crafting/fonctions/a_rang.py | 1 | 3868 | # -*-coding:Utf-8 -*
# Copyright (c) 2015 LE GOFF Vincent
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# * Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
# * Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT
# OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
"""Fichier contenant la fonction a_rang."""
from primaires.format.fonctions import supprimer_accents
from primaires.scripting.fonction import Fonction
from primaires.scripting.instruction import ErreurExecution
# NOTE(review): the French docstrings below are runtime help text shown by
# the in-game scripting system; they are kept verbatim, not translated.
class ClasseFonction(Fonction):
    """Retourne vrai si le personnage a le rang indiqué dans la guilde."""
    @classmethod
    def init_types(cls):
        # Two call signatures: (personnage, guilde) and
        # (personnage, guilde, rang).
        cls.ajouter_types(cls.a_rang, "Personnage", "str")
        cls.ajouter_types(cls.a_rang, "Personnage", "str", "str")
    @staticmethod
    def a_rang(personnage, cle_guilde, cle_rang=""):
        """Retourne vrai si le personnage est du rang indiqué.
        Vous pouvez ne préciser que le personnage et la clé de la
        guilde pour savoir si le personnage est membre de la guilde,
        à quelque rang que ce soit. Consultez les exemples
        ci-dessous. Si vous précisez une clé de rang en troisième
        paramètre, cette fonction retourne vrai si le personnage est
        de ce rang ou de rang supérieur.
        Paramètres à préciser :
          * personnage : le personnage (membre ou non)
          * cle_guilde : la clé de la guilde (une chaîne)
          * cle_rang (optionnel) : la clé de rang (une chaîne)
        Exemples d'utilisation :
          # On admet une guilde 'forgerons' avec les rangs suivants :
          # - apprenti
          # - compagnon
          # - maitre
          si a_rang(personnage, "forgerons"):
              # personnage est membre de la guilde
          sinon:
              # personnage n'est ni apprenti, ni compagnon, ni maître
          finsi
          si a_rang(personnage, "forgerons", "compagnon"):
              # personnage est soit compagnon soit maitre
              ...
          finsi
        """
        cle_guilde = cle_guilde.lower()
        if cle_guilde not in importeur.crafting.guildes:
            raise ErreurExecution("La guilde {} n'existe pas".format(
                    repr(cle_guilde)))
        guilde = importeur.crafting.guildes[cle_guilde]
        if personnage not in guilde.membres:
            return False
        if cle_rang == "":
            # No rank key: membership alone is enough.
            return personnage in guilde.membres
        rang = guilde.get_rang(cle_rang)
        # BUG FIX: the member's rank was assigned to 'à_rang' but read back
        # through the undefined name 'p_rang', so every call that supplied a
        # rank key raised NameError. Bind and read the same (ASCII) name.
        p_rang = guilde.membres[personnage].rang
        # True when the requested rank is the member's rank or one of its
        # parents, i.e. the member holds that rank or a higher one.
        return rang in p_rang.rangs_parents
| bsd-3-clause |
hfp/libxsmm | samples/deeplearning/sparse_training/fairseq/tests/test_binaries.py | 1 | 40028 | # Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
import contextlib
from io import StringIO
import logging
import os
import random
import tempfile
import unittest
import torch
from fairseq import options
from fairseq_cli import train
from fairseq_cli import eval_lm
from fairseq_cli import validate
from tests.utils import (
create_dummy_data,
preprocess_lm_data,
preprocess_translation_data,
train_translation_model,
generate_main,
)
class TestTranslation(unittest.TestCase):
    """End-to-end smoke tests for fairseq translation architectures.

    Each test builds a tiny dummy dataset in a temporary directory,
    trains the named architecture briefly, then runs generation, with
    all console output redirected to a throwaway StringIO.
    """
    def setUp(self):
        # Silence all logging while a test runs.
        logging.disable(logging.CRITICAL)
    def tearDown(self):
        # Restore logging for subsequent tests.
        logging.disable(logging.NOTSET)
    def test_fconv(self):
        with contextlib.redirect_stdout(StringIO()):
            with tempfile.TemporaryDirectory('test_fconv') as data_dir:
                create_dummy_data(data_dir)
                preprocess_translation_data(data_dir)
                train_translation_model(data_dir, 'fconv_iwslt_de_en')
                generate_main(data_dir)
    def test_raw(self):
        with contextlib.redirect_stdout(StringIO()):
            with tempfile.TemporaryDirectory('test_fconv_raw') as data_dir:
                create_dummy_data(data_dir)
                preprocess_translation_data(data_dir, ['--dataset-impl', 'raw'])
                train_translation_model(data_dir, 'fconv_iwslt_de_en', ['--dataset-impl', 'raw'])
                generate_main(data_dir, ['--dataset-impl', 'raw'])
    def test_update_freq(self):
        with contextlib.redirect_stdout(StringIO()):
            with tempfile.TemporaryDirectory('test_update_freq') as data_dir:
                create_dummy_data(data_dir)
                preprocess_translation_data(data_dir)
                train_translation_model(data_dir, 'fconv_iwslt_de_en', ['--update-freq', '3'])
                generate_main(data_dir)
    def test_max_positions(self):
        with contextlib.redirect_stdout(StringIO()):
            with tempfile.TemporaryDirectory('test_max_positions') as data_dir:
                create_dummy_data(data_dir)
                preprocess_translation_data(data_dir)
                # Training with a tiny position limit must fail with a
                # message pointing at --skip-invalid-size-inputs-valid-test.
                with self.assertRaises(Exception) as context:
                    train_translation_model(
                        data_dir, 'fconv_iwslt_de_en', ['--max-target-positions', '5'],
                    )
                self.assertTrue(
                    'skip this example with --skip-invalid-size-inputs-valid-test' in str(context.exception)
                )
                train_translation_model(
                    data_dir, 'fconv_iwslt_de_en',
                    ['--max-target-positions', '5', '--skip-invalid-size-inputs-valid-test'],
                )
                with self.assertRaises(Exception) as context:
                    generate_main(data_dir)
                generate_main(data_dir, ['--skip-invalid-size-inputs-valid-test'])
    def test_generation(self):
        with contextlib.redirect_stdout(StringIO()):
            with tempfile.TemporaryDirectory('test_sampling') as data_dir:
                create_dummy_data(data_dir)
                preprocess_translation_data(data_dir)
                train_translation_model(data_dir, 'fconv_iwslt_de_en')
                generate_main(data_dir, [
                    '--sampling',
                    '--temperature', '2',
                    '--beam', '2',
                    '--nbest', '2',
                ])
                generate_main(data_dir, [
                    '--sampling',
                    '--sampling-topk', '3',
                    '--beam', '2',
                    '--nbest', '2',
                ])
                generate_main(data_dir, [
                    '--sampling',
                    '--sampling-topp', '0.2',
                    '--beam', '2',
                    '--nbest', '2',
                ])
                generate_main(data_dir, [
                    '--diversity-rate', '0.5',
                    '--beam', '6',
                ])
                # --match-source-len is incompatible with diverse beam groups.
                with self.assertRaises(ValueError):
                    generate_main(data_dir, [
                        '--diverse-beam-groups', '4',
                        '--match-source-len',
                    ])
                generate_main(data_dir, ['--prefix-size', '2'])
                generate_main(data_dir, ['--retain-dropout'])
    def test_eval_bleu(self):
        with contextlib.redirect_stdout(StringIO()):
            with tempfile.TemporaryDirectory('test_eval_bleu') as data_dir:
                create_dummy_data(data_dir)
                preprocess_translation_data(data_dir)
                train_translation_model(data_dir, 'fconv_iwslt_de_en', [
                    '--eval-bleu',
                    '--eval-bleu-print-samples',
                    '--eval-bleu-remove-bpe',
                    '--eval-bleu-detok', 'space',
                    '--eval-bleu-args', '{"beam": 4, "min_len": 10}',
                ])
    def test_lstm(self):
        with contextlib.redirect_stdout(StringIO()):
            with tempfile.TemporaryDirectory('test_lstm') as data_dir:
                create_dummy_data(data_dir)
                preprocess_translation_data(data_dir)
                train_translation_model(data_dir, 'lstm_wiseman_iwslt_de_en', [
                    '--encoder-layers', '2',
                    '--decoder-layers', '2',
                    '--encoder-embed-dim', '8',
                    '--decoder-embed-dim', '8',
                    '--decoder-out-embed-dim', '8',
                ])
                generate_main(data_dir)
    def test_lstm_bidirectional(self):
        with contextlib.redirect_stdout(StringIO()):
            with tempfile.TemporaryDirectory('test_lstm_bidirectional') as data_dir:
                create_dummy_data(data_dir)
                preprocess_translation_data(data_dir)
                train_translation_model(data_dir, 'lstm', [
                    '--encoder-layers', '2',
                    '--encoder-bidirectional',
                    '--encoder-hidden-size', '16',
                    '--encoder-embed-dim', '8',
                    '--decoder-embed-dim', '8',
                    '--decoder-out-embed-dim', '8',
                    '--decoder-layers', '2',
                ])
                generate_main(data_dir)
    def test_transformer(self):
        with contextlib.redirect_stdout(StringIO()):
            with tempfile.TemporaryDirectory('test_transformer') as data_dir:
                create_dummy_data(data_dir)
                preprocess_translation_data(data_dir)
                train_translation_model(data_dir, 'transformer_iwslt_de_en', [
                    '--encoder-layers', '2',
                    '--decoder-layers', '2',
                    '--encoder-embed-dim', '8',
                    '--decoder-embed-dim', '8',
                ], run_validation=True)
                generate_main(data_dir)
    def test_multilingual_transformer(self):
        # test with all combinations of encoder/decoder lang tokens
        encoder_langtok_flags = [[], ['--encoder-langtok', 'src'], ['--encoder-langtok', 'tgt']]
        decoder_langtok_flags = [[], ['--decoder-langtok']]
        with contextlib.redirect_stdout(StringIO()):
            for i in range(len(encoder_langtok_flags)):
                for j in range(len(decoder_langtok_flags)):
                    enc_ltok_flag = encoder_langtok_flags[i]
                    dec_ltok_flag = decoder_langtok_flags[j]
                    with tempfile.TemporaryDirectory(f'test_multilingual_transformer_{i}_{j}') as data_dir:
                        create_dummy_data(data_dir)
                        preprocess_translation_data(data_dir)
                        train_translation_model(
                            data_dir,
                            arch='multilingual_transformer',
                            task='multilingual_translation',
                            extra_flags=[
                                '--encoder-layers', '2',
                                '--decoder-layers', '2',
                                '--encoder-embed-dim', '8',
                                '--decoder-embed-dim', '8',
                            ] + enc_ltok_flag + dec_ltok_flag,
                            lang_flags=['--lang-pairs', 'in-out,out-in'],
                            run_validation=True,
                            extra_valid_flags=enc_ltok_flag + dec_ltok_flag,
                        )
                        generate_main(
                            data_dir,
                            extra_flags=[
                                '--task', 'multilingual_translation',
                                '--lang-pairs', 'in-out,out-in',
                                '--source-lang', 'in',
                                '--target-lang', 'out',
                            ] + enc_ltok_flag + dec_ltok_flag,
                        )
    def test_translation_multi_simple_epoch(self):
        # test with all combinations of encoder/decoder lang tokens
        encoder_langtok_flags = [[], ['--encoder-langtok', 'src'], ['--encoder-langtok', 'tgt']]
        decoder_langtok_flags = [[], ['--decoder-langtok']]
        with contextlib.redirect_stdout(StringIO()):
            for i in range(len(encoder_langtok_flags)):
                for j in range(len(decoder_langtok_flags)):
                    enc_ltok_flag = encoder_langtok_flags[i]
                    dec_ltok_flag = decoder_langtok_flags[j]
                    with tempfile.TemporaryDirectory(f'test_translation_multi_simple_epoch_{i}_{j}') as data_dir:
                        create_dummy_data(data_dir)
                        preprocess_translation_data(data_dir)
                        train_translation_model(
                            data_dir,
                            arch='transformer',
                            task='translation_multi_simple_epoch',
                            extra_flags=[
                                '--encoder-layers', '2',
                                '--decoder-layers', '2',
                                '--encoder-embed-dim', '8',
                                '--decoder-embed-dim', '8',
                                '--sampling-method', 'temperature',
                                '--sampling-temperature', '1.5',
                                '--virtual-epoch-size', '1000',
                            ] + enc_ltok_flag + dec_ltok_flag,
                            lang_flags=['--lang-pairs', 'in-out,out-in'],
                            run_validation=True,
                            extra_valid_flags=enc_ltok_flag + dec_ltok_flag,
                        )
                        generate_main(
                            data_dir,
                            extra_flags=[
                                '--task', 'translation_multi_simple_epoch',
                                '--lang-pairs', 'in-out,out-in',
                                '--source-lang', 'in',
                                '--target-lang', 'out',
                            ] + enc_ltok_flag + dec_ltok_flag,
                        )
    def test_transformer_cross_self_attention(self):
        with contextlib.redirect_stdout(StringIO()):
            with tempfile.TemporaryDirectory('test_transformer_cross_self_attention') as data_dir:
                create_dummy_data(data_dir)
                preprocess_translation_data(data_dir)
                train_translation_model(data_dir, 'transformer_iwslt_de_en', [
                    '--encoder-layers', '2',
                    '--decoder-layers', '2',
                    '--encoder-embed-dim', '8',
                    '--decoder-embed-dim', '8',
                    # NOTE(review): '--decoder-embed-dim' is passed twice with
                    # the same value; redundant (last occurrence wins) but
                    # harmless.
                    '--decoder-embed-dim', '8',
                    '--no-cross-attention',
                    '--cross-self-attention',
                ], run_validation=True)
                generate_main(data_dir, extra_flags=[])
    def test_lightconv(self):
        with contextlib.redirect_stdout(StringIO()):
            with tempfile.TemporaryDirectory('test_lightconv') as data_dir:
                create_dummy_data(data_dir)
                preprocess_translation_data(data_dir)
                train_translation_model(data_dir, 'lightconv_iwslt_de_en', [
                    '--encoder-conv-type', 'lightweight',
                    '--decoder-conv-type', 'lightweight',
                    '--encoder-embed-dim', '8',
                    '--decoder-embed-dim', '8',
                ])
                generate_main(data_dir)
    def test_dynamicconv(self):
        with contextlib.redirect_stdout(StringIO()):
            with tempfile.TemporaryDirectory('test_dynamicconv') as data_dir:
                create_dummy_data(data_dir)
                preprocess_translation_data(data_dir)
                train_translation_model(data_dir, 'lightconv_iwslt_de_en', [
                    '--encoder-conv-type', 'dynamic',
                    '--decoder-conv-type', 'dynamic',
                    '--encoder-embed-dim', '8',
                    '--decoder-embed-dim', '8',
                ])
                generate_main(data_dir)
    def test_cmlm_transformer(self):
        with contextlib.redirect_stdout(StringIO()):
            with tempfile.TemporaryDirectory('test_cmlm_transformer') as data_dir:
                create_dummy_data(data_dir)
                preprocess_translation_data(data_dir, ['--joined-dictionary'])
                train_translation_model(data_dir, 'cmlm_transformer', [
                    '--apply-bert-init',
                    '--criterion', 'nat_loss',
                    '--noise', 'full_mask',
                    '--pred-length-offset',
                    '--length-loss-factor', '0.1'
                ], task='translation_lev')
                generate_main(data_dir, [
                    '--task', 'translation_lev',
                    '--iter-decode-max-iter', '9',
                    '--iter-decode-eos-penalty', '0',
                    '--print-step',
                ])
    def test_nonautoregressive_transformer(self):
        with contextlib.redirect_stdout(StringIO()):
            with tempfile.TemporaryDirectory('test_nonautoregressive_transformer') as data_dir:
                create_dummy_data(data_dir)
                preprocess_translation_data(data_dir, ['--joined-dictionary'])
                train_translation_model(data_dir, 'nonautoregressive_transformer', [
                    '--apply-bert-init', '--src-embedding-copy', '--criterion',
                    'nat_loss', '--noise', 'full_mask', '--pred-length-offset',
                    '--length-loss-factor', '0.1'
                ], task='translation_lev')
                generate_main(data_dir, [
                    '--task', 'translation_lev',
                    '--iter-decode-max-iter', '0',
                    '--iter-decode-eos-penalty', '0',
                    '--print-step',
                ])
    # NOTE(review): test below is disabled in SOURCE; kept verbatim.
    # def test_nat_crf_transformer(self):
    #     with contextlib.redirect_stdout(StringIO()):
    #         with tempfile.TemporaryDirectory('test_nat_crf_transformer') as data_dir:
    #             create_dummy_data(data_dir)
    #             preprocess_translation_data(data_dir, ['--joined-dictionary'])
    #             train_translation_model(data_dir, 'nacrf_transformer', [
    #                 '--apply-bert-init', '--criterion',
    #                 'nat_loss', '--noise', 'full_mask', '--pred-length-offset',
    #                 '--length-loss-factor', '0.1',
    #                 '--word-ins-loss-factor', '0.5',
    #                 '--crf-lowrank-approx', '1',
    #                 '--crf-beam-approx', '1'
    #             ], task='translation_lev')
    #             generate_main(data_dir, [
    #                 '--task', 'translation_lev',
    #                 '--iter-decode-max-iter', '0',
    #                 '--iter-decode-eos-penalty', '0',
    #                 '--print-step',
    #             ])
    def test_iterative_nonautoregressive_transformer(self):
        with contextlib.redirect_stdout(StringIO()):
            with tempfile.TemporaryDirectory('test_iterative_nonautoregressive_transformer') as data_dir:
                create_dummy_data(data_dir)
                preprocess_translation_data(data_dir, ['--joined-dictionary'])
                train_translation_model(data_dir, 'iterative_nonautoregressive_transformer', [
                    '--apply-bert-init', '--src-embedding-copy', '--criterion',
                    'nat_loss', '--noise', 'full_mask', '--stochastic-approx',
                    '--dae-ratio', '0.5', '--train-step', '3'
                ], task='translation_lev')
                generate_main(data_dir, [
                    '--task', 'translation_lev',
                    '--iter-decode-max-iter', '9',
                    '--iter-decode-eos-penalty', '0',
                    '--print-step',
                ])
    def test_insertion_transformer(self):
        with contextlib.redirect_stdout(StringIO()):
            with tempfile.TemporaryDirectory('test_insertion_transformer') as data_dir:
                create_dummy_data(data_dir)
                preprocess_translation_data(data_dir, ['--joined-dictionary'])
                train_translation_model(data_dir, 'insertion_transformer', [
                    '--apply-bert-init', '--criterion', 'nat_loss', '--noise',
                    'random_mask'
                ], task='translation_lev')
                generate_main(data_dir, [
                    '--task', 'translation_lev',
                    '--iter-decode-max-iter', '9',
                    '--iter-decode-eos-penalty', '0',
                    '--print-step',
                ])
    def test_mixture_of_experts(self):
        with contextlib.redirect_stdout(StringIO()):
            with tempfile.TemporaryDirectory('test_moe') as data_dir:
                create_dummy_data(data_dir)
                preprocess_translation_data(data_dir)
                train_translation_model(data_dir, 'transformer_iwslt_de_en', [
                    '--task', 'translation_moe',
                    '--user-dir', 'examples/translation_moe/src',
                    '--method', 'hMoElp',
                    '--mean-pool-gating-network',
                    '--num-experts', '3',
                    '--encoder-layers', '2',
                    '--decoder-layers', '2',
                    '--encoder-embed-dim', '8',
                    '--decoder-embed-dim', '8',
                ])
                generate_main(data_dir, [
                    '--task', 'translation_moe',
                    '--user-dir', 'examples/translation_moe/src',
                    '--method', 'hMoElp',
                    '--mean-pool-gating-network',
                    '--num-experts', '3',
                    '--gen-expert', '0'
                ])
    def test_alignment(self):
        with contextlib.redirect_stdout(StringIO()):
            with tempfile.TemporaryDirectory('test_alignment') as data_dir:
                create_dummy_data(data_dir, alignment=True)
                preprocess_translation_data(data_dir, ['--align-suffix', 'align'])
                train_translation_model(
                    data_dir,
                    'transformer_align',
                    [
                        '--encoder-layers', '2',
                        '--decoder-layers', '2',
                        '--encoder-embed-dim', '8',
                        '--decoder-embed-dim', '8',
                        '--load-alignments',
                        '--alignment-layer', '1',
                        '--criterion', 'label_smoothed_cross_entropy_with_alignment'
                    ],
                    run_validation=True,
                )
                generate_main(data_dir)
class TestStories(unittest.TestCase):
    """Smoke test for the convolutional self-attention story model,
    including the two-stage pretrained/fusion training path."""
    def setUp(self):
        # Silence all logging while a test runs.
        logging.disable(logging.CRITICAL)
    def tearDown(self):
        # Restore logging for subsequent tests.
        logging.disable(logging.NOTSET)
    def test_fconv_self_att_wp(self):
        with contextlib.redirect_stdout(StringIO()):
            with tempfile.TemporaryDirectory('test_fconv_self_att_wp') as data_dir:
                create_dummy_data(data_dir)
                preprocess_translation_data(data_dir)
                config = [
                    '--encoder-layers', '[(128, 3)] * 2',
                    '--decoder-layers', '[(128, 3)] * 2',
                    '--decoder-attention', 'True',
                    '--encoder-attention', 'False',
                    '--gated-attention', 'True',
                    '--self-attention', 'True',
                    '--project-input', 'True',
                    '--encoder-embed-dim', '8',
                    '--decoder-embed-dim', '8',
                    '--decoder-out-embed-dim', '8',
                    '--multihead-self-attention-nheads', '2'
                ]
                train_translation_model(data_dir, 'fconv_self_att_wp', config)
                generate_main(data_dir)
                # fusion model: reuse the first run's checkpoint as the
                # pretrained model and train a second, fused model on top.
                os.rename(os.path.join(data_dir, 'checkpoint_last.pt'), os.path.join(data_dir, 'pretrained.pt'))
                config.extend([
                    '--pretrained', 'True',
                    '--pretrained-checkpoint', os.path.join(data_dir, 'pretrained.pt'),
                    '--save-dir', os.path.join(data_dir, 'fusion_model'),
                ])
                train_translation_model(data_dir, 'fconv_self_att_wp', config)
class TestLanguageModeling(unittest.TestCase):
    """Smoke tests for language-modeling architectures: train one epoch,
    score with eval_lm, then sample from the trained model."""
    def setUp(self):
        # Silence all logging while a test runs.
        logging.disable(logging.CRITICAL)
    def tearDown(self):
        # Restore logging for subsequent tests.
        logging.disable(logging.NOTSET)
    def test_fconv_lm(self):
        with contextlib.redirect_stdout(StringIO()):
            with tempfile.TemporaryDirectory('test_fconv_lm') as data_dir:
                create_dummy_data(data_dir)
                preprocess_lm_data(data_dir)
                train_language_model(data_dir, 'fconv_lm', [
                    '--decoder-layers', '[(850, 3)] * 2 + [(1024,4)]',
                    '--decoder-embed-dim', '280',
                    '--optimizer', 'nag',
                    '--lr', '0.1',
                ])
                eval_lm_main(data_dir)
                generate_main(data_dir, [
                    '--task', 'language_modeling',
                    '--sample-break-mode', 'eos',
                    '--tokens-per-sample', '500',
                ])
    def test_transformer_lm(self):
        with contextlib.redirect_stdout(StringIO()):
            with tempfile.TemporaryDirectory('test_transformer_lm') as data_dir:
                create_dummy_data(data_dir)
                preprocess_lm_data(data_dir)
                train_language_model(
                    data_dir, 'transformer_lm', ['--add-bos-token'], run_validation=True,
                )
                eval_lm_main(data_dir)
                generate_main(data_dir, [
                    '--task', 'language_modeling',
                    '--sample-break-mode', 'eos',
                    '--tokens-per-sample', '500',
                ])
    def test_lightconv_lm(self):
        with contextlib.redirect_stdout(StringIO()):
            with tempfile.TemporaryDirectory('test_lightconv_lm') as data_dir:
                create_dummy_data(data_dir)
                preprocess_lm_data(data_dir)
                train_language_model(
                    data_dir, 'lightconv_lm', ['--add-bos-token'], run_validation=True,
                )
                eval_lm_main(data_dir)
                generate_main(data_dir, [
                    '--task', 'language_modeling',
                    '--sample-break-mode', 'eos',
                    '--tokens-per-sample', '500',
                ])
    def test_lstm_lm(self):
        with contextlib.redirect_stdout(StringIO()):
            with tempfile.TemporaryDirectory('test_lstm_lm') as data_dir:
                create_dummy_data(data_dir)
                preprocess_lm_data(data_dir)
                train_language_model(
                    data_dir, 'lstm_lm', ['--add-bos-token'], run_validation=True,
                )
                eval_lm_main(data_dir)
                generate_main(data_dir, [
                    '--task', 'language_modeling',
                    '--sample-break-mode', 'eos',
                    '--tokens-per-sample', '500',
                ])
    def test_lstm_lm_residuals(self):
        with contextlib.redirect_stdout(StringIO()):
            with tempfile.TemporaryDirectory('test_lstm_lm_residuals') as data_dir:
                create_dummy_data(data_dir)
                preprocess_lm_data(data_dir)
                train_language_model(
                    data_dir, 'lstm_lm', ['--add-bos-token', '--residuals'], run_validation=True,
                )
                eval_lm_main(data_dir)
                generate_main(data_dir, [
                    '--task', 'language_modeling',
                    '--sample-break-mode', 'eos',
                    '--tokens-per-sample', '500',
                ])
class TestMaskedLanguageModel(unittest.TestCase):
    """Smoke tests for masked LMs (legacy and RoBERTa-style), RoBERTa
    classification/regression heads, and MLM-pretrained translation."""
    def setUp(self):
        # Silence all logging while a test runs.
        logging.disable(logging.CRITICAL)
    def tearDown(self):
        # Restore logging for subsequent tests.
        logging.disable(logging.NOTSET)
    def test_legacy_masked_lm(self):
        with contextlib.redirect_stdout(StringIO()):
            with tempfile.TemporaryDirectory("test_legacy_mlm") as data_dir:
                create_dummy_data(data_dir)
                preprocess_lm_data(data_dir)
                train_legacy_masked_language_model(data_dir, "masked_lm")
    def test_roberta_masked_lm(self):
        with contextlib.redirect_stdout(StringIO()):
            with tempfile.TemporaryDirectory("test_roberta_mlm") as data_dir:
                create_dummy_data(data_dir)
                preprocess_lm_data(data_dir)
                train_masked_lm(data_dir, "roberta_base")
    def test_roberta_sentence_prediction(self):
        num_classes = 3
        with contextlib.redirect_stdout(StringIO()):
            with tempfile.TemporaryDirectory("test_roberta_head") as data_dir:
                create_dummy_roberta_head_data(data_dir, num_classes=num_classes)
                preprocess_lm_data(os.path.join(data_dir, 'input0'))
                preprocess_lm_data(os.path.join(data_dir, 'label'))
                train_roberta_head(data_dir, "roberta_base", num_classes=num_classes)
    def test_roberta_regression_single(self):
        num_classes = 1
        with contextlib.redirect_stdout(StringIO()):
            with tempfile.TemporaryDirectory("test_roberta_regression_single") as data_dir:
                create_dummy_roberta_head_data(data_dir, num_classes=num_classes, regression=True)
                preprocess_lm_data(os.path.join(data_dir, 'input0'))
                train_roberta_head(data_dir, "roberta_base", num_classes=num_classes, extra_flags=['--regression-target'])
    def test_roberta_regression_multiple(self):
        num_classes = 3
        with contextlib.redirect_stdout(StringIO()):
            with tempfile.TemporaryDirectory("test_roberta_regression_multiple") as data_dir:
                create_dummy_roberta_head_data(data_dir, num_classes=num_classes, regression=True)
                preprocess_lm_data(os.path.join(data_dir, 'input0'))
                train_roberta_head(data_dir, "roberta_base", num_classes=num_classes, extra_flags=['--regression-target'])
    def _test_pretrained_masked_lm_for_translation(self, learned_pos_emb, encoder_only):
        # Shared driver: pretrain a legacy MLM, then fine-tune a translation
        # transformer from its checkpoint. learned_pos_emb toggles learned
        # positional embeddings; encoder_only initializes only the encoder.
        with contextlib.redirect_stdout(StringIO()):
            with tempfile.TemporaryDirectory("test_mlm") as data_dir:
                create_dummy_data(data_dir)
                preprocess_lm_data(data_dir)
                train_legacy_masked_language_model(
                    data_dir,
                    arch="masked_lm",
                    extra_args=('--encoder-learned-pos',) if learned_pos_emb else ()
                )
                with tempfile.TemporaryDirectory(
                    "test_mlm_translation"
                ) as translation_dir:
                    create_dummy_data(translation_dir)
                    preprocess_translation_data(
                        translation_dir, extra_flags=["--joined-dictionary"]
                    )
                    # Train transformer with data_dir/checkpoint_last.pt
                    train_translation_model(
                        translation_dir,
                        arch="transformer_from_pretrained_xlm",
                        extra_flags=[
                            "--decoder-layers",
                            "1",
                            "--decoder-embed-dim",
                            "32",
                            "--decoder-attention-heads",
                            "1",
                            "--decoder-ffn-embed-dim",
                            "32",
                            "--encoder-layers",
                            "1",
                            "--encoder-embed-dim",
                            "32",
                            "--encoder-attention-heads",
                            "1",
                            "--encoder-ffn-embed-dim",
                            "32",
                            "--pretrained-xlm-checkpoint",
                            "{}/checkpoint_last.pt".format(data_dir),
                            "--activation-fn",
                            "gelu",
                            "--max-source-positions",
                            "500",
                            "--max-target-positions",
                            "500",
                        ] + (
                            ["--encoder-learned-pos", "--decoder-learned-pos"]
                            if learned_pos_emb else []
                        ) + (['--init-encoder-only'] if encoder_only else []),
                        task="translation_from_pretrained_xlm",
                    )
    def test_pretrained_masked_lm_for_translation_learned_pos_emb(self):
        self._test_pretrained_masked_lm_for_translation(True, False)
    def test_pretrained_masked_lm_for_translation_sinusoidal_pos_emb(self):
        self._test_pretrained_masked_lm_for_translation(False, False)
    def test_pretrained_masked_lm_for_translation_encoder_only(self):
        self._test_pretrained_masked_lm_for_translation(True, True)
def train_legacy_masked_language_model(data_dir, arch, extra_args=()):
    """Train the legacy cross-lingual masked LM for one epoch on data_dir.

    arch names the fairseq model architecture; extra_args is an optional
    sequence of extra CLI flags appended to the fixed argument list below.
    """
    train_parser = options.get_training_parser()
    # TODO: langs should be in and out right?
    train_args = options.parse_args_and_arch(
        train_parser,
        [
            "--task",
            "cross_lingual_lm",
            data_dir,
            "--arch",
            arch,
            # Optimizer args
            "--optimizer",
            "adam",
            "--lr-scheduler",
            "reduce_lr_on_plateau",
            "--lr-shrink",
            "0.5",
            "--lr",
            "0.0001",
            "--min-lr",
            "1e-09",
            # dropout, attention args
            "--dropout",
            "0.1",
            "--attention-dropout",
            "0.1",
            # MLM args
            "--criterion",
            "legacy_masked_lm_loss",
            "--masked-lm-only",
            "--monolingual-langs",
            "in,out",
            "--num-segment",
            "5",
            # Transformer args: use a small transformer model for fast training
            "--encoder-layers",
            "1",
            "--encoder-embed-dim",
            "32",
            "--encoder-attention-heads",
            "1",
            "--encoder-ffn-embed-dim",
            "32",
            # Other training args
            "--max-tokens",
            "500",
            "--tokens-per-sample",
            "500",
            "--save-dir",
            data_dir,
            "--max-epoch",
            "1",
            "--no-progress-bar",
            "--distributed-world-size",
            "1",
            "--dataset-impl",
            "raw",
        ] + list(extra_args),
    )
    train.main(train_args)
class TestOptimizers(unittest.TestCase):
    """Smoke test: train the same tiny LSTM model once per optimizer."""
    def setUp(self):
        # Silence all logging while a test runs.
        logging.disable(logging.CRITICAL)
    def tearDown(self):
        # Restore logging for subsequent tests.
        logging.disable(logging.NOTSET)
    def test_optimizers(self):
        with contextlib.redirect_stdout(StringIO()):
            with tempfile.TemporaryDirectory('test_optimizers') as data_dir:
                # Use just a bit of data and tiny model to keep this test runtime reasonable
                create_dummy_data(data_dir, num_examples=10, maxlen=5)
                preprocess_translation_data(data_dir)
                optimizers = ['adafactor', 'adam', 'nag', 'adagrad', 'sgd', 'adadelta']
                last_checkpoint = os.path.join(data_dir, 'checkpoint_last.pt')
                for optimizer in optimizers:
                    # Remove the previous run's checkpoint so each optimizer
                    # starts training from scratch.
                    if os.path.exists(last_checkpoint):
                        os.remove(last_checkpoint)
                    train_translation_model(data_dir, 'lstm', [
                        '--required-batch-size-multiple', '1',
                        '--encoder-layers', '1',
                        '--encoder-hidden-size', '32',
                        '--decoder-layers', '1',
                        '--optimizer', optimizer,
                    ])
                    generate_main(data_dir)
def create_dummy_roberta_head_data(data_dir, num_examples=100, maxlen=10, num_classes=2, regression=False):
    """Write dummy train/valid/test splits for a RoBERTa prediction head.

    Inputs (random lowercase-letter sequences) go to
    ``<data_dir>/input0/<split>.out``; targets go to
    ``<data_dir>/label/<split>.out`` for classification or
    ``<data_dir>/label/<split>.label`` for regression.
    """
    input_dir = 'input0'
    def _write_split(split):
        # Draw all input characters up front: ASCII codes 97..122 (a-z).
        chars = 97 + torch.floor(26 * torch.rand(num_examples * maxlen)).int()
        if regression:
            targets = torch.rand((num_examples, num_classes))
        else:
            targets = 1 + torch.floor(num_classes * torch.rand(num_examples)).int()
        in_path = os.path.join(data_dir, input_dir, split + '.out')
        with open(in_path, 'w') as f_in:
            label_name = split + '.label' if regression else split + '.out'
            with open(os.path.join(data_dir, 'label', label_name), 'w') as f_out:
                cursor = 0
                for idx in range(num_examples):
                    # Each example consumes a random-length run of the
                    # pre-drawn character pool.
                    length = random.randint(1, maxlen)
                    print(' '.join(map(chr, chars[cursor:cursor + length])), file=f_in)
                    if regression:
                        print(' '.join(map(str, targets[idx].numpy())), file=f_out)
                    else:
                        print('class{}'.format(targets[idx]), file=f_out)
                    cursor += length
    os.mkdir(os.path.join(data_dir, input_dir))
    os.mkdir(os.path.join(data_dir, 'label'))
    _write_split('train')
    _write_split('valid')
    _write_split('test')
def train_masked_lm(data_dir, arch, extra_flags=None):
    """Train a RoBERTa-style masked LM of the given arch for one epoch.

    extra_flags, if given, is a list of extra CLI flags appended to the
    fixed argument list below.
    """
    train_parser = options.get_training_parser()
    train_args = options.parse_args_and_arch(
        train_parser,
        [
            '--task', 'masked_lm',
            data_dir,
            '--arch', arch,
            '--optimizer', 'adam',
            '--lr', '0.0001',
            '--criterion', 'masked_lm',
            '--max-sentences', '500',
            '--save-dir', data_dir,
            '--max-epoch', '1',
            '--no-progress-bar',
            '--distributed-world-size', '1',
            '--ddp-backend', 'no_c10d',
            # BUG FIX: argv entries must be strings. A bare int 0 makes
            # argparse raise a TypeError while inspecting the argument
            # (it subscripts/`startswith`-checks each element).
            '--num-workers', '0',
        ] + (extra_flags or []),
    )
    train.main(train_args)
def train_roberta_head(data_dir, arch, num_classes=2, extra_flags=None):
    """Train a RoBERTa sentence-prediction head for one epoch.

    num_classes sets the output dimension; extra_flags, if given, is a
    list of extra CLI flags appended to the fixed argument list below.
    """
    train_parser = options.get_training_parser()
    train_args = options.parse_args_and_arch(
        train_parser,
        [
            '--task', 'sentence_prediction',
            data_dir,
            '--arch', arch,
            '--num-classes', str(num_classes),
            '--optimizer', 'adam',
            '--lr', '0.0001',
            '--criterion', 'sentence_prediction',
            '--max-tokens', '500',
            '--max-positions', '500',
            '--max-sentences', '500',
            '--save-dir', data_dir,
            '--max-epoch', '1',
            '--no-progress-bar',
            '--distributed-world-size', '1',
            '--ddp-backend', 'no_c10d',
            # BUG FIX: argv entries must be strings. A bare int 0 makes
            # argparse raise a TypeError while inspecting the argument
            # (it subscripts/`startswith`-checks each element).
            '--num-workers', '0',
        ] + (extra_flags or []),
    )
    train.main(train_args)
def train_language_model(data_dir, arch, extra_flags=None, run_validation=False):
    """Train a tiny language model for one epoch on the data in *data_dir*,
    optionally running a validation pass over the 'valid' subset afterwards.
    """
    base_flags = [
        '--task', 'language_modeling',
        data_dir,
        '--arch', arch,
        '--optimizer', 'adam',
        '--lr', '0.0001',
        '--criterion', 'adaptive_loss',
        '--adaptive-softmax-cutoff', '5,10,15',
        '--max-tokens', '500',
        '--tokens-per-sample', '500',
        '--save-dir', data_dir,
        '--max-epoch', '1',
        '--no-progress-bar',
        '--distributed-world-size', '1',
        '--ddp-backend', 'no_c10d',
    ]
    parser = options.get_training_parser()
    parsed_args = options.parse_args_and_arch(
        parser, base_flags + (extra_flags or []))
    train.main(parsed_args)

    if not run_validation:
        return

    # Score the freshly written checkpoint on the held-out 'valid' subset.
    validation_parser = options.get_validation_parser()
    validation_args = options.parse_args_and_arch(
        validation_parser,
        [
            '--task', 'language_modeling',
            data_dir,
            '--path', os.path.join(data_dir, 'checkpoint_last.pt'),
            '--valid-subset', 'valid',
            '--max-tokens', '500',
            '--no-progress-bar',
        ]
    )
    validate.main(validation_args)
def eval_lm_main(data_dir):
    """Evaluate the last saved language-model checkpoint in *data_dir*."""
    parser = options.get_eval_lm_parser()
    checkpoint_path = os.path.join(data_dir, 'checkpoint_last.pt')
    parsed_args = options.parse_args_and_arch(
        parser,
        [
            data_dir,
            '--path', checkpoint_path,
            '--no-progress-bar',
        ],
    )
    eval_lm.main(parsed_args)
def train_masked_language_model(data_dir, arch, extra_args=()):
    """Train a tiny cross-lingual masked language model for one epoch."""
    parser = options.get_training_parser()
    # TODO: langs should be in and out right?
    flags = [
        "--task", "cross_lingual_lm",
        data_dir,
        "--arch", arch,
        # Optimizer args
        "--optimizer", "adam",
        "--lr-scheduler", "reduce_lr_on_plateau",
        "--lr-shrink", "0.5",
        "--lr", "0.0001",
        "--min-lr", "1e-09",
        # dropout, attention args
        "--dropout", "0.1",
        "--attention-dropout", "0.1",
        # MLM args
        "--criterion", "masked_lm_loss",
        "--masked-lm-only",
        "--monolingual-langs", "in,out",
        "--num-segment", "5",
        # Transformer args: use a small transformer model for fast training
        "--encoder-layers", "1",
        "--encoder-embed-dim", "32",
        "--encoder-attention-heads", "1",
        "--encoder-ffn-embed-dim", "32",
        # Other training args
        "--max-tokens", "500",
        "--tokens-per-sample", "500",
        "--save-dir", data_dir,
        "--max-epoch", "1",
        "--no-progress-bar",
        "--distributed-world-size", "1",
        "--dataset-impl", "raw",
    ]
    parsed_args = options.parse_args_and_arch(parser, flags + list(extra_args))
    train.main(parsed_args)
if __name__ == '__main__':
    # Allow running this module directly to execute any unittest cases it
    # defines.
    unittest.main()
| bsd-3-clause |
jfhumann/servo | tests/wpt/update_css.py | 116 | 1081 | # This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
import os
import sys
# Directory containing this script.
here = os.path.dirname(__file__)


def wpt_path(*args):
    """Join *args* onto the directory containing this script."""
    return os.path.join(here, *args)
# Imports
sys.path.append(wpt_path("harness"))
from wptrunner import wptcommandline
def update_tests(**kwargs):
    """Run the wptrunner update; return 0 on a clean update, 1 otherwise."""
    from wptrunner import update

    set_defaults(kwargs)
    logger = update.setup_logging(kwargs, {"mach": sys.stdout})
    result = update.run_update(logger, **kwargs)
    if result is update.update.exit_clean:
        return 0
    return 1
def set_defaults(kwargs):
    """Fill in product/config defaults, then expand from the config file."""
    if kwargs["product"] is None:
        kwargs["product"] = "servo"
    config = kwargs["config"]
    kwargs["config"] = wpt_path('config_css.ini') if config is None else config
    wptcommandline.set_from_config(kwargs)
def main():
    """Parse command-line arguments and run the test update."""
    parser = wptcommandline.create_parser_update()
    parsed = vars(parser.parse_args())
    return update_tests(**parsed)
if __name__ == "__main__":
    # main() already returns a process exit code (0 = clean update,
    # 1 = failure), so pass it straight through.  The previous
    # `sys.exit(0 if main() else 1)` inverted the code: a successful run
    # (return 0) exited with status 1 and vice versa.
    sys.exit(main())
| mpl-2.0 |
bkeiren/cef | tools/check_style.py | 3 | 4018 | # Copyright (c) 2012 The Chromium Embedded Framework Authors.
# Portions copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import os, re, string, sys
from file_util import *
import git_util as git
# script directory
script_dir = os.path.dirname(__file__)
# CEF root directory
cef_dir = os.path.abspath(os.path.join(script_dir, os.pardir))
# Valid extensions for files we want to lint.
DEFAULT_LINT_WHITELIST_REGEX = r"(.*\.cpp|.*\.cc|.*\.h)"
DEFAULT_LINT_BLACKLIST_REGEX = r"$^"
# Import cpplint, falling back to a copy shipped with depot_tools.
try:
  # depot_tools may already be in the import path.
  import cpplint
  import cpplint_chromium
except ImportError, e:
  # Search the PATH environment variable to find the depot_tools folder.
  depot_tools = None;
  paths = os.environ.get('PATH').split(os.pathsep)
  for path in paths:
    if os.path.exists(os.path.join(path, 'cpplint_chromium.py')):
      depot_tools = path
      break
  if depot_tools is None:
    # Cannot lint without cpplint; abort with a distinctive exit code.
    print >> sys.stderr, 'Error: could not find depot_tools in PATH.'
    sys.exit(2)
  # Add depot_tools to import path.
  sys.path.append(depot_tools)
  import cpplint
  import cpplint_chromium
# The default implementation of FileInfo.RepositoryName looks for the top-most
# directory that contains a .git folder. This is a problem for CEF because the
# CEF root folder (which may have an arbitrary name) lives inside the Chromium
# src folder. Reimplement in a dumb but sane way.
def patch_RepositoryName(self):
  """Replacement for cpplint's FileInfo.RepositoryName.

  Walks up from the linted file until the Chromium "src" directory is found
  and reports the path as if the CEF root folder were literally named "cef".
  Paths that do not exist on disk are returned unchanged.
  """
  fullname = self.FullName()
  if not os.path.exists(fullname):
    return fullname
  root_dir = os.path.dirname(fullname)
  parent = root_dir
  while os.path.basename(parent) != "src":
    parent = os.path.dirname(parent)
  prefix = os.path.commonprefix([root_dir, parent])
  components = fullname[len(prefix) + 1:].split('/')
  return '/'.join(["cef"] + components[1:])
def check_style(args, white_list = None, black_list = None):
""" Execute cpplint with the specified arguments. """
# Apply patches.
cpplint.FileInfo.RepositoryName = patch_RepositoryName
# Process cpplint arguments.
filenames = cpplint.ParseArguments(args)
if not white_list:
white_list = DEFAULT_LINT_WHITELIST_REGEX
white_regex = re.compile(white_list)
if not black_list:
black_list = DEFAULT_LINT_BLACKLIST_REGEX
black_regex = re.compile(black_list)
extra_check_functions = [cpplint_chromium.CheckPointerDeclarationWhitespace]
for filename in filenames:
if white_regex.match(filename):
if black_regex.match(filename):
print "Ignoring file %s" % filename
else:
cpplint.ProcessFile(filename, cpplint._cpplint_state.verbose_level,
extra_check_functions)
else:
print "Skipping file %s" % filename
print "Total errors found: %d\n" % cpplint._cpplint_state.error_count
return 1
if __name__ == "__main__":
  # Start with the default parameters.
  args = [
    # * Disable the 'build/class' test because it errors uselessly with C
    #   structure pointers and template declarations.
    # * Disable the 'runtime/references' test because CEF allows non-const
    #   arguments passed by reference.
    # * Disable the 'runtime/sizeof' test because it has a high number of
    #   false positives and adds marginal value.
    '--filter=-build/class,-runtime/references,-runtime/sizeof',
  ]
  # Add anything passed on the command-line.
  args += sys.argv[1:]
  # Pre-process the arguments before passing to the linter:
  # cpplint flags and plain files pass through; '--changed' expands to the
  # files modified in git; directories expand to their contents.
  new_args = []
  changed = []
  for arg in args:
    if arg == '--changed':
      # Add any changed files.
      changed = git.get_changed_files(cef_dir)
    elif arg[:2] == '--' or not os.path.isdir(arg):
      # Pass argument unchanged.
      new_args.append(arg)
    else:
      # Add all files in the directory.
      new_args += get_files(os.path.join(arg, '*'))
  if len(changed) > 0:
    new_args += changed
  check_style(new_args)
cbmoore/statsmodels | docs/source/plots/graphics_gofplots_qqplot.py | 38 | 1911 | # -*- coding: utf-8 -*-
"""
Created on Sun May 06 05:32:15 2012
Author: Josef Perktold
editted by: Paul Hobson (2012-08-19)
"""
from scipy import stats
from matplotlib import pyplot as plt
import statsmodels.api as sm
#example from docstring
# Fit an OLS model to the Longley dataset; its residuals are the sample
# whose distribution the Q-Q plots below examine.
data = sm.datasets.longley.load()
data.exog = sm.add_constant(data.exog, prepend=True)
mod_fit = sm.OLS(data.endog, data.exog).fit()
res = mod_fit.resid
left = -1.8 #x coordinate for text insert
# 2x2 grid of qqplot variants, each annotated with the keywords it uses.
fig = plt.figure()
ax = fig.add_subplot(2, 2, 1)
sm.graphics.qqplot(res, ax=ax)
top = ax.get_ylim()[1] * 0.75
txt = ax.text(left, top, 'no keywords', verticalalignment='top')
txt.set_bbox(dict(facecolor='k', alpha=0.1))
ax = fig.add_subplot(2, 2, 2)
sm.graphics.qqplot(res, line='s', ax=ax)
top = ax.get_ylim()[1] * 0.75
txt = ax.text(left, top, "line='s'", verticalalignment='top')
txt.set_bbox(dict(facecolor='k', alpha=0.1))
ax = fig.add_subplot(2, 2, 3)
sm.graphics.qqplot(res, line='45', fit=True, ax=ax)
ax.set_xlim(-2, 2)
top = ax.get_ylim()[1] * 0.75
txt = ax.text(left, top, "line='45', \nfit=True", verticalalignment='top')
txt.set_bbox(dict(facecolor='k', alpha=0.1))
# Compare against a Student's t reference distribution instead of normal.
ax = fig.add_subplot(2, 2, 4)
sm.graphics.qqplot(res, dist=stats.t, line='45', fit=True, ax=ax)
ax.set_xlim(-2, 2)
top = ax.get_ylim()[1] * 0.75
txt = ax.text(left, top, "dist=stats.t, \nline='45', \nfit=True",
              verticalalignment='top')
txt.set_bbox(dict(facecolor='k', alpha=0.1))
fig.tight_layout()
plt.gcf()
# example with the new ProbPlot class
import numpy as np
# Two independent normal samples to compare against each other.
x = np.random.normal(loc=8.25, scale=3.5, size=37)
y = np.random.normal(loc=8.00, scale=3.25, size=37)
pp_x = sm.ProbPlot(x, fit=True)
pp_y = sm.ProbPlot(y, fit=True)
# probability of exceedance
fig2 = pp_x.probplot(exceed=True)
# compare x quantiles to y quantiles
fig3 = pp_x.qqplot(other=pp_y, line='45')
# same as above with probabilities/percentiles
fig4 = pp_x.ppplot(other=pp_y, line='45')
boyuegame/kbengine | kbe/src/lib/python/Lib/distutils/tests/test_bdist_rpm.py | 71 | 4868 | """Tests for distutils.command.bdist_rpm."""
import unittest
import sys
import os
import tempfile
import shutil
from test.support import run_unittest
from distutils.core import Distribution
from distutils.command.bdist_rpm import bdist_rpm
from distutils.tests import support
from distutils.spawn import find_executable
from distutils import spawn
from distutils.errors import DistutilsExecError
SETUP_PY = """\
from distutils.core import setup
import foo
setup(name='foo', version='0.1', py_modules=['foo'],
url='xxx', author='xxx', author_email='xxx')
"""
class BuildRpmTestCase(support.TempdirManager,
                       support.LoggingSilencer,
                       unittest.TestCase):
    """Tests for the bdist_rpm command."""

    def setUp(self):
        # rpmbuild cannot handle a non-UTF-8-encodable interpreter path.
        try:
            sys.executable.encode("UTF-8")
        except UnicodeEncodeError:
            raise unittest.SkipTest("sys.executable is not encodable to UTF-8")
        super(BuildRpmTestCase, self).setUp()
        self.old_location = os.getcwd()
        self.old_sys_argv = sys.argv, sys.argv[:]

    def tearDown(self):
        # Restore cwd and sys.argv mutated by the tests.
        os.chdir(self.old_location)
        sys.argv = self.old_sys_argv[0]
        sys.argv[:] = self.old_sys_argv[1]
        super(BuildRpmTestCase, self).tearDown()

    def _create_pkg(self):
        """Create a minimal 'foo' package in a temp dir, chdir into it and
        set up sys.argv; return (pkg_dir, dist) ready for bdist_rpm.

        Extracted so the fixture is not duplicated in every test method.
        """
        tmp_dir = self.mkdtemp()
        pkg_dir = os.path.join(tmp_dir, 'foo')
        os.mkdir(pkg_dir)
        self.write_file((pkg_dir, 'setup.py'), SETUP_PY)
        self.write_file((pkg_dir, 'foo.py'), '#')
        self.write_file((pkg_dir, 'MANIFEST.in'), 'include foo.py')
        self.write_file((pkg_dir, 'README'), '')

        dist = Distribution({'name': 'foo', 'version': '0.1',
                             'py_modules': ['foo'],
                             'url': 'xxx', 'author': 'xxx',
                             'author_email': 'xxx'})
        dist.script_name = 'setup.py'
        os.chdir(pkg_dir)
        sys.argv = ['setup.py']
        return pkg_dir, dist

    # XXX I am unable yet to make this test work without
    # spurious sdtout/stderr output under Mac OS X
    @unittest.skipUnless(sys.platform.startswith('linux'),
                         'spurious sdtout/stderr output under Mac OS X')
    @unittest.skipIf(find_executable('rpm') is None,
                     'the rpm command is not found')
    @unittest.skipIf(find_executable('rpmbuild') is None,
                     'the rpmbuild command is not found')
    def test_quiet(self):
        # let's create a package
        pkg_dir, dist = self._create_pkg()

        cmd = bdist_rpm(dist)
        cmd.fix_python = True

        # running in quiet mode
        cmd.quiet = 1
        cmd.ensure_finalized()
        cmd.run()

        dist_created = os.listdir(os.path.join(pkg_dir, 'dist'))
        self.assertIn('foo-0.1-1.noarch.rpm', dist_created)

        # bug #2945: upload ignores bdist_rpm files
        self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.src.rpm'), dist.dist_files)
        self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.noarch.rpm'), dist.dist_files)

    # XXX I am unable yet to make this test work without
    # spurious sdtout/stderr output under Mac OS X
    @unittest.skipUnless(sys.platform.startswith('linux'),
                         'spurious sdtout/stderr output under Mac OS X')
    # http://bugs.python.org/issue1533164
    @unittest.skipIf(find_executable('rpm') is None,
                     'the rpm command is not found')
    @unittest.skipIf(find_executable('rpmbuild') is None,
                     'the rpmbuild command is not found')
    def test_no_optimize_flag(self):
        # let's create a package that breaks bdist_rpm
        pkg_dir, dist = self._create_pkg()

        cmd = bdist_rpm(dist)
        cmd.fix_python = True

        cmd.quiet = 1
        cmd.ensure_finalized()
        cmd.run()

        dist_created = os.listdir(os.path.join(pkg_dir, 'dist'))
        self.assertIn('foo-0.1-1.noarch.rpm', dist_created)

        # bug #2945: upload ignores bdist_rpm files
        self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.src.rpm'), dist.dist_files)
        self.assertIn(('bdist_rpm', 'any', 'dist/foo-0.1-1.noarch.rpm'), dist.dist_files)

        os.remove(os.path.join(pkg_dir, 'dist', 'foo-0.1-1.noarch.rpm'))
def test_suite():
    """Return a suite containing every BuildRpmTestCase test."""
    suite = unittest.makeSuite(BuildRpmTestCase)
    return suite
if __name__ == '__main__':
    # run_unittest (from test.support) honours the regrtest options.
    run_unittest(test_suite())
| lgpl-3.0 |
2013Commons/hue | desktop/core/ext-py/Django-1.4.5/django/db/models/sql/query.py | 53 | 85934 | """
Create SQL statements for QuerySets.
The code in here encapsulates all of the SQL construction so that QuerySets
themselves do not have to (and could be backed by things other than SQL
databases). The abstraction barrier only works one way: this module has to know
all about the internals of models in order to get the information it needs.
"""
import copy
from django.utils.datastructures import SortedDict
from django.utils.encoding import force_unicode
from django.utils.tree import Node
from django.db import connections, DEFAULT_DB_ALIAS
from django.db.models import signals
from django.db.models.expressions import ExpressionNode
from django.db.models.fields import FieldDoesNotExist
from django.db.models.query_utils import InvalidQuery
from django.db.models.sql import aggregates as base_aggregates_module
from django.db.models.sql.constants import *
from django.db.models.sql.datastructures import EmptyResultSet, Empty, MultiJoin
from django.db.models.sql.expressions import SQLEvaluator
from django.db.models.sql.where import (WhereNode, Constraint, EverythingNode,
ExtraWhere, AND, OR)
from django.core.exceptions import FieldError
__all__ = ['Query', 'RawQuery']
class RawQuery(object):
    """
    A single raw SQL query.

    Mirrors just enough of the Query interface (limits, extra/aggregate
    selects) for the SQL compiler machinery to process its results.
    """
    def __init__(self, sql, using, params=None):
        self.params = params or ()
        self.sql = sql
        self.using = using
        # Lazily created; populated by _execute_query().
        self.cursor = None
        # Mirror some properties of a normal query so that
        # the compiler can be used to process results.
        self.low_mark, self.high_mark = 0, None  # Used for offset/limit
        self.extra_select = {}
        self.aggregate_select = {}
    def clone(self, using):
        """Return a copy of this query bound to the given connection alias."""
        return RawQuery(self.sql, using, params=self.params)
    def convert_values(self, value, field, connection):
        """Convert the database-returned value into a type that is consistent
        across database backends.

        By default, this defers to the underlying backend operations, but
        it can be overridden by Query classes for specific backends.
        """
        return connection.ops.convert_values(value, field)
    def get_columns(self):
        """Return the column names of the result set, executing the query
        first if it has not run yet."""
        if self.cursor is None:
            self._execute_query()
        converter = connections[self.using].introspection.table_name_converter
        return [converter(column_meta[0])
                for column_meta in self.cursor.description]
    def __iter__(self):
        # Always execute a new query for a new iterator.
        # This could be optimized with a cache at the expense of RAM.
        self._execute_query()
        if not connections[self.using].features.can_use_chunked_reads:
            # If the database can't use chunked reads we need to make sure we
            # evaluate the entire query up front.
            result = list(self.cursor)
        else:
            result = self.cursor
        return iter(result)
    def __repr__(self):
        # NOTE: interpolates params for display only; not valid for execution.
        return "<RawQuery: %r>" % (self.sql % tuple(self.params))
    def _execute_query(self):
        # Open a fresh cursor on the configured connection and run the query.
        self.cursor = connections[self.using].cursor()
        self.cursor.execute(self.sql, self.params)
class Query(object):
"""
A single SQL query.
"""
# SQL join types. These are part of the class because their string forms
# vary from database to database and can be customised by a subclass.
INNER = 'INNER JOIN'
LOUTER = 'LEFT OUTER JOIN'
alias_prefix = 'T'
query_terms = QUERY_TERMS
aggregates_module = base_aggregates_module
compiler = 'SQLCompiler'
    def __init__(self, model, where=WhereNode):
        """Initialise query state for *model*; *where* is the node class
        used to build WHERE/HAVING trees (injectable for testing)."""
        self.model = model
        self.alias_refcount = SortedDict()
        self.alias_map = {}     # Maps alias to join information
        self.table_map = {}     # Maps table names to list of aliases.
        self.join_map = {}
        self.rev_join_map = {}  # Reverse of join_map.
        self.quote_cache = {}
        self.default_cols = True
        self.default_ordering = True
        self.standard_ordering = True
        self.ordering_aliases = []
        self.select_fields = []
        self.related_select_fields = []
        self.dupe_avoidance = {}
        self.used_aliases = set()
        self.filter_is_sticky = False
        self.included_inherited_models = {}
        # SQL-related attributes
        self.select = []
        self.tables = []    # Aliases in the order they are created.
        self.where = where()
        self.where_class = where
        self.group_by = None
        self.having = where()
        self.order_by = []
        self.low_mark, self.high_mark = 0, None  # Used for offset/limit
        self.distinct = False
        self.distinct_fields = []
        self.select_for_update = False
        self.select_for_update_nowait = False
        self.select_related = False
        self.related_select_cols = []
        # SQL aggregate-related attributes
        self.aggregates = SortedDict()  # Maps alias -> SQL aggregate function
        self.aggregate_select_mask = None
        self._aggregate_select_cache = None
        # Arbitrary maximum limit for select_related. Prevents infinite
        # recursion. Can be changed by the depth parameter to select_related().
        self.max_depth = 5
        # These are for extensions. The contents are more or less appended
        # verbatim to the appropriate clause.
        self.extra = SortedDict()  # Maps col_alias -> (col_sql, params).
        self.extra_select_mask = None
        self._extra_select_cache = None
        self.extra_tables = ()
        self.extra_order_by = ()
        # A tuple that is a set of model field names and either True, if these
        # are the fields to defer, or False if these are the only fields to
        # load.
        self.deferred_loading = (set(), True)
def __str__(self):
"""
Returns the query as a string of SQL with the parameter values
substituted in (use sql_with_params() to see the unsubstituted string).
Parameter values won't necessarily be quoted correctly, since that is
done by the database interface at execution time.
"""
sql, params = self.sql_with_params()
return sql % params
def sql_with_params(self):
"""
Returns the query as an SQL string and the parameters that will be
subsituted into the query.
"""
return self.get_compiler(DEFAULT_DB_ALIAS).as_sql()
def __deepcopy__(self, memo):
result = self.clone(memo=memo)
memo[id(self)] = result
return result
    def __getstate__(self):
        """
        Pickling support: return a picklable variant of __dict__.
        """
        obj_dict = self.__dict__.copy()
        # Related-select state is derived and is rebuilt on demand.
        obj_dict['related_select_fields'] = []
        obj_dict['related_select_cols'] = []
        # Fields can't be pickled, so if a field list has been
        # specified, we pickle the list of field names instead.
        # None is also a possible value; that can pass as-is
        obj_dict['select_fields'] = [
            f is not None and f.name or None
            for f in obj_dict['select_fields']
        ]
        return obj_dict
    def __setstate__(self, obj_dict):
        """
        Unpickling support: restore field instances from the names that
        __getstate__ pickled in their place.
        """
        # Rebuild list of field instances
        opts = obj_dict['model']._meta
        obj_dict['select_fields'] = [
            name is not None and opts.get_field(name) or None
            for name in obj_dict['select_fields']
        ]
        self.__dict__.update(obj_dict)
    def prepare(self):
        # Hook for subclasses to return a query prepared for execution; the
        # base implementation is a no-op.
        return self
def get_compiler(self, using=None, connection=None):
if using is None and connection is None:
raise ValueError("Need either using or connection")
if using:
connection = connections[using]
# Check that the compiler will be able to execute the query
for alias, aggregate in self.aggregate_select.items():
connection.ops.check_aggregate_support(aggregate)
return connection.ops.compiler(self.compiler)(self, connection, using)
    def get_meta(self):
        """
        Returns the Options instance (the model._meta) from which to start
        processing. Normally, this is self.model._meta, but it can be changed
        by subclasses.
        """
        return self.model._meta
    def clone(self, klass=None, memo=None, **kwargs):
        """
        Creates a copy of the current instance. The 'kwargs' parameter can be
        used by clients to update attributes after copying has taken place.

        'klass' optionally rebinds the copy to a different Query subclass;
        'memo' is threaded through the deepcopies for __deepcopy__ support.
        """
        # Empty() avoids running __init__; every attribute is set explicitly.
        obj = Empty()
        obj.__class__ = klass or self.__class__
        obj.model = self.model
        obj.alias_refcount = self.alias_refcount.copy()
        obj.alias_map = self.alias_map.copy()
        obj.table_map = self.table_map.copy()
        obj.join_map = self.join_map.copy()
        obj.rev_join_map = self.rev_join_map.copy()
        obj.quote_cache = {}
        obj.default_cols = self.default_cols
        obj.default_ordering = self.default_ordering
        obj.standard_ordering = self.standard_ordering
        obj.included_inherited_models = self.included_inherited_models.copy()
        obj.ordering_aliases = []
        obj.select_fields = self.select_fields[:]
        obj.related_select_fields = self.related_select_fields[:]
        obj.dupe_avoidance = self.dupe_avoidance.copy()
        obj.select = self.select[:]
        obj.tables = self.tables[:]
        obj.where = copy.deepcopy(self.where, memo=memo)
        obj.where_class = self.where_class
        if self.group_by is None:
            obj.group_by = None
        else:
            obj.group_by = self.group_by[:]
        obj.having = copy.deepcopy(self.having, memo=memo)
        obj.order_by = self.order_by[:]
        obj.low_mark, obj.high_mark = self.low_mark, self.high_mark
        obj.distinct = self.distinct
        obj.distinct_fields = self.distinct_fields[:]
        obj.select_for_update = self.select_for_update
        obj.select_for_update_nowait = self.select_for_update_nowait
        obj.select_related = self.select_related
        obj.related_select_cols = []
        obj.aggregates = copy.deepcopy(self.aggregates, memo=memo)
        if self.aggregate_select_mask is None:
            obj.aggregate_select_mask = None
        else:
            obj.aggregate_select_mask = self.aggregate_select_mask.copy()
        # _aggregate_select_cache cannot be copied, as doing so breaks the
        # (necessary) state in which both aggregates and
        # _aggregate_select_cache point to the same underlying objects.
        # It will get re-populated in the cloned queryset the next time it's
        # used.
        obj._aggregate_select_cache = None
        obj.max_depth = self.max_depth
        obj.extra = self.extra.copy()
        if self.extra_select_mask is None:
            obj.extra_select_mask = None
        else:
            obj.extra_select_mask = self.extra_select_mask.copy()
        if self._extra_select_cache is None:
            obj._extra_select_cache = None
        else:
            obj._extra_select_cache = self._extra_select_cache.copy()
        obj.extra_tables = self.extra_tables
        obj.extra_order_by = self.extra_order_by
        obj.deferred_loading = copy.deepcopy(self.deferred_loading, memo=memo)
        if self.filter_is_sticky and self.used_aliases:
            obj.used_aliases = self.used_aliases.copy()
        else:
            obj.used_aliases = set()
        obj.filter_is_sticky = False
        obj.__dict__.update(kwargs)
        if hasattr(obj, '_setup_query'):
            obj._setup_query()
        return obj
def convert_values(self, value, field, connection):
"""Convert the database-returned value into a type that is consistent
across database backends.
By default, this defers to the underlying backend operations, but
it can be overridden by Query classes for specific backends.
"""
return connection.ops.convert_values(value, field)
def resolve_aggregate(self, value, aggregate, connection):
"""Resolve the value of aggregates returned by the database to
consistent (and reasonable) types.
This is required because of the predisposition of certain backends
to return Decimal and long types when they are not needed.
"""
if value is None:
if aggregate.is_ordinal:
return 0
# Return None as-is
return value
elif aggregate.is_ordinal:
# Any ordinal aggregate (e.g., count) returns an int
return int(value)
elif aggregate.is_computed:
# Any computed aggregate (e.g., avg) returns a float
return float(value)
else:
# Return value depends on the type of the field being processed.
return self.convert_values(value, aggregate.field, connection)
    def get_aggregation(self, using):
        """
        Returns the dictionary with the values of the existing aggregations,
        keyed by aggregate alias.
        """
        if not self.aggregate_select:
            return {}
        # If there is a group by clause, aggregating does not add useful
        # information but retrieves only the first row. Aggregate
        # over the subquery instead.
        if self.group_by is not None:
            from django.db.models.sql.subqueries import AggregateQuery
            query = AggregateQuery(self.model)
            obj = self.clone()
            # Remove any aggregates marked for reduction from the subquery
            # and move them to the outer AggregateQuery.
            for alias, aggregate in self.aggregate_select.items():
                if aggregate.is_summary:
                    query.aggregate_select[alias] = aggregate
                    del obj.aggregate_select[alias]
            try:
                query.add_subquery(obj, using)
            except EmptyResultSet:
                # The inner query can produce no rows; every aggregate is None.
                return dict(
                    (alias, None)
                    for alias in query.aggregate_select
                )
        else:
            query = self
            self.select = []
            self.default_cols = False
            self.extra = {}
            self.remove_inherited_models()
        # Strip everything irrelevant to a single-row aggregate result.
        query.clear_ordering(True)
        query.clear_limits()
        query.select_for_update = False
        query.select_related = False
        query.related_select_cols = []
        query.related_select_fields = []
        result = query.get_compiler(using).execute_sql(SINGLE)
        if result is None:
            result = [None for q in query.aggregate_select.items()]
        return dict([
            (alias, self.resolve_aggregate(val, aggregate, connection=connections[using]))
            for (alias, aggregate), val
            in zip(query.aggregate_select.items(), result)
        ])
    def get_count(self, using):
        """
        Performs a COUNT() query using the current filter constraints and
        returns the number of matching rows, adjusted for any slicing.
        """
        obj = self.clone()
        if len(self.select) > 1 or self.aggregate_select or (self.distinct and self.distinct_fields):
            # If a select clause exists, then the query has already started to
            # specify the columns that are to be returned.
            # In this case, we need to use a subquery to evaluate the count.
            from django.db.models.sql.subqueries import AggregateQuery
            subquery = obj
            subquery.clear_ordering(True)
            subquery.clear_limits()
            obj = AggregateQuery(obj.model)
            try:
                obj.add_subquery(subquery, using=using)
            except EmptyResultSet:
                # add_subquery evaluates the query; if it raises
                # EmptyResultSet then there can be no results, and
                # therefore the count is 0.
                return 0
        obj.add_count_column()
        number = obj.get_aggregation(using=using)[None]
        # Apply offset and limit constraints manually, since using LIMIT/OFFSET
        # in SQL (in variants that provide them) doesn't change the COUNT
        # output.
        number = max(0, number - self.low_mark)
        if self.high_mark is not None:
            number = min(number, self.high_mark - self.low_mark)
        return number
    def has_results(self, using):
        """Cheaply test whether the query matches any rows: select a constant
        column, strip ordering/related/aggregates, LIMIT 1, and check whether
        a row comes back."""
        q = self.clone()
        # Select the literal 1 as the only output column.
        q.add_extra({'a': 1}, None, None, None, None, None)
        q.select = []
        q.select_fields = []
        q.default_cols = False
        q.select_related = False
        q.set_extra_mask(('a',))
        q.set_aggregate_mask(())
        q.clear_ordering(True)
        q.set_limits(high=1)
        compiler = q.get_compiler(using=using)
        return bool(compiler.execute_sql(SINGLE))
    def combine(self, rhs, connector):
        """
        Merge the 'rhs' query into the current one (with any 'rhs' effects
        being applied *after* (that is, "to the right of") anything in the
        current query. 'rhs' is not modified during a call to this function.

        The 'connector' parameter describes how to connect filters from the
        'rhs' query (AND or OR).
        """
        assert self.model == rhs.model, \
            "Cannot combine queries on two different base models."
        assert self.can_filter(), \
            "Cannot combine queries once a slice has been taken."
        assert self.distinct == rhs.distinct, \
            "Cannot combine a unique query with a non-unique query."
        assert self.distinct_fields == rhs.distinct_fields, \
            "Cannot combine queries with different distinct fields."
        self.remove_inherited_models()
        # Work out how to relabel the rhs aliases, if necessary.
        change_map = {}
        used = set()
        conjunction = (connector == AND)
        first = True
        for alias in rhs.tables:
            if not rhs.alias_refcount[alias]:
                # An unused alias.
                continue
            promote = (rhs.alias_map[alias][JOIN_TYPE] == self.LOUTER)
            lhs, table, lhs_col, col = rhs.rev_join_map[alias]
            # If the left side of the join was already relabeled, use the
            # updated alias.
            lhs = change_map.get(lhs, lhs)
            new_alias = self.join((lhs, table, lhs_col, col),
                    (conjunction and not first), used, promote, not conjunction)
            used.add(new_alias)
            change_map[alias] = new_alias
            first = False
        # So that we don't exclude valid results in an "or" query combination,
        # all joins exclusive to either the lhs or the rhs must be converted
        # to an outer join.
        if not conjunction:
            l_tables = set(self.tables)
            r_tables = set(rhs.tables)
            # Update r_tables aliases.
            for alias in change_map:
                if alias in r_tables:
                    # r_tables may contain entries that have a refcount of 0
                    # if the query has references to a table that can be
                    # trimmed because only the foreign key is used.
                    # We only need to fix the aliases for the tables that
                    # actually have aliases.
                    if rhs.alias_refcount[alias]:
                        r_tables.remove(alias)
                        r_tables.add(change_map[alias])
            # Find aliases that are exclusive to rhs or lhs.
            # These are promoted to outer joins.
            outer_tables = (l_tables | r_tables) - (l_tables & r_tables)
            for alias in outer_tables:
                # Again, some of the tables won't have aliases due to
                # the trimming of unnecessary tables.
                if self.alias_refcount.get(alias) or rhs.alias_refcount.get(alias):
                    self.promote_alias(alias, True)
        # Now relabel a copy of the rhs where-clause and add it to the current
        # one.
        if rhs.where:
            w = copy.deepcopy(rhs.where)
            w.relabel_aliases(change_map)
            if not self.where:
                # Since 'self' matches everything, add an explicit "include
                # everything" where-constraint so that connections between the
                # where clauses won't exclude valid results.
                self.where.add(EverythingNode(), AND)
        elif self.where:
            # rhs has an empty where clause.
            w = self.where_class()
            w.add(EverythingNode(), AND)
        else:
            w = self.where_class()
        self.where.add(w, connector)
        # Selection columns and extra extensions are those provided by 'rhs'.
        self.select = []
        for col in rhs.select:
            if isinstance(col, (list, tuple)):
                # (alias, column) pairs only need the alias relabelled.
                self.select.append((change_map.get(col[0], col[0]), col[1]))
            else:
                item = copy.deepcopy(col)
                item.relabel_aliases(change_map)
                self.select.append(item)
        self.select_fields = rhs.select_fields[:]
        if connector == OR:
            # It would be nice to be able to handle this, but the queries don't
            # really make sense (or return consistent value sets). Not worth
            # the extra complexity when you can write a real query instead.
            if self.extra and rhs.extra:
                raise ValueError("When merging querysets using 'or', you "
                        "cannot have extra(select=...) on both sides.")
        self.extra.update(rhs.extra)
        extra_select_mask = set()
        if self.extra_select_mask is not None:
            extra_select_mask.update(self.extra_select_mask)
        if rhs.extra_select_mask is not None:
            extra_select_mask.update(rhs.extra_select_mask)
        if extra_select_mask:
            self.set_extra_mask(extra_select_mask)
        self.extra_tables += rhs.extra_tables
        # Ordering uses the 'rhs' ordering, unless it has none, in which case
        # the current ordering is used.
        self.order_by = rhs.order_by and rhs.order_by[:] or self.order_by
        self.extra_order_by = rhs.extra_order_by or self.extra_order_by
def deferred_to_data(self, target, callback):
    """
    Converts the self.deferred_loading data structure to an alternate data
    structure, describing the field that *will* be loaded. This is used to
    compute the columns to select from the database and also by the
    QuerySet class to work out which fields are being initialised on each
    model. Models that have all their fields included aren't mentioned in
    the result, only those that have field restrictions in place.

    The "target" parameter is the instance that is populated (in place).
    The "callback" is a function that is called whenever a (model, field)
    pair need to be added to "target". It accepts three parameters:
    "target", and the model and list of fields being added for that model.
    """
    # self.deferred_loading is (field_names, defer): when 'defer' is true
    # the names are fields to SKIP, otherwise they are the ONLY fields to
    # load.
    field_names, defer = self.deferred_loading
    if not field_names:
        return
    orig_opts = self.model._meta
    seen = {}
    # The primary key of the root model is always required.
    must_include = {orig_opts.concrete_model: set([orig_opts.pk])}
    for field_name in field_names:
        parts = field_name.split(LOOKUP_SEP)
        cur_model = self.model
        opts = orig_opts
        # Walk the relation path (everything but the final field name),
        # recording the fields needed merely to traverse each hop.
        for name in parts[:-1]:
            old_model = cur_model
            source = opts.get_field_by_name(name)[0]
            cur_model = source.rel.to
            opts = cur_model._meta
            # Even if we're "just passing through" this model, we must add
            # both the current model's pk and the related reference field
            # to the things we select.
            must_include[old_model].add(source)
            add_to_dict(must_include, cur_model, opts.pk)
        field, model, _, _ = opts.get_field_by_name(parts[-1])
        if model is None:
            model = cur_model
        add_to_dict(seen, model, field)
    if defer:
        # We need to load all fields for each model, except those that
        # appear in "seen" (for all models that appear in "seen"). The only
        # slight complexity here is handling fields that exist on parent
        # models.
        workset = {}
        for model, values in seen.iteritems():
            for field, m in model._meta.get_fields_with_model():
                if field in values:
                    continue
                add_to_dict(workset, m or model, field)
        for model, values in must_include.iteritems():
            # If we haven't included a model in workset, we don't add the
            # corresponding must_include fields for that model, since an
            # empty set means "include all fields". That's why there's no
            # "else" branch here.
            if model in workset:
                workset[model].update(values)
        for model, values in workset.iteritems():
            callback(target, model, values)
    else:
        # "Only load these fields" mode: merge the traversal requirements
        # into the explicitly requested fields.
        for model, values in must_include.iteritems():
            if model in seen:
                seen[model].update(values)
            else:
                # As we've passed through this model, but not explicitly
                # included any fields, we have to make sure it's mentioned
                # so that only the "must include" fields are pulled in.
                seen[model] = values
        # Now ensure that every model in the inheritance chain is mentioned
        # in the parent list. Again, it must be mentioned to ensure that
        # only "must include" fields are pulled in.
        for model in orig_opts.get_parent_list():
            if model not in seen:
                seen[model] = set()
        for model, values in seen.iteritems():
            callback(target, model, values)
def deferred_to_columns_cb(self, target, model, fields):
    """
    Callback used by deferred_to_columns(). Records the column names of
    'fields' under the model's database table in the 'target' dict
    (mapping table name -> set of column names).
    """
    table_name = model._meta.db_table
    columns = target.setdefault(table_name, set())
    columns.update(field.column for field in fields)
def table_alias(self, table_name, create=False):
    """
    Returns a (alias, is_new) pair for the given table_name.

    Unless 'create' is True, an existing alias for the table is reused
    when one is available; otherwise a fresh alias is always created.
    """
    aliases = self.table_map.get(table_name)
    if aliases and not create:
        # Reuse the first alias recorded for this table.
        reused = aliases[0]
        self.alias_refcount[reused] += 1
        return reused, False
    if aliases:
        # Repeat occurrences of a table get a generated alias name.
        new_alias = '%s%d' % (self.alias_prefix, len(self.alias_map) + 1)
        aliases.append(new_alias)
    else:
        # The first occurrence of a table uses the table name directly.
        new_alias = table_name
        self.table_map[new_alias] = [new_alias]
    self.alias_refcount[new_alias] = 1
    self.tables.append(new_alias)
    return new_alias, True
def ref_alias(self, alias):
    """Increases the reference count for this alias by one."""
    self.alias_refcount[alias] = self.alias_refcount[alias] + 1
def unref_alias(self, alias, amount=1):
    """Decreases the reference count for this alias by 'amount'."""
    self.alias_refcount[alias] = self.alias_refcount[alias] - amount
def promote_alias(self, alias, unconditional=False):
    """
    Promotes the join type of an alias to an outer join if it's possible
    for the join to contain NULL values on the left. If 'unconditional' is
    False, the join is only promoted if it is nullable, otherwise it is
    always promoted.

    Returns True if the join was promoted by this call.
    """
    join_info = self.alias_map[alias]
    if not (unconditional or join_info[NULLABLE]):
        # Neither forced nor nullable: leave the join untouched.
        return False
    if join_info[JOIN_TYPE] == self.LOUTER:
        # Already an outer join; nothing to do.
        return False
    promoted = list(join_info)
    promoted[JOIN_TYPE] = self.LOUTER
    self.alias_map[alias] = tuple(promoted)
    return True
def promote_alias_chain(self, chain, must_promote=False):
    """
    Walks along a chain of aliases, promoting the first nullable join and
    any joins following that. If 'must_promote' is True, all the aliases in
    the chain are promoted.
    """
    promote_rest = must_promote
    for alias in chain:
        # Once one alias in the chain is promoted, everything after it
        # is promoted unconditionally.
        promoted = self.promote_alias(alias, promote_rest)
        promote_rest = promote_rest or promoted
def reset_refcounts(self, to_counts):
    """
    Resets reference counts for aliases so that they match the values
    given in 'to_counts' (aliases missing from 'to_counts' drop to zero).
    """
    # Snapshot the items first: unref_alias mutates alias_refcount.
    for alias, current in list(self.alias_refcount.items()):
        wanted = to_counts.get(alias, 0)
        self.unref_alias(alias, current - wanted)
def promote_unused_aliases(self, initial_refcounts, used_aliases):
    """
    Given a "before" copy of the alias_refcounts dictionary (as
    'initial_refcounts') and a collection of aliases that may have been
    changed or created, works out which aliases have been created since
    then and which ones haven't been used and promotes all of those
    aliases, plus any children of theirs in the alias tree, to outer joins.
    """
    # FIXME: There's some (a lot of!) overlap with the similar OR promotion
    # in add_filter(). It's not quite identical, but is very similar. So
    # pulling out the common bits is something for later.
    promoted_state = {}
    for alias in self.tables:
        if alias not in used_aliases:
            continue
        is_new = alias not in initial_refcounts
        unchanged = (not is_new and
                self.alias_refcount[alias] == initial_refcounts[alias])
        if not (is_new or unchanged):
            continue
        # Promote this alias, forcing it if its parent was promoted, and
        # remember the outcome so children can be forced in turn.
        parent = self.alias_map[alias][LHS_ALIAS]
        force = promoted_state.get(parent, False)
        did_promote = self.promote_alias(alias, force)
        promoted_state[alias] = force or did_promote
def change_aliases(self, change_map):
    """
    Changes the aliases in change_map (which maps old-alias -> new-alias),
    relabelling any references to them in select columns and the where
    clause.
    """
    # An alias may not appear as both a key and a value: that would mean
    # one rename step could clobber the result of an earlier one.
    assert set(change_map.keys()).intersection(set(change_map.values())) == set()

    # 1. Update references in "select" (normal columns plus aliases),
    # "group by", "where" and "having".
    self.where.relabel_aliases(change_map)
    self.having.relabel_aliases(change_map)
    for columns in [self.select, self.group_by or []]:
        for pos, col in enumerate(columns):
            # Plain (alias, column) pairs are remapped directly; richer
            # column objects know how to relabel themselves.
            if isinstance(col, (list, tuple)):
                old_alias = col[0]
                columns[pos] = (change_map.get(old_alias, old_alias), col[1])
            else:
                col.relabel_aliases(change_map)
    for mapping in [self.aggregates]:
        for key, col in mapping.items():
            if isinstance(col, (list, tuple)):
                old_alias = col[0]
                mapping[key] = (change_map.get(old_alias, old_alias), col[1])
            else:
                col.relabel_aliases(change_map)

    # 2. Rename the alias in the internal table/alias datastructures.
    for old_alias, new_alias in change_map.iteritems():
        alias_data = list(self.alias_map[old_alias])
        alias_data[RHS_ALIAS] = new_alias

        # Move the join identity bookkeeping over to the new alias.
        t = self.rev_join_map[old_alias]
        data = list(self.join_map[t])
        data[data.index(old_alias)] = new_alias
        self.join_map[t] = tuple(data)
        self.rev_join_map[new_alias] = t
        del self.rev_join_map[old_alias]
        self.alias_refcount[new_alias] = self.alias_refcount[old_alias]
        del self.alias_refcount[old_alias]
        self.alias_map[new_alias] = tuple(alias_data)
        del self.alias_map[old_alias]

        # Replace the alias in the per-table alias list and in the ordered
        # table list (first match only -- aliases are unique).
        table_aliases = self.table_map[alias_data[TABLE_NAME]]
        for pos, alias in enumerate(table_aliases):
            if alias == old_alias:
                table_aliases[pos] = new_alias
                break
        for pos, alias in enumerate(self.tables):
            if alias == old_alias:
                self.tables[pos] = new_alias
                break
    for key, alias in self.included_inherited_models.items():
        if alias in change_map:
            self.included_inherited_models[key] = change_map[alias]

    # 3. Update any joins that refer to the old alias.
    for alias, data in self.alias_map.iteritems():
        lhs = data[LHS_ALIAS]
        if lhs in change_map:
            data = list(data)
            data[LHS_ALIAS] = change_map[lhs]
            self.alias_map[alias] = tuple(data)
def bump_prefix(self, exceptions=()):
    """
    Changes the alias prefix to the next letter in the alphabet and
    relabels all the aliases. Even tables that previously had no alias will
    get an alias after this call (it's mostly used for nested queries and
    the outer query will already be using the non-aliased table name).

    Subclasses who create their own prefix should override this method to
    produce a similar result (a new prefix and relabelled aliases).

    The 'exceptions' parameter is a container that holds alias names which
    should not be changed.
    """
    old_ord = ord(self.alias_prefix)
    # Running past 'Z' would mean absurdly deep query nesting.
    assert old_ord < ord('Z')
    new_prefix = chr(old_ord + 1)
    self.alias_prefix = new_prefix
    change_map = SortedDict()
    for pos, alias in enumerate(self.tables):
        if alias in exceptions:
            continue
        renamed = '%s%d' % (new_prefix, pos)
        self.tables[pos] = renamed
        change_map[alias] = renamed
    self.change_aliases(change_map)
def get_initial_alias(self):
    """
    Returns the first alias for this query, after increasing its reference
    count.
    """
    if not self.tables:
        # No tables yet: create the root join for the model's base table.
        return self.join((None, self.model._meta.db_table, None, None))
    first = self.tables[0]
    self.ref_alias(first)
    return first
def count_active_tables(self):
    """
    Returns the number of tables in this query with a non-zero reference
    count. Note that after execution, the reference counts are zeroed, so
    tables added in compiler will not be seen by this method.
    """
    return sum(1 for refcount in self.alias_refcount.itervalues() if refcount)
def join(self, connection, always_create=False, exclusions=(),
        promote=False, outer_if_first=False, nullable=False, reuse=None):
    """
    Returns an alias for the join in 'connection', either reusing an
    existing alias for that join or creating a new one. 'connection' is a
    tuple (lhs, table, lhs_col, col) where 'lhs' is either an existing
    table alias or a table name. The join correspods to the SQL equivalent
    of::

        lhs.lhs_col = table.col

    If 'always_create' is True and 'reuse' is None, a new alias is always
    created, regardless of whether one already exists or not. If
    'always_create' is True and 'reuse' is a set, an alias in 'reuse' that
    matches the connection will be returned, if possible. If
    'always_create' is False, the first existing alias that matches the
    'connection' is returned, if any. Otherwise a new join is created.

    If 'exclusions' is specified, it is something satisfying the container
    protocol ("foo in exclusions" must work) and specifies a list of
    aliases that should not be returned, even if they satisfy the join.

    If 'promote' is True, the join type for the alias will be LOUTER (if
    the alias previously existed, the join type will be promoted from INNER
    to LOUTER, if necessary).

    If 'outer_if_first' is True and a new join is created, it will have the
    LOUTER join type. This is used when joining certain types of querysets
    and Q-objects together.

    If 'nullable' is True, the join can potentially involve NULL values and
    is a candidate for promotion (to "left outer") when combining querysets.
    """
    lhs, table, lhs_col, col = connection
    # 'lhs' may be an alias or a raw table name; normalise to a table name
    # for the join-identity tuple below.
    if lhs in self.alias_map:
        lhs_table = self.alias_map[lhs][TABLE_NAME]
    else:
        lhs_table = lhs

    if reuse and always_create and table in self.table_map:
        # Convert the 'reuse' to case to be "exclude everything but the
        # reusable set, minus exclusions, for this table".
        exclusions = set(self.table_map[table]).difference(reuse).union(set(exclusions))
        always_create = False
    t_ident = (lhs_table, table, lhs_col, col)
    if not always_create:
        # Try to find an existing alias for an identical join that is not
        # excluded and whose LHS is still live and matches ours.
        for alias in self.join_map.get(t_ident, ()):
            if alias not in exclusions:
                if lhs_table and not self.alias_refcount[self.alias_map[alias][LHS_ALIAS]]:
                    # The LHS of this join tuple is no longer part of the
                    # query, so skip this possibility.
                    continue
                if self.alias_map[alias][LHS_ALIAS] != lhs:
                    continue
                self.ref_alias(alias)
                if promote:
                    self.promote_alias(alias)
                return alias

    # No reuse is possible, so we need a new alias.
    alias, _ = self.table_alias(table, True)
    if not lhs:
        # Not all tables need to be joined to anything. No join type
        # means the later columns are ignored.
        join_type = None
    elif promote or outer_if_first:
        join_type = self.LOUTER
    else:
        join_type = self.INNER
    join = (table, alias, join_type, lhs, lhs_col, col, nullable)
    # Register the new join in both forward and reverse lookup structures.
    self.alias_map[alias] = join
    if t_ident in self.join_map:
        self.join_map[t_ident] += (alias,)
    else:
        self.join_map[t_ident] = (alias,)
    self.rev_join_map[alias] = t_ident
    return alias
def setup_inherited_models(self):
    """
    If the model that is the basis for this QuerySet inherits other models,
    we need to ensure that those other models have their tables included in
    the query.

    We do this as a separate step so that subclasses know which
    tables are going to be active in the query, without needing to compute
    all the select columns (this method is called from pre_sql_setup(),
    whereas column determination is a later part, and side-effect, of
    as_sql()).
    """
    opts = self.model._meta
    root_alias = self.tables[0]
    # Maps model -> alias; the None key stands for the root table.
    seen = {None: root_alias}

    # Skip all proxy to the root proxied model
    proxied_model = opts.concrete_model

    for field, model in opts.get_fields_with_model():
        if model not in seen:
            if model is proxied_model:
                # Proxy models share the root table, so no extra join.
                seen[model] = root_alias
            else:
                # Join the ancestor's table via the parent-link field.
                link_field = opts.get_ancestor_link(model)
                seen[model] = self.join((root_alias, model._meta.db_table,
                        link_field.column, model._meta.pk.column))
    self.included_inherited_models = seen
def remove_inherited_models(self):
    """
    Undoes the effects of setup_inherited_models(). Should be called
    whenever select columns (self.select) are set explicitly.
    """
    for model, alias in list(self.included_inherited_models.items()):
        # The falsy (None) key marks the root table, whose reference is
        # not released here.
        if model:
            self.unref_alias(alias)
    self.included_inherited_models = {}
def need_force_having(self, q_object):
    """
    Returns whether or not all elements of this q_object need to be put
    together in the HAVING clause.
    """
    for child in q_object.children:
        if isinstance(child, Node):
            # Nested Q-object: recurse into it.
            if self.need_force_having(child):
                return True
        elif child[0].split(LOOKUP_SEP)[0] in self.aggregates:
            # A (lookup, value) leaf whose first path component names an
            # aggregate must be matched in HAVING rather than WHERE.
            return True
    return False
def add_aggregate(self, aggregate, model, alias, is_summary):
    """
    Adds a single aggregate expression to the Query.

    'aggregate' is the aggregate object, 'model' the model it is computed
    over, 'alias' the name the result will be exposed under, and
    'is_summary' whether this is a terminal aggregate() (as opposed to an
    annotate()).
    """
    opts = model._meta
    field_list = aggregate.lookup.split(LOOKUP_SEP)
    if len(field_list) == 1 and aggregate.lookup in self.aggregates:
        # Aggregate is over an annotation
        field_name = field_list[0]
        col = field_name
        source = self.aggregates[field_name]
        if not is_summary:
            # Aggregating an aggregate only makes sense at summary level.
            raise FieldError("Cannot compute %s('%s'): '%s' is an aggregate" % (
                aggregate.name, field_name, field_name))
    elif ((len(field_list) > 1) or
        (field_list[0] not in [i.name for i in opts.fields]) or
        self.group_by is None or
        not is_summary):
        # If:
        #   - the field descriptor has more than one part (foo__bar), or
        #   - the field descriptor is referencing an m2m/m2o field, or
        #   - this is a reference to a model field (possibly inherited), or
        #   - this is an annotation over a model field
        # then we need to explore the joins that are required.

        field, source, opts, join_list, last, _ = self.setup_joins(
            field_list, opts, self.get_initial_alias(), False)

        # Process the join chain to see if it can be trimmed
        col, _, join_list = self.trim_joins(source, join_list, last, False)

        # If the aggregate references a model or field that requires a join,
        # those joins must be LEFT OUTER - empty join rows must be returned
        # in order for zeros to be returned for those aggregates.
        for column_alias in join_list:
            self.promote_alias(column_alias, unconditional=True)

        col = (join_list[-1], col)
    else:
        # The simplest cases. No joins required -
        # just reference the provided column alias.
        field_name = field_list[0]
        source = opts.get_field(field_name)
        col = field_name

    # Add the aggregate to the query
    aggregate.add_to_query(self, alias, col=col, source=source, is_summary=is_summary)
def add_filter(self, filter_expr, connector=AND, negate=False, trim=False,
        can_reuse=None, process_extras=True, force_having=False):
    """
    Add a single filter to the query. The 'filter_expr' is a pair:
    (filter_string, value). E.g. ('name__contains', 'fred')

    If 'negate' is True, this is an exclude() filter. It's important to
    note that this method does not negate anything in the where-clause
    object when inserting the filter constraints. This is because negated
    filters often require multiple calls to add_filter() and the negation
    should only happen once. So the caller is responsible for this (the
    caller will normally be add_q(), so that as an example).

    If 'trim' is True, we automatically trim the final join group (used
    internally when constructing nested queries).

    If 'can_reuse' is a set, we are processing a component of a
    multi-component filter (e.g. filter(Q1, Q2)). In this case, 'can_reuse'
    will be a set of table aliases that can be reused in this filter, even
    if we would otherwise force the creation of new aliases for a join
    (needed for nested Q-filters). The set is updated by this method.

    If 'process_extras' is set, any extra filters returned from the table
    joining process will be processed. This parameter is set to False
    during the processing of extra filters to avoid infinite recursion.
    """
    arg, value = filter_expr
    parts = arg.split(LOOKUP_SEP)
    if not parts:
        raise FieldError("Cannot parse keyword query %r" % arg)

    # Work out the lookup type and remove it from the end of 'parts',
    # if necessary.
    lookup_type = 'exact' # Default lookup type
    num_parts = len(parts)
    if (len(parts) > 1 and parts[-1] in self.query_terms
        and arg not in self.aggregates):
        # Traverse the lookup query to distinguish related fields from
        # lookup types.
        lookup_model = self.model
        for counter, field_name in enumerate(parts):
            try:
                lookup_field = lookup_model._meta.get_field(field_name)
            except FieldDoesNotExist:
                # Not a field. Bail out.
                lookup_type = parts.pop()
                break
            # Unless we're at the end of the list of lookups, let's attempt
            # to continue traversing relations.
            if (counter + 1) < num_parts:
                try:
                    lookup_model = lookup_field.rel.to
                except AttributeError:
                    # Not a related field. Bail out.
                    lookup_type = parts.pop()
                    break

    # By default, this is a WHERE clause. If an aggregate is referenced
    # in the value, the filter will be promoted to a HAVING
    having_clause = False

    # Interpret '__exact=None' as the sql 'is NULL'; otherwise, reject all
    # uses of None as a query value.
    if value is None:
        if lookup_type != 'exact':
            raise ValueError("Cannot use None as a query value")
        lookup_type = 'isnull'
        value = True
    elif callable(value):
        # Lazily-evaluated values (e.g. functions) are resolved now.
        value = value()
    elif isinstance(value, ExpressionNode):
        # If value is a query expression, evaluate it
        value = SQLEvaluator(value, self)
        having_clause = value.contains_aggregate

    # Filters on an aggregate alias go straight into HAVING.
    for alias, aggregate in self.aggregates.items():
        if alias in (parts[0], LOOKUP_SEP.join(parts)):
            entry = self.where_class()
            entry.add((aggregate, lookup_type, value), AND)
            if negate:
                entry.negate()
            self.having.add(entry, connector)
            return

    opts = self.get_meta()
    alias = self.get_initial_alias()
    allow_many = trim or not negate

    try:
        field, target, opts, join_list, last, extra_filters = self.setup_joins(
                parts, opts, alias, True, allow_many, allow_explicit_fk=True,
                can_reuse=can_reuse, negate=negate,
                process_extras=process_extras)
    except MultiJoin, e:
        # An exclude() across a multi-valued relation must be expressed as
        # a subquery; split_exclude() handles that.
        self.split_exclude(filter_expr, LOOKUP_SEP.join(parts[:e.level]),
                can_reuse)
        return

    table_promote = False
    join_promote = False

    if (lookup_type == 'isnull' and value is True and not negate and
            len(join_list) > 1):
        # If the comparison is against NULL, we may need to use some left
        # outer joins when creating the join chain. This is only done when
        # needed, as it's less efficient at the database level.
        self.promote_alias_chain(join_list)
        join_promote = True

    # Process the join list to see if we can remove any inner joins from
    # the far end (fewer tables in a query is better).
    nonnull_comparison = (lookup_type == 'isnull' and value is False)
    col, alias, join_list = self.trim_joins(target, join_list, last, trim,
            nonnull_comparison)

    if connector == OR:
        # Some joins may need to be promoted when adding a new filter to a
        # disjunction. We walk the list of new joins and where it diverges
        # from any previous joins (ref count is 1 in the table list), we
        # make the new additions (and any existing ones not used in the new
        # join list) an outer join.
        join_it = iter(join_list)
        table_it = iter(self.tables)
        join_it.next(), table_it.next()
        unconditional = False
        for join in join_it:
            table = table_it.next()
            # Once we hit an outer join, all subsequent joins must
            # also be promoted, regardless of whether they have been
            # promoted as a result of this pass through the tables.
            unconditional = (unconditional or
                self.alias_map[join][JOIN_TYPE] == self.LOUTER)
            if join == table and self.alias_refcount[join] > 1:
                # We have more than one reference to this join table.
                # This means that we are dealing with two different query
                # subtrees, so we don't need to do any join promotion.
                continue
            join_promote = join_promote or self.promote_alias(join, unconditional)
            if table != join:
                table_promote = self.promote_alias(table)
                # We only get here if we have found a table that exists
                # in the join list, but isn't on the original tables list.
                # This means we've reached the point where we only have
                # new tables, so we can break out of this promotion loop.
                break
        self.promote_alias_chain(join_it, join_promote)
        self.promote_alias_chain(table_it, table_promote or join_promote)

    if having_clause or force_having:
        # HAVING constraints require the compared column in GROUP BY.
        if (alias, col) not in self.group_by:
            self.group_by.append((alias, col))
        self.having.add((Constraint(alias, col, field), lookup_type, value),
                connector)
    else:
        self.where.add((Constraint(alias, col, field), lookup_type, value),
            connector)

    if negate:
        # For exclude(), the joins must be outer and extra IS NULL /
        # IS NOT NULL constraints are needed so rows without a related
        # object are handled correctly.
        self.promote_alias_chain(join_list)
        if lookup_type != 'isnull':
            if len(join_list) > 1:
                for alias in join_list:
                    if self.alias_map[alias][JOIN_TYPE] == self.LOUTER:
                        j_col = self.alias_map[alias][RHS_JOIN_COL]
                        entry = self.where_class()
                        entry.add(
                            (Constraint(alias, j_col, None), 'isnull', True),
                            AND
                        )
                        entry.negate()
                        self.where.add(entry, AND)
                        break
            if not (lookup_type == 'in'
                        and not hasattr(value, 'as_sql')
                        and not hasattr(value, '_as_sql')
                        and not value) and field.null:
                # Leaky abstraction artifact: We have to specifically
                # exclude the "foo__in=[]" case from this handling, because
                # it's short-circuited in the Where class.
                # We also need to handle the case where a subquery is provided
                self.where.add((Constraint(alias, col, None), 'isnull', False), AND)

    if can_reuse is not None:
        can_reuse.update(join_list)
    if process_extras:
        for filter in extra_filters:
            self.add_filter(filter, negate=negate, can_reuse=can_reuse,
                    process_extras=False)
def add_q(self, q_object, used_aliases=None, force_having=False):
    """
    Adds a Q-object to the current filter.

    Can also be used to add anything that has an 'add_to_query()' method.
    """
    if used_aliases is None:
        used_aliases = self.used_aliases
    if hasattr(q_object, 'add_to_query'):
        # Complex custom objects are responsible for adding themselves.
        q_object.add_to_query(self, used_aliases)
    else:
        # A non-AND connector combined with existing constraints needs its
        # own subtree so the new conditions don't leak into the old ones.
        if self.where and q_object.connector != AND and len(q_object) > 1:
            self.where.start_subtree(AND)
            subtree = True
        else:
            subtree = False
        connector = AND
        if q_object.connector == OR and not force_having:
            force_having = self.need_force_having(q_object)
        for child in q_object.children:
            if connector == OR:
                # Snapshot refcounts so newly-created/unused aliases can
                # be detected (and promoted) after this child is added.
                refcounts_before = self.alias_refcount.copy()
            if force_having:
                self.having.start_subtree(connector)
            else:
                self.where.start_subtree(connector)
            if isinstance(child, Node):
                self.add_q(child, used_aliases, force_having=force_having)
            else:
                self.add_filter(child, connector, q_object.negated,
                        can_reuse=used_aliases, force_having=force_having)
            if force_having:
                self.having.end_subtree()
            else:
                self.where.end_subtree()
            if connector == OR:
                # Aliases that were newly added or not used at all need to
                # be promoted to outer joins if they are nullable relations.
                # (they shouldn't turn the whole conditional into the empty
                # set just because they don't match anything).
                self.promote_unused_aliases(refcounts_before, used_aliases)
            connector = q_object.connector
        if q_object.negated:
            self.where.negate()
        if subtree:
            self.where.end_subtree()
    if self.filter_is_sticky:
        self.used_aliases = used_aliases
def setup_joins(self, names, opts, alias, dupe_multis, allow_many=True,
        allow_explicit_fk=False, can_reuse=None, negate=False,
        process_extras=True):
    """
    Compute the necessary table joins for the passage through the fields
    given in 'names'. 'opts' is the Options class for the current model
    (which gives the table we are joining to), 'alias' is the alias for the
    table we are joining to. If dupe_multis is True, any many-to-many or
    many-to-one joins will always create a new alias (necessary for
    disjunctive filters). If can_reuse is not None, it's a list of aliases
    that can be reused in these joins (nothing else can be reused in this
    case). Finally, 'negate' is used in the same sense as for add_filter()
    -- it indicates an exclude() filter, or something similar. It is only
    passed in here so that it can be passed to a field's extra_filter() for
    customized behavior.

    Returns the final field involved in the join, the target database
    column (used for any 'where' constraint), the final 'opts' value and the
    list of tables joined.
    """
    joins = [alias]
    # 'last' records offsets into 'joins' at each lookup component so the
    # caller (trim_joins) can map components back to tables.
    last = [0]
    dupe_set = set()
    exclusions = set()
    extra_filters = []
    int_alias = None
    for pos, name in enumerate(names):
        if int_alias is not None:
            exclusions.add(int_alias)
        exclusions.add(alias)
        last.append(len(joins))
        if name == 'pk':
            name = opts.pk.name

        try:
            field, model, direct, m2m = opts.get_field_by_name(name)
        except FieldDoesNotExist:
            for f in opts.fields:
                if allow_explicit_fk and name == f.attname:
                    # XXX: A hack to allow foo_id to work in values() for
                    # backwards compatibility purposes. If we dropped that
                    # feature, this could be removed.
                    field, model, direct, m2m = opts.get_field_by_name(f.name)
                    break
            else:
                names = opts.get_all_field_names() + self.aggregate_select.keys()
                raise FieldError("Cannot resolve keyword %r into field. "
                        "Choices are: %s" % (name, ", ".join(names)))

        if not allow_many and (m2m or not direct):
            # Multi-valued hop not allowed here: release all refs taken so
            # far and let the caller handle it (e.g. via split_exclude()).
            for alias in joins:
                self.unref_alias(alias)
            raise MultiJoin(pos + 1)
        if model:
            # The field lives on a base class of the current model.
            # Skip the chain of proxy to the concrete proxied model
            proxied_model = opts.concrete_model

            for int_model in opts.get_base_chain(model):
                if int_model is proxied_model:
                    opts = int_model._meta
                else:
                    lhs_col = opts.parents[int_model].column
                    dedupe = lhs_col in opts.duplicate_targets
                    if dedupe:
                        exclusions.update(self.dupe_avoidance.get(
                            (id(opts), lhs_col), ()))
                        dupe_set.add((opts, lhs_col))
                    opts = int_model._meta
                    alias = self.join((alias, opts.db_table, lhs_col,
                            opts.pk.column), exclusions=exclusions)
                    joins.append(alias)
                    exclusions.add(alias)
                    for (dupe_opts, dupe_col) in dupe_set:
                        self.update_dupe_avoidance(dupe_opts, dupe_col,
                                alias)
        cached_data = opts._join_cache.get(name)
        orig_opts = opts
        dupe_col = direct and field.column or field.field.column
        dedupe = dupe_col in opts.duplicate_targets
        if dupe_set or dedupe:
            if dedupe:
                dupe_set.add((opts, dupe_col))
            exclusions.update(self.dupe_avoidance.get((id(opts), dupe_col),
                    ()))

        if process_extras and hasattr(field, 'extra_filters'):
            extra_filters.extend(field.extra_filters(names, pos, negate))
        if direct:
            if m2m:
                # Many-to-many field defined on the current model.
                if cached_data:
                    (table1, from_col1, to_col1, table2, from_col2,
                            to_col2, opts, target) = cached_data
                else:
                    table1 = field.m2m_db_table()
                    from_col1 = opts.get_field_by_name(
                            field.m2m_target_field_name())[0].column
                    to_col1 = field.m2m_column_name()
                    opts = field.rel.to._meta
                    table2 = opts.db_table
                    from_col2 = field.m2m_reverse_name()
                    to_col2 = opts.get_field_by_name(
                            field.m2m_reverse_target_field_name())[0].column
                    target = opts.pk
                    orig_opts._join_cache[name] = (table1, from_col1,
                            to_col1, table2, from_col2, to_col2, opts,
                            target)

                int_alias = self.join((alias, table1, from_col1, to_col1),
                        dupe_multis, exclusions, nullable=True,
                        reuse=can_reuse)
                if int_alias == table2 and from_col2 == to_col2:
                    joins.append(int_alias)
                    alias = int_alias
                else:
                    alias = self.join(
                            (int_alias, table2, from_col2, to_col2),
                            dupe_multis, exclusions, nullable=True,
                            reuse=can_reuse)
                    joins.extend([int_alias, alias])
            elif field.rel:
                # One-to-one or many-to-one field
                if cached_data:
                    (table, from_col, to_col, opts, target) = cached_data
                else:
                    opts = field.rel.to._meta
                    target = field.rel.get_related_field()
                    table = opts.db_table
                    from_col = field.column
                    to_col = target.column
                    orig_opts._join_cache[name] = (table, from_col, to_col,
                            opts, target)

                alias = self.join((alias, table, from_col, to_col),
                        exclusions=exclusions, nullable=field.null)
                joins.append(alias)
            else:
                # Non-relation fields.
                target = field
                break
        else:
            # The field is defined on the model we are joining TO (a
            # reverse relation).
            orig_field = field
            field = field.field
            if m2m:
                # Many-to-many field defined on the target model.
                if cached_data:
                    (table1, from_col1, to_col1, table2, from_col2,
                            to_col2, opts, target) = cached_data
                else:
                    table1 = field.m2m_db_table()
                    from_col1 = opts.get_field_by_name(
                            field.m2m_reverse_target_field_name())[0].column
                    to_col1 = field.m2m_reverse_name()
                    opts = orig_field.opts
                    table2 = opts.db_table
                    from_col2 = field.m2m_column_name()
                    to_col2 = opts.get_field_by_name(
                            field.m2m_target_field_name())[0].column
                    target = opts.pk
                    orig_opts._join_cache[name] = (table1, from_col1,
                            to_col1, table2, from_col2, to_col2, opts,
                            target)

                int_alias = self.join((alias, table1, from_col1, to_col1),
                        dupe_multis, exclusions, nullable=True,
                        reuse=can_reuse)
                alias = self.join((int_alias, table2, from_col2, to_col2),
                        dupe_multis, exclusions, nullable=True,
                        reuse=can_reuse)
                joins.extend([int_alias, alias])
            else:
                # One-to-many field (ForeignKey defined on the target model)
                if cached_data:
                    (table, from_col, to_col, opts, target) = cached_data
                else:
                    local_field = opts.get_field_by_name(
                            field.rel.field_name)[0]
                    opts = orig_field.opts
                    table = opts.db_table
                    from_col = local_field.column
                    to_col = field.column
                    # In case of a recursive FK, use the to_field for
                    # reverse lookups as well
                    if orig_field.model is local_field.model:
                        target = opts.get_field_by_name(
                                field.rel.field_name)[0]
                    else:
                        target = opts.pk
                    orig_opts._join_cache[name] = (table, from_col, to_col,
                            opts, target)

                alias = self.join((alias, table, from_col, to_col),
                        dupe_multis, exclusions, nullable=True,
                        reuse=can_reuse)
                joins.append(alias)

        for (dupe_opts, dupe_col) in dupe_set:
            if int_alias is None:
                to_avoid = alias
            else:
                to_avoid = int_alias
            self.update_dupe_avoidance(dupe_opts, dupe_col, to_avoid)

    # A non-relation field mid-path ends the loop early ('break' above);
    # that is only legal when it was the last component.
    if pos != len(names) - 1:
        if pos == len(names) - 2:
            raise FieldError("Join on field %r not permitted. Did you misspell %r for the lookup type?" % (name, names[pos + 1]))
        else:
            raise FieldError("Join on field %r not permitted." % name)

    return field, target, opts, joins, last, extra_filters
def trim_joins(self, target, join_list, last, trim, nonnull_check=False):
    """
    Sometimes joins at the end of a multi-table sequence can be trimmed. If
    the final join is against the same column as we are comparing against,
    and is an inner join, we can go back one step in a join chain and
    compare against the LHS of the join instead (and then repeat the
    optimization). The result, potentially, involves fewer table joins.

    The 'target' parameter is the final field being joined to, 'join_list'
    is the full list of join aliases.

    The 'last' list contains offsets into 'join_list', corresponding to
    each component of the filter. Many-to-many relations, for example, add
    two tables to the join list and we want to deal with both tables the
    same way, so 'last' has an entry for the first of the two tables and
    then the table immediately after the second table, in that case.

    The 'trim' parameter forces the final piece of the join list to be
    trimmed before anything. See the documentation of add_filter() for
    details about this.

    The 'nonnull_check' parameter is True when we are using inner joins
    between tables explicitly to exclude NULL entries. In that case, the
    tables shouldn't be trimmed, because the very action of joining to them
    alters the result set.

    Returns the final active column and table alias and the new active
    join_list.
    """
    final = len(join_list)
    penultimate = last.pop()
    if penultimate == final:
        penultimate = last.pop()
    if trim and final > 1:
        # Forced trim: drop the last filter component's joins outright and
        # release their aliases; comparisons then use the join column on
        # the remaining (left-hand) side.
        extra = join_list[penultimate:]
        join_list = join_list[:penultimate]
        final = penultimate
        penultimate = last.pop()
        col = self.alias_map[extra[0]][LHS_JOIN_COL]
        for alias in extra:
            self.unref_alias(alias)
    else:
        col = target.column
    alias = join_list[-1]
    while final > 1:
        join = self.alias_map[alias]
        # Only trim when the active column is exactly the RHS join column
        # of an INNER join; outer joins (or explicit non-null joins) alter
        # the result set and must be kept.
        if (col != join[RHS_JOIN_COL] or join[JOIN_TYPE] != self.INNER or
                nonnull_check):
            break
        self.unref_alias(alias)
        alias = join[LHS_ALIAS]
        col = join[LHS_JOIN_COL]
        join_list.pop()
        final -= 1
        if final == penultimate:
            penultimate = last.pop()
    return col, alias, join_list
def update_dupe_avoidance(self, opts, col, alias):
    """
    For a column that is one of multiple pointing to the same table, record
    that 'alias' must not be reused when joining via those sibling columns.
    """
    ident = id(opts)
    for name in opts.duplicate_targets[col]:
        # Create the avoidance set on first use, then add the alias.
        self.dupe_avoidance.setdefault((ident, name), set()).add(alias)
def split_exclude(self, filter_expr, prefix, can_reuse):
    """
    When doing an exclude against any kind of N-to-many relation, we need
    to use a subquery. This method constructs the nested query, given the
    original exclude filter (filter_expr) and the portion up to the first
    N-to-many relation field.
    """
    # Build an inner query that matches the excluded condition; the outer
    # query then filters with NOT (pk IN <subquery>).
    query = Query(self.model)
    query.add_filter(filter_expr, can_reuse=can_reuse)
    query.bump_prefix()
    query.clear_ordering(True)
    query.set_start(prefix)
    # Adding extra check to make sure the selected field will not be null
    # since we are adding a IN <subquery> clause. This prevents the
    # database from tripping over IN (...,NULL,...) selects and returning
    # nothing
    alias, col = query.select[0]
    query.where.add((Constraint(alias, col, None), 'isnull', False), AND)

    self.add_filter(('%s__in' % prefix, query), negate=True, trim=True,
            can_reuse=can_reuse)

    # If there's more than one join in the inner query (before any initial
    # bits were trimmed -- which means the last active table is more than
    # two places into the alias list), we need to also handle the
    # possibility that the earlier joins don't match anything by adding a
    # comparison to NULL (e.g. in
    # Tag.objects.exclude(parent__parent__name='t1'), a tag with no parent
    # would otherwise be overlooked).
    active_positions = [pos for (pos, count) in
            enumerate(query.alias_refcount.itervalues()) if count]
    if active_positions[-1] > 1:
        self.add_filter(('%s__isnull' % prefix, False), negate=True,
                trim=True, can_reuse=can_reuse)
def set_limits(self, low=None, high=None):
    """
    Adjusts the limits on the rows retrieved. We use low/high to set these,
    as it makes it more Pythonic to read and write. When the SQL query is
    created, they are converted to the appropriate offset and limit values.

    Any limits passed in here are applied relative to the existing
    constraints. So low is added to the current low value and both will be
    clamped to any existing high value.
    """
    if high is not None:
        candidate = self.low_mark + high
        # Clamp against any previously-set upper bound.
        if self.high_mark is not None:
            candidate = min(self.high_mark, candidate)
        self.high_mark = candidate
    if low is not None:
        candidate = self.low_mark + low
        if self.high_mark is not None:
            candidate = min(self.high_mark, candidate)
        self.low_mark = candidate
def clear_limits(self):
    """
    Removes any row-range restriction from the query.
    """
    self.low_mark = 0
    self.high_mark = None
def can_filter(self):
    """
    Returns True if adding filters to this instance is still possible.

    Typically, this means no limits or offsets have been put on the results.
    """
    if self.low_mark:
        return False
    return self.high_mark is None
def clear_select_fields(self):
    """
    Empties the list of selected columns and their model fields (the
    extra_select columns are left alone). Some queryset types completely
    replace any existing list of select columns.
    """
    self.select, self.select_fields = [], []
def add_distinct_fields(self, *field_names):
    """
    Turns on DISTINCT and records the fields for a "distinct on" clause.
    """
    self.distinct = True
    self.distinct_fields = field_names
def add_fields(self, field_names, allow_m2m=True):
    """
    Adds the given (model) fields to the select set. The field names are
    added in the order specified.
    """
    alias = self.get_initial_alias()
    opts = self.get_meta()

    try:
        for name in field_names:
            # Resolve the (possibly relation-spanning) field name into the
            # target field and the chain of join aliases needed to reach it.
            field, target, u2, joins, u3, u4 = self.setup_joins(
                    name.split(LOOKUP_SEP), opts, alias, False, allow_m2m,
                    True)
            final_alias = joins[-1]
            col = target.column
            if len(joins) > 1:
                join = self.alias_map[final_alias]
                # If the final join is against the same column we select,
                # drop it and select from the LHS of that join instead.
                if col == join[RHS_JOIN_COL]:
                    self.unref_alias(final_alias)
                    final_alias = join[LHS_ALIAS]
                    col = join[LHS_JOIN_COL]
                    joins = joins[:-1]
            # Any remaining multi-table joins must be outer joins so rows
            # without related objects are still returned.
            self.promote_alias_chain(joins[1:])
            self.select.append((final_alias, col))
            self.select_fields.append(field)
    except MultiJoin:
        raise FieldError("Invalid field name: '%s'" % name)
    except FieldError:
        names = opts.get_all_field_names() + self.extra.keys() + self.aggregate_select.keys()
        names.sort()
        # Re-raise with the list of valid choices for a friendlier error.
        raise FieldError("Cannot resolve keyword %r into field. "
                "Choices are: %s" % (name, ", ".join(names)))
    self.remove_inherited_models()
def add_ordering(self, *ordering):
    """
    Adds items from the 'ordering' sequence to the query's "order by"
    clause. These items are either field names (not column names) --
    possibly with a direction prefix ('-' or '?') -- or ordinals,
    corresponding to column positions in the 'select' list.

    If 'ordering' is empty, all ordering is cleared from the query.
    """
    # Collect every item that fails validation so they can all be
    # reported in a single error.
    bad_items = [item for item in ordering if not ORDER_PATTERN.match(item)]
    if bad_items:
        raise FieldError('Invalid order_by arguments: %s' % bad_items)
    if ordering:
        self.order_by.extend(ordering)
    else:
        self.default_ordering = False
def clear_ordering(self, force_empty=False):
    """
    Removes any ordering settings. If 'force_empty' is True, there will be
    no ordering in the resulting query (not even the model's default).
    """
    self.order_by, self.extra_order_by = [], ()
    if force_empty:
        self.default_ordering = False
def set_group_by(self):
    """
    Expands the GROUP BY clause required by the query.

    This will usually be the set of all non-aggregate fields in the
    return data. If the database backend supports grouping by the
    primary key, and the query would be equivalent, the optimization
    will be made automatically.
    """
    # Group by every currently-selected column (an independent copy so
    # later changes to self.select don't leak into group_by).
    self.group_by = list(self.select)
def add_count_column(self):
    """
    Converts the query to do count(...) or count(distinct(pk)) in order to
    get its size.
    """
    if not self.distinct:
        if not self.select:
            # Counting all rows; no column restriction needed.
            count = self.aggregates_module.Count('*', is_summary=True)
        else:
            assert len(self.select) == 1, \
                    "Cannot add count col with multiple cols in 'select': %r" % self.select
            count = self.aggregates_module.Count(self.select[0])
    else:
        opts = self.model._meta
        if not self.select:
            # No explicit select: count distinct primary keys on the
            # model's base table.
            count = self.aggregates_module.Count((self.join((None, opts.db_table, None, None)), opts.pk.column),
                                 is_summary=True, distinct=True)
        else:
            # Because of SQL portability issues, multi-column, distinct
            # counts need a sub-query -- see get_count() for details.
            assert len(self.select) == 1, \
                    "Cannot add count col with multiple cols in 'select'."

            count = self.aggregates_module.Count(self.select[0], distinct=True)
        # Distinct handling is done in Count(), so don't do it at this
        # level.
        self.distinct = False

    # Set only aggregate to be the count column.
    # Clear out the select cache to reflect the new unmasked aggregates.
    self.aggregates = {None: count}
    self.set_aggregate_mask(None)
    self.group_by = None
def add_select_related(self, fields):
    """
    Sets up the select_related data structure so that we only select
    certain related models (as opposed to all models, when
    self.select_related=True).
    """
    # Build a nested dict: 'a__b__c' becomes {'a': {'b': {'c': {}}}}.
    tree = {}
    for path in fields:
        cursor = tree
        for part in path.split(LOOKUP_SEP):
            cursor = cursor.setdefault(part, {})
    self.related_select_cols = []
    self.related_select_fields = []
    self.select_related = tree
def add_extra(self, select, select_params, where, params, tables, order_by):
    """
    Adds data to the various extra_* attributes for user-created additions
    to the query.
    """
    if select:
        # We need to pair any placeholder markers in the 'select'
        # dictionary with their parameters in 'select_params' so that
        # subsequent updates to the select dictionary also adjust the
        # parameters appropriately.
        select_pairs = SortedDict()
        if select_params:
            param_iter = iter(select_params)
        else:
            param_iter = iter([])
        for name, entry in select.items():
            entry = force_unicode(entry)
            entry_params = []
            # Consume one parameter per '%s' placeholder in the entry.
            pos = entry.find("%s")
            while pos != -1:
                entry_params.append(param_iter.next())
                pos = entry.find("%s", pos + 2)
            select_pairs[name] = (entry, entry_params)
        # This is order preserving, since self.extra_select is a SortedDict.
        self.extra.update(select_pairs)
    if where or params:
        self.where.add(ExtraWhere(where, params), AND)
    if tables:
        self.extra_tables += tuple(tables)
    if order_by:
        self.extra_order_by = order_by
def clear_deferred_loading(self):
    """
    Remove any fields from the deferred loading set.
    """
    # (field_names, defer) -- an empty set with defer=True means
    # "defer nothing".
    self.deferred_loading = (set(), True)
def add_deferred_loading(self, field_names):
    """
    Add the given list of model field names to the set of fields to
    exclude from loading from the database when automatic column selection
    is done. The new field names are added to any existing field names that
    are deferred (or removed from any existing field names that are marked
    as the only ones for immediate loading).
    """
    # Fields on related models are stored in the literal double-underscore
    # format, so that we can use a set datastructure. We do the foo__bar
    # splitting and handling when computing the SQL colum names (as part of
    # get_columns()).
    current, defer = self.deferred_loading
    incoming = set(field_names)
    if defer:
        # Grow the deferred set.
        self.deferred_loading = (current | incoming, True)
    else:
        # Shrink the "immediate load" set instead.
        self.deferred_loading = (current - incoming, False)
def add_immediate_loading(self, field_names):
    """
    Add the given list of model field names to the set of fields to
    retrieve when the SQL is executed ("immediate loading" fields). The
    field names replace any existing immediate loading field names. If
    there are field names already specified for deferred loading, those
    names are removed from the new field_names before storing the new names
    for immediate loading. (That is, immediate loading overrides any
    existing immediate values, but respects existing deferrals.)
    """
    existing, defer = self.deferred_loading
    wanted = set(field_names)
    if 'pk' in wanted:
        # Translate the 'pk' shorthand into the real primary-key name.
        wanted.discard('pk')
        wanted.add(self.model._meta.pk.name)

    if defer:
        # Remove any existing deferred names from the current set before
        # setting the new names.
        self.deferred_loading = (wanted - existing, False)
    else:
        # Replace any existing "immediate load" field names.
        self.deferred_loading = (wanted, False)
def get_loaded_field_names(self):
    """
    If any fields are marked to be deferred, returns a dictionary mapping
    models to a set of names in those fields that will be loaded. If a
    model is not in the returned dictionary, none of its fields are
    deferred.

    If no fields are marked for deferral, returns an empty dictionary.
    """
    collection = {}
    # deferred_to_data() walks the deferred-loading information and calls
    # the callback once per model with the fields that will be loaded.
    self.deferred_to_data(collection, self.get_loaded_field_names_cb)
    return collection
def get_loaded_field_names_cb(self, target, model, fields):
    """
    Callback used by get_deferred_field_names(): records, per model, the
    set of field names that will be loaded.
    """
    target[model] = set(f.name for f in fields)
def set_aggregate_mask(self, names):
    """
    Set the mask of aggregates that will actually be returned by the
    SELECT; None means "no masking" (all aggregates are returned).
    """
    self.aggregate_select_mask = None if names is None else set(names)
    # Invalidate the cached masked view so it is recomputed on next access.
    self._aggregate_select_cache = None
def set_extra_mask(self, names):
    """
    Set the mask of extra select items that will be returned by SELECT;
    we don't actually remove them from the Query since they might be used
    later.
    """
    self.extra_select_mask = None if names is None else set(names)
    # Drop the cached masked view so it is rebuilt lazily.
    self._extra_select_cache = None
def _aggregate_select(self):
    """The SortedDict of aggregate columns that are not masked, and should
    be used in the SELECT clause.

    This result is cached for optimization purposes.
    """
    if self._aggregate_select_cache is not None:
        return self._aggregate_select_cache
    elif self.aggregate_select_mask is not None:
        # Filter self.aggregates down to the masked subset, preserving
        # insertion order via SortedDict.
        self._aggregate_select_cache = SortedDict([
            (k,v) for k,v in self.aggregates.items()
            if k in self.aggregate_select_mask
        ])
        return self._aggregate_select_cache
    else:
        # No mask set: expose all aggregates (uncached on purpose).
        return self.aggregates
aggregate_select = property(_aggregate_select)
def _extra_select(self):
    """
    The SortedDict of extra-select entries that are not masked out; the
    result of applying extra_select_mask is cached until the mask changes.
    """
    if self._extra_select_cache is not None:
        return self._extra_select_cache
    elif self.extra_select_mask is not None:
        self._extra_select_cache = SortedDict([
            (k,v) for k,v in self.extra.items()
            if k in self.extra_select_mask
        ])
        return self._extra_select_cache
    else:
        # No mask: all extra entries are selected.
        return self.extra
extra_select = property(_extra_select)
def set_start(self, start):
    """
    Sets the table from which to start joining. The start position is
    specified by the related attribute from the base model. This will
    automatically set to the select column to be the column linked from the
    previous table.

    This method is primarily for internal use and the error checking isn't
    as friendly as add_filter(). Mostly useful for querying directly
    against the join table of many-to-many relation in a subquery.
    """
    opts = self.model._meta
    alias = self.get_initial_alias()
    field, col, opts, joins, last, extra = self.setup_joins(
            start.split(LOOKUP_SEP), opts, alias, False)
    select_col = self.alias_map[joins[1]][LHS_JOIN_COL]
    select_alias = alias

    # The call to setup_joins added an extra reference to everything in
    # joins. Reverse that.
    for alias in joins:
        self.unref_alias(alias)

    # We might be able to trim some joins from the front of this query,
    # providing that we only traverse "always equal" connections (i.e. rhs
    # is *always* the same value as lhs).
    for alias in joins[1:]:
        join_info = self.alias_map[alias]
        if (join_info[LHS_JOIN_COL] != select_col
                or join_info[JOIN_TYPE] != self.INNER):
            break
        self.unref_alias(select_alias)
        # Move the active select one join further to the right.
        select_alias = join_info[RHS_ALIAS]
        select_col = join_info[RHS_JOIN_COL]
    self.select = [(select_alias, select_col)]
    self.remove_inherited_models()
def get_order_dir(field, default='ASC'):
    """
    Returns the field name and direction for an order specification. For
    example, '-foo' is returned as ('foo', 'DESC').

    The 'default' param is used to indicate which way no prefix (or a '+'
    prefix) should sort. The '-' prefix always sorts the opposite way.
    """
    same_dir, opposite_dir = ORDER_DIR[default]
    if field[0] == '-':
        return field[1:], opposite_dir
    return field, same_dir
def setup_join_cache(sender, **kwargs):
    """
    The information needed to join between model fields is something that is
    invariant over the life of the model, so we cache it in the model's Options
    class, rather than recomputing it all the time.

    This method initialises the (empty) cache when the model is created.
    """
    # Connected to the class_prepared signal below; 'sender' is the model
    # class that has just been prepared.
    sender._meta._join_cache = {}
signals.class_prepared.connect(setup_join_cache)
def add_to_dict(data, key, value):
    """
    A helper that adds "value" to the set stored under "key", creating
    the set on first use.
    """
    data.setdefault(key, set()).add(value)
| apache-2.0 |
ptisserand/ansible | lib/ansible/modules/network/nxos/nxos_udld.py | 16 | 7946 | #!/usr/bin/python
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
#
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'network'}
DOCUMENTATION = '''
---
module: nxos_udld
extends_documentation_fragment: nxos
version_added: "2.2"
short_description: Manages UDLD global configuration params.
description:
- Manages UDLD global configuration params.
author:
- Jason Edelman (@jedelman8)
notes:
- Tested against NXOSv 7.3.(0)D1(1) on VIRL
- Module will fail if the udld feature has not been previously enabled.
options:
aggressive:
description:
- Toggles aggressive mode.
choices: ['enabled','disabled']
msg_time:
description:
- Message time in seconds for UDLD packets or keyword 'default'.
reset:
description:
- Ability to reset all ports shut down by UDLD. 'state' parameter
cannot be 'absent' when this is present.
type: bool
default: 'no'
state:
description:
- Manage the state of the resource. When set to 'absent',
aggressive and msg_time are set to their default values.
default: present
choices: ['present','absent']
'''
EXAMPLES = '''
# ensure udld aggressive mode is globally disabled and se global message interval is 20
- nxos_udld:
aggressive: disabled
msg_time: 20
host: "{{ inventory_hostname }}"
username: "{{ un }}"
password: "{{ pwd }}"
# Ensure agg mode is globally enabled and msg time is 15
- nxos_udld:
aggressive: enabled
msg_time: 15
host: "{{ inventory_hostname }}"
username: "{{ un }}"
password: "{{ pwd }}"
'''
RETURN = '''
proposed:
description: k/v pairs of parameters passed into module
returned: always
type: dict
sample: {"aggressive": "enabled", "msg_time": "40"}
existing:
description:
- k/v pairs of existing udld configuration
returned: always
type: dict
sample: {"aggressive": "disabled", "msg_time": "15"}
end_state:
description: k/v pairs of udld configuration after module execution
returned: always
type: dict
sample: {"aggressive": "enabled", "msg_time": "40"}
updates:
description: command sent to the device
returned: always
type: list
sample: ["udld message-time 40", "udld aggressive"]
changed:
description: check to see if a change was made on the device
returned: always
type: boolean
sample: true
'''
import re
from ansible.module_utils.network.nxos.nxos import get_config, load_config, run_commands
from ansible.module_utils.network.nxos.nxos import get_capabilities, nxos_argument_spec
from ansible.module_utils.basic import AnsibleModule
PARAM_TO_DEFAULT_KEYMAP = {
'msg_time': '15',
}
def execute_show_command(command, module, command_type='cli_show'):
    """
    Run a show command on the device and return the command output.

    For CLI transports the command is suffixed with '| json' (except for
    'show run') so the output comes back structured. The 'command_type'
    parameter is unused but kept for backward compatibility with callers.
    """
    # Default so an unrecognised transport returns an empty result instead
    # of raising UnboundLocalError on the return statement below.
    body = []
    device_info = get_capabilities(module)
    network_api = device_info.get('network_api', 'nxapi')

    if network_api == 'cliconf':
        if 'show run' not in command:
            command += ' | json'
        body = run_commands(module, [command])
    elif network_api == 'nxapi':
        body = run_commands(module, [command])

    return body
def flatten_list(command_lists):
    """
    Flatten one level of nesting: list items are expanded in place,
    scalar items are kept as-is.
    """
    flat = []
    for item in command_lists:
        flat.extend(item if isinstance(item, list) else [item])
    return flat
def apply_key_map(key_map, table):
    """
    Rename the keys of 'table' according to 'key_map', dropping keys that
    have no mapping. Truthy values are converted to strings; falsy values
    (0, '', None, ...) are stored unchanged.
    """
    renamed = {}
    for old_key, value in table.items():
        new_key = key_map.get(old_key)
        if not new_key:
            continue
        renamed[new_key] = str(value) if value else value
    return renamed
def get_commands_config_udld_global(delta, reset, existing):
    """
    Build the list of config commands implementing the requested changes
    ('delta') to the global UDLD settings; append 'udld reset' if asked.
    """
    commands = []
    for param, value in delta.items():
        if param == 'aggressive':
            if value == 'enabled':
                commands.append('udld aggressive')
            else:
                commands.append('no udld aggressive')
        elif param == 'msg_time':
            if value != 'default':
                commands.append('udld message-time ' + value)
            elif existing.get('msg_time') != PARAM_TO_DEFAULT_KEYMAP.get('msg_time'):
                # Only emit the negation when the device isn't already at
                # the default message time.
                commands.append('no udld message-time')
    if reset:
        commands.append('udld reset')
    return commands
def get_commands_remove_udld_global(existing):
    """
    Build the commands that return the global UDLD settings to their
    defaults, based on the device's current ('existing') state.
    """
    commands = []
    if existing.get('aggressive') == 'enabled':
        commands.append('no udld aggressive')
    if existing.get('msg_time') != PARAM_TO_DEFAULT_KEYMAP.get('msg_time'):
        commands.append('no udld message-time')
    return commands
def get_udld_global(module):
    """
    Query the device and return its global UDLD state as a dict with
    'msg_time' and 'aggressive' keys.
    """
    udld_table = execute_show_command('show udld global', module)[0]

    mode = str(udld_table.get('udld-global-mode', None))
    aggressive = 'enabled' if mode == 'enabled-aggressive' else 'disabled'

    return dict(
        msg_time=str(udld_table.get('message-interval', None)),
        aggressive=aggressive,
    )
def main():
    """Module entry point: reconcile the device's global UDLD configuration
    with the requested arguments and report proposed/existing/end state."""
    argument_spec = dict(
        aggressive=dict(required=False, choices=['enabled', 'disabled']),
        msg_time=dict(required=False, type='str'),
        reset=dict(required=False, type='bool'),
        state=dict(choices=['absent', 'present'], default='present'),
    )

    argument_spec.update(nxos_argument_spec)

    module = AnsibleModule(argument_spec=argument_spec,
                           supports_check_mode=True)

    warnings = list()

    aggressive = module.params['aggressive']
    msg_time = module.params['msg_time']
    reset = module.params['reset']
    state = module.params['state']

    # 'udld reset' only makes sense while managing the feature.
    if reset and state == 'absent':
        module.fail_json(msg="state must be present when using reset flag.")

    args = dict(aggressive=aggressive, msg_time=msg_time, reset=reset)
    # Only explicitly supplied parameters take part in the comparison.
    proposed = dict((k, v) for k, v in args.items() if v is not None)

    existing = get_udld_global(module)
    end_state = existing

    # (param, value) pairs that differ between what was asked for and what
    # the device currently has.
    delta = set(proposed.items()).difference(existing.items())
    changed = False

    commands = []
    if state == 'present':
        if delta:
            command = get_commands_config_udld_global(dict(delta), reset, existing)
            commands.append(command)

    elif state == 'absent':
        command = get_commands_remove_udld_global(existing)
        if command:
            commands.append(command)

    cmds = flatten_list(commands)
    if cmds:
        if module.check_mode:
            module.exit_json(changed=True, commands=cmds)
        else:
            changed = True
            load_config(module, cmds)
            end_state = get_udld_global(module)
            # NOTE(review): strips a leading 'configure' entry from the
            # reported commands -- presumably added upstream; confirm.
            if 'configure' in cmds:
                cmds.pop(0)

    results = {}
    results['proposed'] = proposed
    results['existing'] = existing
    results['end_state'] = end_state
    results['updates'] = cmds
    results['changed'] = changed
    results['warnings'] = warnings

    module.exit_json(**results)
if __name__ == '__main__':
main()
| gpl-3.0 |
peterm-itr/edx-platform | cms/djangoapps/contentstore/features/common.py | 4 | 12232 | # pylint: disable=missing-docstring
# pylint: disable=redefined-outer-name
import os
from lettuce import world, step
from nose.tools import assert_true, assert_in # pylint: disable=no-name-in-module
from django.conf import settings
from student.roles import CourseStaffRole, CourseInstructorRole, GlobalStaff
from student.models import get_user
from selenium.webdriver.common.keys import Keys
from logging import getLogger
from student.tests.factories import AdminFactory
from student import auth
logger = getLogger(__name__)
from terrain.browser import reset_data
TEST_ROOT = settings.COMMON_TEST_DATA_ROOT
@step('I (?:visit|access|open) the Studio homepage$')
def i_visit_the_studio_homepage(_step):
    """Open the Studio home page and check the sign-in link is present."""
    # To make this go to port 8001, put
    # LETTUCE_SERVER_PORT = 8001
    # in your settings.py file.
    world.visit('/')
    signin_css = 'a.action-signin'
    assert world.is_css_present(signin_css)
@step('I am logged into Studio$')
def i_am_logged_into_studio(_step):
    """Log the default test user into Studio."""
    log_into_studio()
@step('I confirm the alert$')
def i_confirm_with_ok(_step):
    """Accept the currently-displayed browser alert dialog."""
    world.browser.get_alert().accept()
@step(u'I press the "([^"]*)" delete icon$')
def i_press_the_category_delete_icon(_step, category):
    """Click the delete icon for a section or subsection."""
    selectors = {
        'section': 'a.action.delete-section-button',
        'subsection': 'a.action.delete-subsection-button',
    }
    assert category in selectors, 'Invalid category: %s' % category
    world.css_click(selectors[category])
@step('I have opened a new course in Studio$')
def i_have_opened_a_new_course(_step):
    """Create a fresh course and open it in Studio."""
    open_new_course()
@step('I have populated a new course in Studio$')
def i_have_populated_a_new_course(_step):
    """Create a course with one section and subsection, then open it."""
    world.clear_courses()
    course = world.CourseFactory.create()
    world.scenario_dict['COURSE'] = course
    section = world.ItemFactory.create(parent_location=course.location)
    world.ItemFactory.create(
        parent_location=section.location,
        category='sequential',
        display_name='Subsection One',
    )
    # A non-staff user who is explicitly granted course-author roles.
    user = create_studio_user(is_staff=False)
    add_course_author(user, course)

    log_into_studio()
    world.css_click('a.course-link')
    world.wait_for_js_to_load()
@step('(I select|s?he selects) the new course')
def select_new_course(_step, whom):
    """Click the course link on the Studio dashboard."""
    course_link_css = 'a.course-link'
    world.css_click(course_link_css)
@step(u'I press the "([^"]*)" notification button$')
def press_the_notification_button(_step, name):
    """Click a page-notification action button (e.g. Save/Cancel) via JS."""
    # Because the notification uses a CSS transition,
    # Selenium will always report it as being visible.
    # This makes it very difficult to successfully click
    # the "Save" button at the UI level.
    # Instead, we use JavaScript to reliably click
    # the button.
    btn_css = 'div#page-notification a.action-%s' % name.lower()
    world.trigger_event(btn_css, event='focus')
    world.browser.execute_script("$('{}').click()".format(btn_css))
    world.wait_for_ajax_complete()
@step('I change the "(.*)" field to "(.*)"$')
def i_change_field_to_value(_step, field, value):
    """Fill the named field (words become a dashed id) and press ENTER."""
    field_css = '#%s' % '-'.join([s.lower() for s in field.split()])
    ele = world.css_find(field_css).first
    ele.fill(value)
    # ENTER triggers the save handler on the field.
    ele._element.send_keys(Keys.ENTER)
@step('I reset the database')
def reset_the_db(_step):
    """
    When running Lettuce tests using examples (i.e. "Confirmation is
    shown on save" in course-settings.feature), the normal hooks
    aren't called between examples. reset_data should run before each
    scenario to flush the test database. When this doesn't happen we
    get errors due to trying to insert a non-unique entry. So instead,
    we delete the database manually. This has the effect of removing
    any users and courses that have been created during the test run.
    """
    reset_data(None)
@step('I see a confirmation that my changes have been saved')
def i_see_a_confirmation(step):
    """Verify the save-confirmation alert is present on the page."""
    confirmation_css = '#alert-confirmation'
    assert world.is_css_present(confirmation_css)
def open_new_course():
    """Clear existing courses, create a user, log in and create a course."""
    world.clear_courses()
    create_studio_user()
    log_into_studio()
    create_a_course()
def create_studio_user(
        uname='robot',
        email='[email protected]',
        password='test',
        is_staff=False):
    """Create, register and activate a Studio user; returns the user."""
    studio_user = world.UserFactory(
        username=uname,
        email=email,
        password=password,
        is_staff=is_staff)

    registration = world.RegistrationFactory(user=studio_user)
    registration.register(studio_user)
    registration.activate()

    return studio_user
def fill_in_course_info(
        name='Robot Super Course',
        org='MITx',
        num='101',
        run='2013_Spring'):
    """Populate the new-course form fields with the given values."""
    world.css_fill('.new-course-name', name)
    world.css_fill('.new-course-org', org)
    world.css_fill('.new-course-number', num)
    world.css_fill('.new-course-run', run)
def log_into_studio(
        uname='robot',
        email='[email protected]',
        password='test',
        name='Robot Studio'):
    """Log in with the given credentials and land on the Studio dashboard."""
    world.log_in(username=uname, password=password, email=email, name=name)
    # Navigate to the studio dashboard
    world.visit('/')
    assert_in(uname, world.css_text('h2.title', timeout=10))
def add_course_author(user, course):
    """
    Add the user to the instructor group of the course
    so they will have the permissions to see it in studio
    """
    global_admin = AdminFactory()
    for role in (CourseStaffRole, CourseInstructorRole):
        auth.add_users(global_admin, role(course.id), user)
def create_a_course():
    """Create a course, grant authoring rights, and open it in Studio."""
    course = world.CourseFactory.create(org='MITx', course='999', display_name='Robot Super Course')
    world.scenario_dict['COURSE'] = course

    user = world.scenario_dict.get("USER")
    if not user:
        # Fall back to the default Studio test user.
        user = get_user('[email protected]')

    add_course_author(user, course)

    # Navigate to the studio dashboard
    world.visit('/')
    course_link_css = 'a.course-link'
    world.css_click(course_link_css)
    course_title_css = 'span.course-title'
    assert_true(world.is_css_present(course_title_css))
def add_section():
    """Create a new section via the course outline and verify it appears."""
    world.css_click('.outline .button-new')
    assert_true(world.is_css_present('.outline-section .xblock-field-value'))
def set_date_and_time(date_css, desired_date, time_css, desired_time, key=None):
    """Fill a date field and a time field, waiting for AJAX after each."""
    set_element_value(date_css, desired_date, key)
    world.wait_for_ajax_complete()

    set_element_value(time_css, desired_time, key)
    world.wait_for_ajax_complete()
def set_element_value(element_css, element_value, key=None):
    """Fill the element, then press the given key (default TAB) to save."""
    element = world.css_find(element_css).first
    element.fill(element_value)
    # hit TAB or provided key to trigger save content
    if key is not None:
        element._element.send_keys(getattr(Keys, key))  # pylint: disable=protected-access
    else:
        element._element.send_keys(Keys.TAB)  # pylint: disable=protected-access
@step('I have enabled the (.*) advanced module$')
def i_enabled_the_advanced_module(step, module):
    """Enable an advanced module via the course advanced-settings page."""
    step.given('I have opened a new course section in Studio')
    world.css_click('.nav-course-settings')
    world.css_click('.nav-course-settings-advanced a')
    # The advanced-modules setting is a JSON list edited in CodeMirror.
    type_in_codemirror(0, '["%s"]' % module)
    press_the_notification_button(step, 'Save')
@world.absorb
def create_unit_from_course_outline():
    """
    Expands the section and clicks on the New Unit link.
    The end result is the page where the user is editing the new unit.
    """
    css_selectors = [
        '.outline-subsection .expand-collapse', '.outline-subsection .button-new'
    ]
    for selector in css_selectors:
        world.css_click(selector)

    world.wait_for_mathjax()
    world.wait_for_xmodule()
    world.wait_for_loading()

    assert world.is_css_present('ul.new-component-type')
@world.absorb
def wait_for_loading():
    """
    Waits for the loading indicator to be hidden.
    """
    world.wait_for(lambda _driver: len(world.browser.find_by_css('div.ui-loading.is-hidden')) > 0)
@step('I have clicked the new unit button$')
@step(u'I am in Studio editing a new unit$')
def edit_new_unit(step):
    """Create a populated course and open a brand-new unit for editing."""
    step.given('I have populated a new course in Studio')
    create_unit_from_course_outline()
@step('the save notification button is disabled')
def save_button_disabled(step):
    """Verify the Save notification button carries the disabled class."""
    button_css = '.action-save'
    disabled = 'is-disabled'
    assert world.css_has_class(button_css, disabled)
@step('the "([^"]*)" button is disabled')
def button_disabled(step, value):
    """Verify the button with the given value carries the disabled class."""
    button_css = 'input[value="%s"]' % value
    assert world.css_has_class(button_css, 'is-disabled')
def _do_studio_prompt_action(intent, action):
    """
    Wait for a studio prompt to appear and press the specified action button
    See cms/static/js/views/feedback_prompt.js for implementation
    """
    assert intent in [
        'warning',
        'error',
        'confirmation',
        'announcement',
        'step-required',
        'help',
        'mini',
    ]
    assert action in ['primary', 'secondary']

    world.wait_for_present('div.wrapper-prompt.is-shown#prompt-{}'.format(intent))

    action_css = 'li.nav-item > a.action-{}'.format(action)
    world.trigger_event(action_css, event='focus')
    # Clicked via JS because the CSS transition confuses Selenium clicks.
    world.browser.execute_script("$('{}').click()".format(action_css))

    world.wait_for_ajax_complete()
    world.wait_for_present('div.wrapper-prompt.is-hiding#prompt-{}'.format(intent))
@world.absorb
def confirm_studio_prompt():
    """Press the primary button on a warning prompt."""
    _do_studio_prompt_action('warning', 'primary')
@step('I confirm the prompt')
def confirm_the_prompt(step):
    """Step wrapper around confirm_studio_prompt()."""
    confirm_studio_prompt()
@step(u'I am shown a prompt$')
def i_am_shown_a_notification(step):
    """Verify a Studio prompt wrapper is present on the page."""
    assert world.is_css_present('.wrapper-prompt')
def type_in_codemirror(index, text, find_prefix="$"):
    # Set the value of the index-th CodeMirror editor through its JS API.
    # The focus/blur calls bracket setValue (presumably so the editor's
    # change handling fires as it would for real typing -- unverified).
    script = """
    var cm = {find_prefix}('div.CodeMirror:eq({index})').get(0).CodeMirror;
    cm.getInputField().focus();
    cm.setValue(arguments[0]);
    cm.getInputField().blur();""".format(index=index, find_prefix=find_prefix)
    world.browser.driver.execute_script(script, str(text))
    world.wait_for_ajax_complete()


def get_codemirror_value(index=0, find_prefix="$"):
    # Read the current contents of the index-th CodeMirror editor.
    return world.browser.driver.execute_script(
        """
        return {find_prefix}('div.CodeMirror:eq({index})').get(0).CodeMirror.getValue();
        """.format(index=index, find_prefix=find_prefix)
    )
def attach_file(filename, sub_path):
    """Attach ``filename`` (found under TEST_ROOT/sub_path) to the file input."""
    full_path = os.path.join(TEST_ROOT, sub_path, filename)
    # The upload input is hidden by default; reveal it so the driver can use it.
    world.browser.execute_script("$('input.file-input').css('display', 'block')")
    assert_true(os.path.exists(full_path))
    world.browser.attach_file('file', os.path.abspath(full_path))


def upload_file(filename, sub_path=''):
    """Attach ``filename`` and confirm the asset-upload modal's upload action."""
    attach_file(filename, sub_path)
    world.css_click('.wrapper-modal-window-assetupload .action-upload')
@step(u'"([^"]*)" logs in$')
def other_user_login(step, name):
    # Switch sessions: log the current user out, then sign in as <name>@edx.org.
    step.given('I log out')
    world.visit('/')

    signin_css = 'a.action-signin'
    world.is_css_present(signin_css)
    world.css_click(signin_css)

    def fill_login_form():
        login_form = world.browser.find_by_css('form#login_form')
        login_form.find_by_name('email').fill(name + '@edx.org')
        login_form.find_by_name('password').fill("test")
        login_form.find_by_name('submit').click()
    # Retried because the form may not be interactable on the first attempt.
    world.retry_on_exception(fill_login_form)
    # The new-course button only renders for a signed-in user.
    assert_true(world.is_css_present('.new-course-button'))
    world.scenario_dict['USER'] = get_user(name + '@edx.org')
@step(u'the user "([^"]*)" exists( as a course (admin|staff member|is_staff))?$')
def create_other_user(_step, name, has_extra_perms, role_name):
    # Create a Studio user <name>@edx.org, optionally granting extra
    # permissions: global is_staff, course admin, or course staff.
    email = name + '@edx.org'
    user = create_studio_user(uname=name, password="test", email=email)
    if has_extra_perms:
        if role_name == "is_staff":
            GlobalStaff().add_users(user)
        else:
            if role_name == "admin":
                # admins get staff privileges, as well
                roles = (CourseStaffRole, CourseInstructorRole)
            else:
                roles = (CourseStaffRole,)
            course_key = world.scenario_dict["COURSE"].id
            # Role assignment itself requires an admin actor.
            global_admin = AdminFactory()
            for role in roles:
                auth.add_users(global_admin, role(course_key), user)


@step('I log out')
def log_out(_step):
    world.visit('logout')
| agpl-3.0 |
pyocd/pyOCD | pyocd/target/builtin/target_MKE15Z256xxx7.py | 3 | 6018 | # pyOCD debugger
# Copyright (c) 2018-2019 Arm Limited
# Copyright (c) 2017 NXP
# Copyright (c) 2016 Freescale Semiconductor, Inc.
# SPDX-License-Identifier: Apache-2.0
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ..family.target_kinetis import Kinetis
from ..family.flash_kinetis import Flash_Kinetis
from ...core.memory_map import (FlashRegion, RamRegion, MemoryMap)
from ...debug.svd.loader import SVDFile
# Reset Control Module mode register; KE15Z7.post_connect_hook writes the
# BOOTROM mask here to disable ROM vector table remapping.
RCM_MR = 0x4007f010
RCM_MR_BOOTROM_MASK = 0x6

# Flash programming algorithm blob: position-independent machine code that
# pyOCD loads into target RAM at `load_address`, plus the entry-point and
# buffer layout metadata below. Do not edit the instruction words by hand.
FLASH_ALGO = {
    'load_address' : 0x20000000,
    'instructions' : [
    0xE00ABE00, 0x062D780D, 0x24084068, 0xD3000040, 0x1E644058, 0x1C49D1FA, 0x2A001E52, 0x4770D1F2,
    0x4829b510, 0x60414927, 0x60814928, 0x22806801, 0x22204391, 0x60014311, 0x44484825, 0xf84cf000,
    0xd0002800, 0xbd102001, 0x47702000, 0xb5104820, 0x44484920, 0xf88ef000, 0xd1042800, 0x2100481c,
    0xf0004448, 0xbd10f948, 0x4c19b570, 0x444c4605, 0x4b184601, 0x68e24620, 0xf8b5f000, 0xd1052800,
    0x46292300, 0x68e24620, 0xf93ff000, 0xb570bd70, 0x460b460c, 0x46014606, 0xb084480d, 0x44484615,
    0xf8e4f000, 0xd10a2800, 0x90029001, 0x48082101, 0x462b9100, 0x46314622, 0xf0004448, 0xb004f96d,
    0x0000bd70, 0xd928c520, 0x40052000, 0x0000ffff, 0x00000004, 0x6b65666b, 0xd00b2800, 0x68c949dd,
    0x0f090109, 0xd007290f, 0x00494adb, 0x5a51447a, 0xe0030289, 0x47702004, 0x04892101, 0x2300b430,
    0x60416003, 0x02cc2101, 0x608160c4, 0x7a0d49d3, 0x40aa158a, 0x7ac96142, 0x61816103, 0x06892105,
    0x62016244, 0x2000bc30, 0x28004770, 0x6101d002, 0x47702000, 0x47702004, 0x48c94602, 0x210168c0,
    0x43080289, 0x60c849c6, 0x48c64770, 0x70012170, 0x70012180, 0x06097801, 0x7800d5fc, 0xd5010681,
    0x47702067, 0xd50106c1, 0x47702068, 0xd0fc07c0, 0x47702069, 0xd1012800, 0x47702004, 0x4604b510,
    0x48b94ab8, 0x48b96050, 0xd0014281, 0xe000206b, 0x28002000, 0x4620d107, 0xffd7f7ff, 0x46204603,
    0xffcaf7ff, 0xbd104618, 0xd1012800, 0x47702004, 0x4614b510, 0x60622200, 0x60e260a2, 0x61626122,
    0x61e261a2, 0x68c16021, 0x68816061, 0xf0006840, 0x60a0f953, 0x60e02008, 0x61606120, 0x200461a0,
    0x200061e0, 0xb5ffbd10, 0x4615b089, 0x466a460c, 0xf7ff9809, 0x462affd9, 0x9b044621, 0xf0009809,
    0x0007f90c, 0x9c00d130, 0x19659e01, 0x46311e6d, 0xf0004628, 0x2900f931, 0x1c40d002, 0x1e454370,
    0xd81d42ac, 0x20090221, 0x06000a09, 0x488d1809, 0x498e6041, 0x4288980c, 0x206bd001, 0x2000e000,
    0xd1112800, 0xf7ff9809, 0x4607ff80, 0x69009809, 0xd0002800, 0x2f004780, 0x19a4d102, 0xd9e142ac,
    0xf7ff9809, 0x4638ff69, 0xbdf0b00d, 0xd1012a00, 0x47702004, 0xb089b5ff, 0x461e4614, 0x466a460d,
    0xf7ff9809, 0x4632ff91, 0x9b034629, 0xf0009809, 0x0007f8c4, 0x9d00d12d, 0xd0262e00, 0x4871cc02,
    0x99036081, 0xd0022904, 0xd0072908, 0x022ae00e, 0x0a122103, 0x18510649, 0xe0076041, 0x60c1cc02,
    0x2107022a, 0x06090a12, 0x60411851, 0xf7ff9809, 0x4607ff3c, 0x69009809, 0xd0002800, 0x2f004780,
    0x9803d103, 0x1a361945, 0x9809d1d8, 0xff24f7ff, 0xb00d4638, 0x2800bdf0, 0x4a5dd005, 0x18890409,
    0x60514a58, 0x2004e721, 0xb5ff4770, 0x4614b08b, 0x460d461e, 0x980b466a, 0xff46f7ff, 0x46294622,
    0x980b9b05, 0xf879f000, 0xd1332800, 0x4629466a, 0xf7ff980b, 0x9d00ff39, 0x90089802, 0x42404269,
    0x424f4001, 0xd10142af, 0x183f9808, 0xd0202c00, 0x90090230, 0x42a61b7e, 0x4626d900, 0x99054630,
    0xf88af000, 0x2101022a, 0x06090a12, 0x493d1852, 0x9a09604a, 0x43100400, 0x608830ff, 0xf7ff980b,
    0x2800fee4, 0x9808d106, 0x19ad1ba4, 0x2c00183f, 0x2000d1e0, 0xbdf0b00f, 0xd1012b00, 0x47702004,
    0xb089b5ff, 0x461d4616, 0x466a460c, 0x98099f12, 0xfefaf7ff, 0x46214632, 0x98099b07, 0xf82df000,
    0xd11d2800, 0x2e009c00, 0x492ad01a, 0x18470638, 0x20010221, 0x06400a09, 0x48221809, 0x60876041,
    0x60c16829, 0xf7ff9809, 0x2800feb0, 0x9913d00a, 0xd0002900, 0x9914600c, 0xd0012900, 0x600a2200,
    0xbdf0b00d, 0x1a769907, 0x00890889, 0x9907194d, 0x2e00190c, 0xb00dd1dc, 0x2800bdf0, 0x2004d101,
    0xb4104770, 0x460c1e5b, 0xd101421c, 0xd002421a, 0x2065bc10, 0x68034770, 0xd804428b, 0x18896840,
    0x42881818, 0xbc10d202, 0x47702066, 0x2000bc10, 0x00004770, 0x40048040, 0x000003bc, 0x40020020,
    0xf0003000, 0x40020000, 0x44ffffff, 0x6b65666b, 0x4000ffff, 0x00ffffff, 0x460bb530, 0x20004601,
    0x24012220, 0x460de009, 0x429d40d5, 0x461dd305, 0x1b494095, 0x40954625, 0x46151940, 0x2d001e52,
    0xbd30dcf1, 0x40020004, 0x40020010, 0x00100008, 0x00200018, 0x00400030, 0x00800060, 0x010000c0,
    0x02000180, 0x04000300, 0x00000600, 0x00000000,
    ],

    # Entry points into the blob (absolute RAM addresses).
    'pc_init' : 0x20000021,
    'pc_unInit': 0x20000049,
    'pc_program_page': 0x2000008F,
    'pc_erase_sector': 0x20000069,
    'pc_eraseAll' : 0x2000004D,

    'static_base' : 0x20000000 + 0x00000020 + 0x000004ac,
    'begin_stack' : 0x20000000 + 0x00000800,
    'begin_data' : 0x20000000 + 0x00000A00,
    'page_size' : 0x00000800,
    'analyzer_supported' : True,
    'analyzer_address' : 0x1ffff000,  # Analyzer 0x1ffff000..0x1ffff600
    'page_buffers' : [0x20003000, 0x20004000],   # Enable double buffering
    'min_program_length' : 8,
}
class KE15Z7(Kinetis):
    """NXP Kinetis MKE15Z256xxx7 target definition."""

    # 256 KiB boot flash at address 0 (2 KiB sectors) plus 32 KiB of SRAM.
    MEMORY_MAP = MemoryMap(
        FlashRegion( start=0, length=0x40000, blocksize=0x800, is_boot_memory=True,
            algo=FLASH_ALGO, flash_class=Flash_Kinetis),
        RamRegion( start=0x1fffe000, length=0x8000)
        )

    def __init__(self, session):
        super(KE15Z7, self).__init__(session, self.MEMORY_MAP)
        self._svd_location = SVDFile.from_builtin("MKE15Z7.svd")

    def post_connect_hook(self):
        # Disable ROM vector table remapping.
        self.write32(RCM_MR, RCM_MR_BOOTROM_MASK)
| apache-2.0 |
ychen820/microblog | y/google-cloud-sdk/platform/google_appengine/lib/django-1.4/django/views/generic/create_update.py | 87 | 8928 | from django.forms.models import ModelFormMetaclass, ModelForm
from django.template import RequestContext, loader
from django.http import Http404, HttpResponse, HttpResponseRedirect
from django.core.xheaders import populate_xheaders
from django.core.exceptions import ObjectDoesNotExist, ImproperlyConfigured
from django.utils.translation import ugettext
from django.contrib.auth.views import redirect_to_login
from django.views.generic import GenericViewError
from django.contrib import messages
import warnings
# Emitted at import time: this whole module of function-based generic views
# is deprecated in favour of the class-based views in django.views.generic.
warnings.warn(
    'Function-based generic views have been deprecated; use class-based views instead.',
    DeprecationWarning
)
def apply_extra_context(extra_context, context):
    """
    Adds items from extra_context dict to context. If a value in extra_context
    is callable, then it is called and the result is added to context.
    """
    for key, value in extra_context.iteritems():
        context[key] = value() if callable(value) else value
def get_model_and_form_class(model, form_class):
    """
    Returns a model and form class based on the model and form_class
    parameters that were passed to the generic view.

    If ``form_class`` is given then its associated model will be returned along
    with ``form_class`` itself.  Otherwise, if ``model`` is given, ``model``
    itself will be returned along with a ``ModelForm`` class created from
    ``model``.
    """
    if form_class:
        return form_class._meta.model, form_class
    if model:
        # The inner Meta class fails if model = model is used for some reason.
        tmp_model = model
        # TODO: we should be able to construct a ModelForm without creating
        # and passing in a temporary inner class.
        class Meta:
            model = tmp_model
        class_name = model.__name__ + 'Form'
        # Build a ModelForm subclass on the fly via the metaclass.
        form_class = ModelFormMetaclass(class_name, (ModelForm,), {'Meta': Meta})
        return model, form_class
    raise GenericViewError("Generic view must be called with either a model or"
                           " form_class argument.")
def redirect(post_save_redirect, obj):
    """
    Returns a HttpResponseRedirect to ``post_save_redirect``.

    ``post_save_redirect`` should be a string, and can contain named string-
    substitution place holders of ``obj`` field names.

    If ``post_save_redirect`` is None, then redirect to ``obj``'s URL returned
    by ``get_absolute_url()``. If ``obj`` has no ``get_absolute_url`` method,
    then raise ImproperlyConfigured.

    This function is meant to handle the post_save_redirect parameter to the
    ``create_object`` and ``update_object`` views.
    """
    if post_save_redirect:
        # Interpolate the object's attributes into the user-supplied pattern.
        return HttpResponseRedirect(post_save_redirect % obj.__dict__)
    if hasattr(obj, 'get_absolute_url'):
        return HttpResponseRedirect(obj.get_absolute_url())
    raise ImproperlyConfigured(
        "No URL to redirect to. Either pass a post_save_redirect"
        " parameter to the generic view or define a get_absolute_url"
        " method on the Model.")
def lookup_object(model, object_id, slug, slug_field):
    """
    Return the ``model`` object with the passed ``object_id``.  If
    ``object_id`` is None, then return the object whose ``slug_field``
    equals the passed ``slug``.  If ``slug`` and ``slug_field`` are not passed,
    then raise Http404 exception.
    """
    lookup_kwargs = {}
    if object_id:
        lookup_kwargs['%s__exact' % model._meta.pk.name] = object_id
    elif slug and slug_field:
        lookup_kwargs['%s__exact' % slug_field] = slug
    else:
        # Neither criterion supplied: a view-configuration error, not a 404.
        raise GenericViewError(
            "Generic view must be called with either an object_id or a"
            " slug/slug_field.")
    try:
        return model.objects.get(**lookup_kwargs)
    except ObjectDoesNotExist:
        raise Http404("No %s found for %s"
                      % (model._meta.verbose_name, lookup_kwargs))
def create_object(request, model=None, template_name=None,
        template_loader=loader, extra_context=None, post_save_redirect=None,
        login_required=False, context_processors=None, form_class=None):
    """
    Generic object-creation function.

    Templates: ``<app_label>/<model_name>_form.html``
    Context:
        form
            the form for the object
    """
    if extra_context is None: extra_context = {}
    if login_required and not request.user.is_authenticated():
        return redirect_to_login(request.path)

    model, form_class = get_model_and_form_class(model, form_class)
    if request.method == 'POST':
        form = form_class(request.POST, request.FILES)
        if form.is_valid():
            new_object = form.save()
            msg = ugettext("The %(verbose_name)s was created successfully.") %\
                  {"verbose_name": model._meta.verbose_name}
            messages.success(request, msg, fail_silently=True)
            return redirect(post_save_redirect, new_object)
        # Invalid POST falls through to re-render the bound form with errors.
    else:
        form = form_class()

    # Create the template, context, response
    if not template_name:
        template_name = "%s/%s_form.html" % (model._meta.app_label, model._meta.object_name.lower())
    t = template_loader.get_template(template_name)
    c = RequestContext(request, {
        'form': form,
    }, context_processors)
    apply_extra_context(extra_context, c)
    return HttpResponse(t.render(c))
def update_object(request, model=None, object_id=None, slug=None,
        slug_field='slug', template_name=None, template_loader=loader,
        extra_context=None, post_save_redirect=None, login_required=False,
        context_processors=None, template_object_name='object',
        form_class=None):
    """
    Generic object-update function.

    Templates: ``<app_label>/<model_name>_form.html``
    Context:
        form
            the form for the object
        object
            the original object being edited
    """
    if extra_context is None: extra_context = {}
    if login_required and not request.user.is_authenticated():
        return redirect_to_login(request.path)

    model, form_class = get_model_and_form_class(model, form_class)
    obj = lookup_object(model, object_id, slug, slug_field)

    if request.method == 'POST':
        form = form_class(request.POST, request.FILES, instance=obj)
        if form.is_valid():
            obj = form.save()
            msg = ugettext("The %(verbose_name)s was updated successfully.") %\
                  {"verbose_name": model._meta.verbose_name}
            messages.success(request, msg, fail_silently=True)
            return redirect(post_save_redirect, obj)
        # Invalid POST falls through to re-render the bound form with errors.
    else:
        form = form_class(instance=obj)

    if not template_name:
        template_name = "%s/%s_form.html" % (model._meta.app_label, model._meta.object_name.lower())
    t = template_loader.get_template(template_name)
    c = RequestContext(request, {
        'form': form,
        template_object_name: obj,
    }, context_processors)
    apply_extra_context(extra_context, c)
    response = HttpResponse(t.render(c))
    populate_xheaders(request, response, model, getattr(obj, obj._meta.pk.attname))
    return response
def delete_object(request, model, post_delete_redirect, object_id=None,
        slug=None, slug_field='slug', template_name=None,
        template_loader=loader, extra_context=None, login_required=False,
        context_processors=None, template_object_name='object'):
    """
    Generic object-delete function.

    The given template will be used to confirm deletion if this view is
    fetched using GET; for safety, deletion will only be performed if this
    view is POSTed.

    Templates: ``<app_label>/<model_name>_confirm_delete.html``
    Context:
        object
            the original object being deleted
    """
    if extra_context is None: extra_context = {}
    if login_required and not request.user.is_authenticated():
        return redirect_to_login(request.path)

    obj = lookup_object(model, object_id, slug, slug_field)

    if request.method == 'POST':
        obj.delete()
        msg = ugettext("The %(verbose_name)s was deleted.") %\
              {"verbose_name": model._meta.verbose_name}
        messages.success(request, msg, fail_silently=True)
        return HttpResponseRedirect(post_delete_redirect)
    else:
        # GET renders a confirmation page instead of deleting.
        if not template_name:
            template_name = "%s/%s_confirm_delete.html" % (model._meta.app_label, model._meta.object_name.lower())
        t = template_loader.get_template(template_name)
        c = RequestContext(request, {
            template_object_name: obj,
        }, context_processors)
        apply_extra_context(extra_context, c)
        response = HttpResponse(t.render(c))
        populate_xheaders(request, response, model, getattr(obj, obj._meta.pk.attname))
        return response
| bsd-3-clause |
alexallah/django | django/views/generic/edit.py | 24 | 8343 | from django.core.exceptions import ImproperlyConfigured
from django.forms import models as model_forms
from django.http import HttpResponseRedirect
from django.views.generic.base import ContextMixin, TemplateResponseMixin, View
from django.views.generic.detail import (
BaseDetailView, SingleObjectMixin, SingleObjectTemplateResponseMixin,
)
class FormMixin(ContextMixin):
    """Provide a way to show and handle a form in a request."""
    initial = {}
    form_class = None
    success_url = None
    prefix = None

    def get_initial(self):
        """Return the initial data to use for forms on this view."""
        # Copy so per-request mutation can't leak into the shared class attr.
        return self.initial.copy()

    def get_prefix(self):
        """Return the prefix to use for forms."""
        return self.prefix

    def get_form_class(self):
        """Return the form class to use."""
        return self.form_class

    def get_form(self, form_class=None):
        """Return an instance of the form to be used in this view."""
        if form_class is None:
            form_class = self.get_form_class()
        return form_class(**self.get_form_kwargs())

    def get_form_kwargs(self):
        """Return the keyword arguments for instantiating the form."""
        kwargs = {
            'initial': self.get_initial(),
            'prefix': self.get_prefix(),
        }
        if self.request.method in ('POST', 'PUT'):
            # Bind submitted data and uploads so the form can validate.
            kwargs.update({
                'data': self.request.POST,
                'files': self.request.FILES,
            })
        return kwargs

    def get_success_url(self):
        """Return the URL to redirect to after processing a valid form."""
        if not self.success_url:
            raise ImproperlyConfigured("No URL to redirect to. Provide a success_url.")
        return str(self.success_url)  # success_url may be lazy

    def form_valid(self, form):
        """If the form is valid, redirect to the supplied URL."""
        return HttpResponseRedirect(self.get_success_url())

    def form_invalid(self, form):
        """If the form is invalid, render the invalid form."""
        return self.render_to_response(self.get_context_data(form=form))

    def get_context_data(self, **kwargs):
        """Insert the form into the context dict."""
        if 'form' not in kwargs:
            kwargs['form'] = self.get_form()
        return super().get_context_data(**kwargs)
class ModelFormMixin(FormMixin, SingleObjectMixin):
    """Provide a way to show and handle a ModelForm in a request."""
    fields = None

    def get_form_class(self):
        """Return the form class to use in this view."""
        if self.fields is not None and self.form_class:
            raise ImproperlyConfigured(
                "Specifying both 'fields' and 'form_class' is not permitted."
            )
        if self.form_class:
            return self.form_class
        else:
            if self.model is not None:
                # If a model has been explicitly provided, use it
                model = self.model
            elif hasattr(self, 'object') and self.object is not None:
                # If this view is operating on a single object, use
                # the class of that object
                model = self.object.__class__
            else:
                # Try to get a queryset and extract the model class
                # from that
                model = self.get_queryset().model

            if self.fields is None:
                raise ImproperlyConfigured(
                    "Using ModelFormMixin (base class of %s) without "
                    "the 'fields' attribute is prohibited." % self.__class__.__name__
                )

            return model_forms.modelform_factory(model, fields=self.fields)

    def get_form_kwargs(self):
        """Return the keyword arguments for instantiating the form."""
        kwargs = super().get_form_kwargs()
        # Bind the form to the current object (if any) for edit views.
        if hasattr(self, 'object'):
            kwargs.update({'instance': self.object})
        return kwargs

    def get_success_url(self):
        """Return the URL to redirect to after processing a valid form."""
        if self.success_url:
            # success_url may reference fields of the saved object.
            url = self.success_url.format(**self.object.__dict__)
        else:
            try:
                url = self.object.get_absolute_url()
            except AttributeError:
                raise ImproperlyConfigured(
                    "No URL to redirect to. Either provide a url or define"
                    " a get_absolute_url method on the Model.")
        return url

    def form_valid(self, form):
        """If the form is valid, save the associated model."""
        self.object = form.save()
        return super().form_valid(form)
class ProcessFormView(View):
    """Render a form on GET and processes it on POST."""

    def get(self, request, *args, **kwargs):
        """Handle GET requests: instantiate a blank version of the form."""
        return self.render_to_response(self.get_context_data())

    def post(self, request, *args, **kwargs):
        """
        Handle POST requests: instantiate a form instance with the passed
        POST variables and then check if it's valid.
        """
        form = self.get_form()
        if not form.is_valid():
            return self.form_invalid(form)
        return self.form_valid(form)

    # PUT is a valid HTTP verb for creating (with a known URL) or editing an
    # object, note that browsers only support POST for now.
    def put(self, *args, **kwargs):
        return self.post(*args, **kwargs)
class BaseFormView(FormMixin, ProcessFormView):
    """A base view for displaying a form."""


class FormView(TemplateResponseMixin, BaseFormView):
    """A view for displaying a form and rendering a template response."""


class BaseCreateView(ModelFormMixin, ProcessFormView):
    """
    Base view for creating a new object instance.

    Using this base class requires subclassing to provide a response mixin.
    """
    def get(self, request, *args, **kwargs):
        # No existing object yet: ModelFormMixin reads self.object when
        # building form kwargs, so it must be set before delegating.
        self.object = None
        return super().get(request, *args, **kwargs)

    def post(self, request, *args, **kwargs):
        self.object = None
        return super().post(request, *args, **kwargs)


class CreateView(SingleObjectTemplateResponseMixin, BaseCreateView):
    """
    View for creating a new object, with a response rendered by a template.
    """
    template_name_suffix = '_form'
class BaseUpdateView(ModelFormMixin, ProcessFormView):
    """
    Base view for updating an existing object.

    Using this base class requires subclassing to provide a response mixin.
    """
    def get(self, request, *args, **kwargs):
        # Fetch the object up front: ModelFormMixin uses self.object for the
        # form instance and the success URL.
        self.object = self.get_object()
        return super().get(request, *args, **kwargs)

    def post(self, request, *args, **kwargs):
        self.object = self.get_object()
        return super().post(request, *args, **kwargs)


class UpdateView(SingleObjectTemplateResponseMixin, BaseUpdateView):
    """View for updating an object, with a response rendered by a template."""
    template_name_suffix = '_form'
class DeletionMixin:
    """Provide the ability to delete objects."""
    success_url = None

    def delete(self, request, *args, **kwargs):
        """
        Call the delete() method on the fetched object and then redirect to the
        success URL.
        """
        self.object = self.get_object()
        # Resolve the URL before deleting: get_success_url() may interpolate
        # attributes of the object being removed.
        success_url = self.get_success_url()
        self.object.delete()
        return HttpResponseRedirect(success_url)

    # Add support for browsers which only accept GET and POST for now.
    def post(self, request, *args, **kwargs):
        return self.delete(request, *args, **kwargs)

    def get_success_url(self):
        if self.success_url:
            return self.success_url.format(**self.object.__dict__)
        else:
            raise ImproperlyConfigured(
                "No URL to redirect to. Provide a success_url.")


class BaseDeleteView(DeletionMixin, BaseDetailView):
    """
    Base view for deleting an object.

    Using this base class requires subclassing to provide a response mixin.
    """


class DeleteView(SingleObjectTemplateResponseMixin, BaseDeleteView):
    """
    View for deleting an object retrieved with self.get_object(), with a
    response rendered by a template.
    """
    template_name_suffix = '_confirm_delete'
| bsd-3-clause |
gbaty/pyside2 | tests/signals/decorators_test.py | 3 | 2225 | #!/usr/bin/env python
import unittest
from PySide2.QtCore import QObject, Slot, SIGNAL, SLOT
class MyObject(QObject):
    """QObject exercising the different forms of the @Slot decorator."""

    def __init__(self, parent=None):
        QObject.__init__(self, parent)
        # Incremented by every slot so tests can observe invocations.
        self._slotCalledCount = 0

    @Slot()
    def mySlot(self):
        self._slotCalledCount = self._slotCalledCount + 1

    # Stacked decorators register two overloads of the same slot:
    # mySlot2(int) and mySlot2(QString).
    @Slot(int)
    @Slot('QString')
    def mySlot2(self, arg0):
        self._slotCalledCount = self._slotCalledCount + 1

    # 'name' overrides the slot's exposed name in the meta-object.
    @Slot(name='mySlot3')
    def foo(self):
        self._slotCalledCount = self._slotCalledCount + 1

    @Slot(str, int)
    def mySlot4(self, a, b):
        self._slotCalledCount = self._slotCalledCount + 1

    # 'result' declares the slot's return type in the meta-object.
    @Slot(result=int)
    def mySlot5(self):
        self._slotCalledCount = self._slotCalledCount + 1

    @Slot(result=QObject)
    def mySlot6(self):
        self._slotCalledCount = self._slotCalledCount + 1
class StaticMetaObjectTest(unittest.TestCase):
    """Checks that @Slot registrations show up in the static meta-object."""

    def testSignalPropagation(self):
        o = MyObject()
        m = o.metaObject()
        # Each decorated method must be registered under its full signature.
        self.assertTrue(m.indexOfSlot('mySlot()') > 0)
        self.assertTrue(m.indexOfSlot('mySlot2(int)') > 0)
        self.assertTrue(m.indexOfSlot('mySlot2(QString)') > 0)
        self.assertTrue(m.indexOfSlot('mySlot3()') > 0)
        self.assertTrue(m.indexOfSlot('mySlot4(QString,int)') > 0)

    def testEmission(self):
        # Dynamic signal connected to a declared slot must invoke it once.
        o = MyObject()
        o.connect(SIGNAL("mySignal()"), o, SLOT("mySlot()"))
        o.emit(SIGNAL("mySignal()"))
        self.assertTrue(o._slotCalledCount == 1)

    def testResult(self):
        o = MyObject()
        mo = o.metaObject()
        i = mo.indexOfSlot('mySlot5()')
        m = mo.method(i)
        self.assertEqual(m.typeName(), "int")

    def testResultObject(self):
        o = MyObject()
        mo = o.metaObject()
        i = mo.indexOfSlot('mySlot6()')
        m = mo.method(i)
        self.assertEqual(m.typeName(), "QObject*")


class SlotWithoutArgs(unittest.TestCase):
    def testError(self):
        # It should be an error to call the slot without the
        # arguments, as just @Slot would end up in a slot
        # accepting argument functions
        self.assertRaises(TypeError, Slot, lambda: 3)


if __name__ == '__main__':
    unittest.main()
| lgpl-2.1 |
ZihengJiang/mxnet | example/autoencoder/model.py | 27 | 2810 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# pylint: skip-file
import mxnet as mx
import numpy as np
import logging
from solver import Solver, Monitor
try:
import cPickle as pickle
except:
import pickle
def extract_feature(sym, args, auxs, data_iter, N, xpu=mx.cpu()):
    """Run ``sym`` forward over ``data_iter`` and collect the first N outputs.

    Returns a dict mapping each output name of ``sym`` to a numpy array built
    by concatenating per-batch results along axis 0 and truncating to N rows.
    """
    # Reusable device-side input buffers, bound into the executor by name.
    input_buffs = [mx.nd.empty(shape, ctx=xpu) for k, shape in data_iter.provide_data]
    input_names = [k for k, shape in data_iter.provide_data]
    args = dict(args, **dict(zip(input_names, input_buffs)))
    exe = sym.bind(xpu, args=args, aux_states=auxs)
    outputs = [[] for i in exe.outputs]
    output_buffs = None

    data_iter.hard_reset()
    for batch in data_iter:
        for data, buff in zip(batch.data, input_buffs):
            data.copyto(buff)
        exe.forward(is_train=False)
        if output_buffs is None:
            # First batch: allocate host-side staging buffers lazily, once the
            # output shapes are known.
            output_buffs = [mx.nd.empty(i.shape, ctx=mx.cpu()) for i in exe.outputs]
        else:
            # Harvest the PREVIOUS batch's results before overwriting the
            # staging buffers: device->host conversion runs one batch behind
            # the forward pass.
            for out, buff in zip(outputs, output_buffs):
                out.append(buff.asnumpy())
        for out, buff in zip(exe.outputs, output_buffs):
            out.copyto(buff)
    # Flush the final batch still sitting in the staging buffers.
    for out, buff in zip(outputs, output_buffs):
        out.append(buff.asnumpy())
    outputs = [np.concatenate(i, axis=0)[:N] for i in outputs]
    return dict(zip(sym.list_outputs(), outputs))
class MXModel(object):
    """Base class holding a model's parameter NDArrays with pickle save/load.

    Subclasses must implement setup() to populate args/args_grad/auxs.
    """
    def __init__(self, xpu=mx.cpu(), *args, **kwargs):
        self.xpu = xpu
        self.loss = None
        self.args = {}
        self.args_grad = {}
        self.args_mult = {}
        self.auxs = {}
        self.setup(*args, **kwargs)

    def save(self, fname):
        # Convert NDArrays to numpy so the parameter dict is picklable.
        args_save = {key: v.asnumpy() for key, v in self.args.items()}
        with open(fname, 'wb') as fout:
            pickle.dump(args_save, fout)

    def load(self, fname):
        with open(fname, 'rb') as fin:
            args_save = pickle.load(fin)
            for key, v in args_save.items():
                if key in self.args:
                    # In-place copy keeps the existing NDArray (and its device
                    # placement) intact; unknown keys are silently ignored.
                    self.args[key][:] = v

    def setup(self, *args, **kwargs):
        raise NotImplementedError("must override this")
| apache-2.0 |
switchboardOp/ansible | lib/ansible/modules/windows/win_msg.py | 22 | 3527 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2017, Jon Hawkesworth (@jhawkesworth) <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# this is a windows documentation stub. actual code lives in the .ps1
# file of the same name
ANSIBLE_METADATA = {'metadata_version': '1.0',
'status': ['preview'],
'supported_by': 'community'}
DOCUMENTATION = r'''
---
module: win_msg
version_added: "2.3"
short_description: Sends a message to logged in users on Windows hosts.
description:
- Wraps the msg.exe command in order to send messages to Windows hosts.
options:
to:
description:
- Who to send the message to. Can be a username, sessionname or sessionid.
default: '*'
display_seconds:
description:
- How long to wait for receiver to acknowledge message, in seconds.
default: 10
wait:
description:
- Whether to wait for users to respond. Module will only wait for the number of seconds specified in display_seconds or 10 seconds if not specified.
However, if I(wait) is true, the message is sent to each logged on user in turn, waiting for the user to either press 'ok' or for
the timeout to elapse before moving on to the next user.
type: bool
default: 'no'
msg:
description:
- The text of the message to be displayed.
default: Hello world!
author:
- Jon Hawkesworth (@jhawkesworth)
notes:
- This module must run on a windows host, so ensure your play targets windows
hosts, or delegates to a windows host.
- Messages are only sent to the local host where the module is run.
- The module does not support sending to users listed in a file.
- Setting wait to true can result in long run times on systems with many logged in users.
'''
EXAMPLES = r'''
- name: Warn logged in users of impending upgrade
win_msg:
display_seconds: 60
msg: Automated upgrade about to start. Please save your work and log off before {{ deployment_start_time }}
'''
# Return-value documentation for Ansible's doc tooling. Fixed the description
# of `msg`, which previously read "Test of the message" instead of "Text".
RETURN = r'''
msg:
    description: Text of the message that was sent.
    returned: changed
    type: string
    sample: Automated upgrade about to start. Please save your work and log off before 22 July 2016 18:00:00
display_seconds:
    description: Value of display_seconds module parameter.
    returned: success
    type: string
    sample: 10
rc:
    description: The return code of the API call
    returned: always
    type: int
    sample: 0
runtime_seconds:
    description: How long the module took to run on the remote windows host.
    returned: success
    type: string
    sample: 22 July 2016 17:45:51
sent_localtime:
    description: local time from windows host when the message was sent.
    returned: success
    type: string
    sample: 22 July 2016 17:45:51
wait:
    description: Value of wait module parameter.
    returned: success
    type: boolean
    sample: false
'''
| gpl-3.0 |
ChronoMonochrome/android_external_chromium_org | tools/telemetry/telemetry/core/backends/chrome/misc_web_contents_backend.py | 24 | 1494 | # Copyright 2013 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import json
from telemetry.core import web_contents
from telemetry.core.backends.chrome import inspector_backend
class MiscWebContentsBackend(object):
  """Provides access to the chrome://oobe/login page, which is neither an
  extension nor a tab."""

  def __init__(self, browser_backend):
    self._browser_backend = browser_backend

  def GetOobe(self):
    """Return a WebContents for the OOBE page, or None if it is unavailable."""
    oobe_info = self._FindWebContentsInfo()
    if not oobe_info:
      return None
    debugger_url = oobe_info.get('webSocketDebuggerUrl')
    if not debugger_url:
      return None
    return web_contents.WebContents(self._CreateInspectorBackend(debugger_url))

  def _CreateInspectorBackend(self, debugger_url):
    return inspector_backend.InspectorBackend(
        self._browser_backend.browser, self._browser_backend, debugger_url)

  def _ListWebContents(self, timeout=None):
    """Fetch and decode the browser's JSON list of debuggable web contents."""
    raw = self._browser_backend.Request('', timeout=timeout)
    return json.loads(raw)

  def _FindWebContentsInfo(self):
    # Prior to crrev.com/203152, url was chrome://oobe/login.
    for candidate in self._ListWebContents():
      if candidate.get('url').startswith('chrome://oobe'):
        return candidate
    return None
| bsd-3-clause |
pbrod/numpy | numpy/distutils/fcompiler/absoft.py | 17 | 5499 |
# http://www.absoft.com/literature/osxuserguide.pdf
# http://www.absoft.com/documentation.html
# Notes:
# - when using -g77 then use -DUNDERSCORE_G77 to compile f2py
# generated extension modules (works for f2py v2.45.241_1936 and up)
import os
from numpy.distutils.cpuinfo import cpu
from numpy.distutils.fcompiler import FCompiler, dummy_fortran_file
from numpy.distutils.misc_util import cyg2win32
compilers = ['AbsoftFCompiler']
class AbsoftFCompiler(FCompiler):
    """numpy.distutils wrapper for the Absoft Pro Fortran compilers.

    Encapsulates the Absoft-specific command lines, version detection and
    runtime-library selection on both POSIX and Windows hosts.

    NOTE(review): all ``self.get_version() >= '...'`` checks below compare
    version *strings* lexicographically (so e.g. ``'10.0' >= '9.0'`` is
    False), and ``get_version()`` may return None -- confirm whether a
    proper version comparison is intended before relying on these branches.
    """

    compiler_type = 'absoft'
    description = 'Absoft Corp Fortran Compiler'
    #version_pattern = r'FORTRAN 77 Compiler (?P<version>[^\s*,]*).*?Absoft Corp'
    version_pattern = r'(f90:.*?(Absoft Pro FORTRAN Version|FORTRAN 77 Compiler|Absoft Fortran Compiler Version|Copyright Absoft Corporation.*?Version))'+\
                      r' (?P<version>[^\s*,]*)(.*?Absoft Corp|)'

    # on windows: f90 -V -c dummy.f
    # f90: Copyright Absoft Corporation 1994-1998 mV2; Cray Research, Inc. 1994-1996 CF90 (2.x.x.x f36t87) Version 2.3 Wed Apr 19, 2006 13:05:16

    # samt5735(8)$ f90 -V -c dummy.f
    # f90: Copyright Absoft Corporation 1994-2002; Absoft Pro FORTRAN Version 8.0
    # Note that fink installs g77 as f77, so need to use f90 for detection.

    executables = {
        'version_cmd'  : None,          # set by update_executables
        'compiler_f77' : ["f77"],
        'compiler_fix' : ["f90"],
        'compiler_f90' : ["f90"],
        'linker_so'    : ["<F90>"],
        'archiver'     : ["ar", "-cr"],
        'ranlib'       : ["ranlib"]
        }

    if os.name=='nt':
        library_switch = '/out:'      #No space after /out:!

    module_dir_switch = None
    module_include_switch = '-p'

    def update_executables(self):
        # The version probe needs a compilable dummy source file; cyg2win32
        # converts the temp-file path when running under cygwin.
        f = cyg2win32(dummy_fortran_file())
        self.executables['version_cmd'] = ['<F90>', '-V', '-c',
                                           f+'.f', '-o', f+'.o']

    def get_flags_linker_so(self):
        """Return the flags used when linking a shared library."""
        if os.name=='nt':
            opt = ['/dll']
        # The "-K shared" switches are being left in for pre-9.0 versions
        # of Absoft though I don't think versions earlier than 9 can
        # actually be used to build shared libraries. In fact, version
        # 8 of Absoft doesn't recognize "-K shared" and will fail.
        elif self.get_version() >= '9.0':
            opt = ['-shared']
        else:
            opt = ["-K", "shared"]
        return opt

    def library_dir_option(self, dir):
        # On Windows the linker options must be forwarded through '-link'.
        if os.name=='nt':
            return ['-link', '/PATH:%s' % (dir)]
        return "-L" + dir

    def library_option(self, lib):
        if os.name=='nt':
            return '%s.lib' % (lib)
        return "-l" + lib

    def get_library_dirs(self):
        """Extend the base dirs with the Absoft install tree (``$ABSOFT``)."""
        opt = FCompiler.get_library_dirs(self)
        d = os.environ.get('ABSOFT')
        if d:
            if self.get_version() >= '10.0':
                # use shared libraries, the static libraries were not compiled -fPIC
                prefix = 'sh'
            else:
                prefix = ''
            if cpu.is_64bit():
                suffix = '64'
            else:
                suffix = ''
            opt.append(os.path.join(d, '%slib%s' % (prefix, suffix)))
        return opt

    def get_libraries(self):
        """Return the Absoft runtime libraries for the detected version."""
        opt = FCompiler.get_libraries(self)
        if self.get_version() >= '11.0':
            opt.extend(['af90math', 'afio', 'af77math', 'amisc'])
        elif self.get_version() >= '10.0':
            opt.extend(['af90math', 'afio', 'af77math', 'U77'])
        elif self.get_version() >= '8.0':
            opt.extend(['f90math', 'fio', 'f77math', 'U77'])
        else:
            opt.extend(['fio', 'f90math', 'fmath', 'U77'])
        if os.name =='nt':
            opt.append('COMDLG32')
        return opt

    def get_flags(self):
        opt = FCompiler.get_flags(self)
        if os.name != 'nt':
            opt.extend(['-s'])
            if self.get_version():
                if self.get_version()>='8.2':
                    opt.append('-fpic')
        return opt

    def get_flags_f77(self):
        """Fixed-form FORTRAN 77 compile flags."""
        opt = FCompiler.get_flags_f77(self)
        opt.extend(['-N22', '-N90', '-N110'])
        v = self.get_version()
        if os.name == 'nt':
            if v and v>='8.0':
                opt.extend(['-f', '-N15'])
        else:
            opt.append('-f')
            if v:
                if v<='4.6':
                    opt.append('-B108')
                else:
                    # Though -N15 is undocumented, it works with
                    # Absoft 8.0 on Linux
                    opt.append('-N15')
        return opt

    def get_flags_f90(self):
        """Free-form Fortran 90 compile flags (external-name mangling)."""
        opt = FCompiler.get_flags_f90(self)
        opt.extend(["-YCFRL=1", "-YCOM_NAMES=LCS", "-YCOM_PFX", "-YEXT_PFX",
                    "-YCOM_SFX=_", "-YEXT_SFX=_", "-YEXT_NAMES=LCS"])
        if self.get_version():
            if self.get_version()>'4.6':
                opt.extend(["-YDEALLOC=ALL"])
        return opt

    def get_flags_fix(self):
        """Fixed-form Fortran 90 compile flags."""
        opt = FCompiler.get_flags_fix(self)
        opt.extend(["-YCFRL=1", "-YCOM_NAMES=LCS", "-YCOM_PFX", "-YEXT_PFX",
                    "-YCOM_SFX=_", "-YEXT_SFX=_", "-YEXT_NAMES=LCS"])
        opt.extend(["-f", "fixed"])
        return opt

    def get_flags_opt(self):
        opt = ['-O']
        return opt
if __name__ == '__main__':
    # Manual smoke test: print the Absoft compiler version detected on this
    # machine (requires the compiler to be installed and on PATH).
    from distutils import log
    log.set_verbosity(2)
    from numpy.distutils import customized_fcompiler
    print(customized_fcompiler(compiler='absoft').get_version())
| bsd-3-clause |
eusi/MissionPlanerHM | Lib/site-packages/scipy/ndimage/_ni_support.py | 62 | 3326 | # Copyright (C) 2003-2005 Peter J. Verveer
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
#
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following
# disclaimer in the documentation and/or other materials provided
# with the distribution.
#
# 3. The name of the author may not be used to endorse or promote
# products derived from this software without specific prior
# written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS
# OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY
# DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE
# GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY,
# WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import numbers
import types

import numpy
def _extend_mode_to_code(mode):
"""Convert an extension mode to the corresponding integer code.
"""
if mode == 'nearest':
return 0
elif mode == 'wrap':
return 1
elif mode == 'reflect':
return 2
elif mode == 'mirror':
return 3
elif mode == 'constant':
return 4
else:
raise RuntimeError('boundary mode not supported')
def _normalize_sequence(input, rank, array_type=None):
"""If input is a scalar, create a sequence of length equal to the
rank by duplicating the input. If input is a sequence,
check if its length is equal to the length of array.
"""
if (isinstance(input, (types.IntType, types.LongType,
types.FloatType))):
normalized = [input] * rank
else:
normalized = list(input)
if len(normalized) != rank:
err = "sequence argument must have length equal to input rank"
raise RuntimeError(err)
return normalized
def _get_output(output, input, shape=None):
if shape is None:
shape = input.shape
if output is None:
output = numpy.zeros(shape, dtype = input.dtype.name)
return_value = output
elif type(output) in [type(types.TypeType), type(numpy.zeros((4,)).dtype)]:
output = numpy.zeros(shape, dtype = output)
return_value = output
elif type(output) is types.StringType:
output = numpy.typeDict[output]
output = numpy.zeros(shape, dtype = output)
return_value = output
else:
if output.shape != shape:
raise RuntimeError("output shape not correct")
return_value = None
return output, return_value
def _check_axis(axis, rank):
if axis < 0:
axis += rank
if axis < 0 or axis >= rank:
raise ValueError('invalid axis')
return axis
| gpl-3.0 |
nikesh-mahalka/nova | nova/openstack/common/cliutils.py | 57 | 7940 | # Copyright 2012 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
# W0603: Using the global statement
# W0621: Redefining name %s from outer scope
# pylint: disable=W0603,W0621
from __future__ import print_function
import getpass
import inspect
import os
import sys
import textwrap
from oslo_utils import encodeutils
from oslo_utils import strutils
import prettytable
import six
from six import moves
from nova.openstack.common._i18n import _
class MissingArgs(Exception):
    """Supplied arguments are not sufficient for calling a function."""
    def __init__(self, missing):
        # List of required argument names that were not supplied; kept on
        # the instance so callers can inspect exactly what was missing.
        self.missing = missing
        msg = _("Missing arguments: %s") % ", ".join(missing)
        super(MissingArgs, self).__init__(msg)
def validate_args(fn, *args, **kwargs):
    """Check that the supplied args are sufficient for calling a function.

    >>> validate_args(lambda a: None)
    Traceback (most recent call last):
    ...
    MissingArgs: Missing arguments: a
    >>> validate_args(lambda a, b, c, d: None, 0, c=1)
    Traceback (most recent call last):
    ...
    MissingArgs: Missing arguments: b, d

    :param fn: the function to check
    :param args: the positional arguments supplied
    :param kwargs: the keyword arguments supplied
    :raises MissingArgs: if any required argument is not supplied
    """
    argspec = inspect.getargspec(fn)

    num_defaults = len(argspec.defaults or [])
    # Arguments without defaults are the required ones.
    required_args = argspec.args[:len(argspec.args) - num_defaults]

    def isbound(method):
        return getattr(method, '__self__', None) is not None

    if isbound(fn):
        # Bound methods receive 'self'/'cls' implicitly.
        required_args.pop(0)

    missing = [arg for arg in required_args if arg not in kwargs]
    # Positional arguments fill the leading required slots.
    missing = missing[len(args):]
    if missing:
        raise MissingArgs(missing)
def arg(*args, **kwargs):
    """Decorator for CLI args.

    Example:

    >>> @arg("name", help="Name of the new entity")
    ... def entity_create(args):
    ...     pass
    """
    def _wrapper(func):
        # Record the CLI argument spec on the decorated function itself.
        add_arg(func, *args, **kwargs)
        return func
    return _wrapper
def env(*args, **kwargs):
    """Returns the first environment variable set.

    If all are empty, defaults to '' or keyword arg `default`.
    """
    for name in args:
        value = os.environ.get(name)
        if value:
            return value
    return kwargs.get('default', '')
def add_arg(func, *args, **kwargs):
    """Bind CLI arguments to a shell.py `do_foo` function."""
    arguments = getattr(func, 'arguments', None)
    if arguments is None:
        arguments = func.arguments = []
    # NOTE(sirp): avoid dups that can occur when the module is shared across
    # tests.
    if (args, kwargs) not in arguments:
        # Because of the semantics of decorator composition if we just append
        # to the options list positional options will appear to be backwards.
        arguments.insert(0, (args, kwargs))
def unauthenticated(func):
    """Adds 'unauthenticated' attribute to decorated function.

    Usage:

    >>> @unauthenticated
    ... def mymethod(f):
    ...     pass
    """
    setattr(func, 'unauthenticated', True)
    return func
def isunauthenticated(func):
    """Checks if the function does not require authentication.

    Mark such functions with the `@unauthenticated` decorator.

    :returns: bool
    """
    try:
        return func.unauthenticated
    except AttributeError:
        return False
def print_list(objs, fields, formatters=None, sortby_index=0,
               mixed_case_fields=None, field_labels=None):
    """Print a list or objects as a table, one row per object.

    :param objs: iterable of :class:`Resource`
    :param fields: attributes that correspond to columns, in order
    :param formatters: `dict` of callables for field formatting
    :param sortby_index: index of the field for sorting table rows
    :param mixed_case_fields: fields corresponding to object attributes that
        have mixed case names (e.g., 'serverId')
    :param field_labels: Labels to use in the heading of the table, default to
        fields.
    :raises ValueError: if ``field_labels`` and ``fields`` differ in length
    """
    formatters = formatters or {}
    mixed_case_fields = mixed_case_fields or []
    field_labels = field_labels or fields
    if len(field_labels) != len(fields):
        # Interpolate with % before constructing the exception; passing the
        # mapping as a second positional argument to ValueError would leave
        # the %(...)s placeholders unexpanded in the message.
        raise ValueError(_("Field labels list %(labels)s has different number "
                           "of elements than fields list %(fields)s")
                         % {'labels': field_labels, 'fields': fields})

    if sortby_index is None:
        kwargs = {}
    else:
        kwargs = {'sortby': field_labels[sortby_index]}
    pt = prettytable.PrettyTable(field_labels)
    pt.align = 'l'
    for o in objs:
        row = []
        for field in fields:
            if field in formatters:
                row.append(formatters[field](o))
            else:
                # Map the column name to the attribute name on the object.
                if field in mixed_case_fields:
                    field_name = field.replace(' ', '_')
                else:
                    field_name = field.lower().replace(' ', '_')
                data = getattr(o, field_name, '')
                row.append(data)
        pt.add_row(row)

    if six.PY3:
        print(encodeutils.safe_encode(pt.get_string(**kwargs)).decode())
    else:
        print(encodeutils.safe_encode(pt.get_string(**kwargs)))
def print_dict(dct, dict_property="Property", wrap=0):
    """Print a `dict` as a table of two columns.

    :param dct: `dict` to print
    :param dict_property: name of the first column
    :param wrap: wrapping for the second column
    """
    pt = prettytable.PrettyTable([dict_property, 'Value'])
    pt.align = 'l'
    for k, v in six.iteritems(dct):
        # convert dict to str to check length
        if isinstance(v, dict):
            v = six.text_type(v)
        if wrap > 0:
            v = textwrap.fill(six.text_type(v), wrap)
        # if value has a newline, add in multiple rows
        # e.g. fault with stacktrace
        # NOTE(review): r'\n' matches a *literal* backslash-n sequence, not
        # a real newline -- presumably intentional for pre-escaped fault
        # text; confirm against callers before changing.
        if v and isinstance(v, six.string_types) and r'\n' in v:
            lines = v.strip().split(r'\n')
            col1 = k
            for line in lines:
                pt.add_row([col1, line])
                # Only label the first row of a multi-line value.
                col1 = ''
        else:
            pt.add_row([k, v])
    if six.PY3:
        print(encodeutils.safe_encode(pt.get_string()).decode())
    else:
        print(encodeutils.safe_encode(pt.get_string()))
def get_password(max_password_prompts=3):
    """Read password from TTY.

    Prompts interactively (optionally twice when $OS_VERIFY_PASSWORD is
    truthy) and returns the password, or None when stdin is not a TTY,
    the prompts are exhausted without a match, or the user hits Ctrl-D.

    :param max_password_prompts: maximum number of prompt attempts
    """
    verify = strutils.bool_from_string(env("OS_VERIFY_PASSWORD"))
    pw = None
    if hasattr(sys.stdin, "isatty") and sys.stdin.isatty():
        # Check for Ctrl-D
        try:
            for __ in moves.range(max_password_prompts):
                pw1 = getpass.getpass("OS Password: ")
                if verify:
                    pw2 = getpass.getpass("Please verify: ")
                else:
                    pw2 = pw1
                # Accept only a non-empty password that matched its
                # verification (when verification is enabled).
                if pw1 == pw2 and pw1:
                    pw = pw1
                    break
        except EOFError:
            pass
    return pw
def service_type(stype):
    """Adds 'service_type' attribute to decorated function.

    Usage:

    .. code-block:: python

       @service_type('volume')
       def mymethod(f):
           ...
    """
    def inner(f):
        # Tag the function so dispatch code can route it to a service.
        setattr(f, 'service_type', stype)
        return f
    return inner
def get_service_type(f):
    """Retrieves service type from function."""
    try:
        return f.service_type
    except AttributeError:
        return None
def pretty_choice_list(l):
    """Return the items of *l* single-quoted and comma-separated."""
    quoted = ["'%s'" % item for item in l]
    return ', '.join(quoted)
def exit(msg=''):
    """Print *msg* to stderr when non-empty, then terminate with status 1.

    NOTE: intentionally shadows the ``exit`` builtin within this module.
    """
    if msg:
        print(msg, file=sys.stderr)
    sys.exit(1)
| apache-2.0 |
mitocw/edx-platform | openedx/features/enterprise_support/tests/test_logout.py | 3 | 2724 | """
Tests for logout for enterprise flow
"""
import ddt
import mock
from django.test.utils import override_settings
from django.urls import reverse
from openedx.core.djangolib.testing.utils import CacheIsolationTestCase, skip_unless_lms
from openedx.features.enterprise_support.api import enterprise_enabled
from openedx.features.enterprise_support.tests import (
FAKE_ENTERPRISE_CUSTOMER,
FEATURES_WITH_ENTERPRISE_ENABLED,
factories
)
from openedx.features.enterprise_support.tests.mixins.enterprise import EnterpriseServiceMockMixin
from student.tests.factories import UserFactory
from util.testing import UrlResetMixin
@ddt.ddt
@override_settings(FEATURES=FEATURES_WITH_ENTERPRISE_ENABLED)
@skip_unless_lms
class EnterpriseLogoutTests(EnterpriseServiceMockMixin, CacheIsolationTestCase, UrlResetMixin):
    """ Tests for the enterprise logout functionality. """

    def setUp(self):
        """Log in an enterprise learner and stub the enterprise-customer API."""
        super(EnterpriseLogoutTests, self).setUp()
        self.user = UserFactory()
        self.enterprise_customer = FAKE_ENTERPRISE_CUSTOMER
        self.enterprise_learner = factories.EnterpriseCustomerUserFactory(user_id=self.user.id)
        self.client.login(username=self.user.username, password='test')
        # Patch the API lookup so every request appears to come from an
        # enterprise learner; the cleanup undoes the patch after each test.
        patcher = mock.patch('openedx.features.enterprise_support.api.enterprise_customer_from_api')
        self.mock_enterprise_customer_from_api = patcher.start()
        self.mock_enterprise_customer_from_api.return_value = self.enterprise_customer
        self.addCleanup(patcher.stop)

    # Each tuple is (redirect_url, expected enterprise_target flag); both
    # plain and percent-encoded forms of the enterprise URLs are covered.
    @ddt.data(
        ('https%3A%2F%2Ftest.edx.org%2Fcourses', False),
        ('/courses/course-v1:ARTS+D1+2018_T/course/', False),
        ('invalid-url', False),
        ('/enterprise/c5dad9a7-741c-4841-868f-850aca3ff848/course/Microsoft+DAT206x/enroll/', True),
        ('%2Fenterprise%2Fc5dad9a7-741c-4841-868f-850aca3ff848%2Fcourse%2FMicrosoft%2BDAT206x%2Fenroll%2F', True),
        ('/enterprise/handle_consent_enrollment/efd91463-dc40-4882-aeb9-38202131e7b2/course', True),
        ('%2Fenterprise%2Fhandle_consent_enrollment%2Fefd91463-dc40-4882-aeb9-38202131e7b2%2Fcourse', True),
    )
    @ddt.unpack
    def test_logout_enterprise_target(self, redirect_url, enterprise_target):
        """Logout view flags enterprise redirect targets in its context."""
        url = '{logout_path}?redirect_url={redirect_url}'.format(
            logout_path=reverse('logout'),
            redirect_url=redirect_url
        )
        self.assertTrue(enterprise_enabled())
        response = self.client.get(url, HTTP_HOST='testserver')
        expected = {
            'enterprise_target': enterprise_target,
        }
        self.assertDictContainsSubset(expected, response.context_data)

        if enterprise_target:
            self.assertContains(response, 'We are signing you in.')
| agpl-3.0 |
smathot/PyGaze | opensesame_plugins/pygaze_drift_correct/pygaze_drift_correct.py | 3 | 3469 | #-*- coding:utf-8 -*-
"""
This file is part of PyGaze.
PyGaze is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
PyGaze is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with PyGaze. If not, see <http://www.gnu.org/licenses/>.
"""
import inspect
from openexp.canvas import canvas
from libopensesame.item import item
from libqtopensesame.items.qtautoplugin import qtautoplugin
from pygaze.display import Display
class pygaze_drift_correct(item):

	"""Plug-in runtime definition: performs eye-tracker drift correction."""

	description = u'Perform eye-tracker drift correction'

	def reset(self):

		"""
		desc:
			Resets plug-in settings.
		"""

		# Target position; interpreted relative to the display centre when
		# uniform_coordinates is enabled (see run()).
		self.var.xpos = 0
		self.var.ypos = 0
		self.var.fixation_triggered = u'no'
		self.var.target_color = u'[foreground]'
		self.var.target_style = u'default'
		self.var.draw_target = u'yes'

	def prepare_drift_correction_canvas(self):

		"""A hook to prepare the canvas with the drift-correction target."""

		if self.var.draw_target == u'yes':
			self.dc_canvas = canvas(self.experiment)
			self.dc_canvas.fixdot(self.var.xpos, self.var.ypos,
				color=self.var.target_color, style=self.var.target_style)
		else:
			# No target requested; draw_drift_correction_canvas becomes a
			# no-op.
			self.dc_canvas = None

	def draw_drift_correction_canvas(self, x, y):

		"""
		A hook to show the canvas with the drift-correction target.

		Arguments:
		x	--	The X coordinate (unused).
		y	--	The Y coordinate (unused).
		"""

		if self.dc_canvas is not None:
			self.dc_canvas.show()

	def prepare(self):

		"""The preparation phase of the plug-in goes here."""

		item.prepare(self)
		self.prepare_drift_correction_canvas()
		# Register our drawing hook so the tracker shows our target.
		self.experiment.pygaze_eyetracker.set_draw_drift_correction_target_func(
			self.draw_drift_correction_canvas)

	def run(self):

		"""The run phase of the plug-in goes here."""

		self.set_item_onset()
		if self.var.uniform_coordinates == u'yes':
			# Uniform coordinates are relative to the display centre.
			xpos = self.var.width / 2 + self.var.xpos
			ypos = self.var.height / 2 + self.var.ypos
		else:
			xpos = self.var.xpos
			ypos = self.var.ypos
		# Retry until the tracker reports a successful drift correction.
		while True:
			success = self.experiment.pygaze_eyetracker.drift_correction(
				pos=(xpos, ypos),
				fix_triggered=self.var.fixation_triggered==u'yes')
			if success:
				break
class qtpygaze_drift_correct(pygaze_drift_correct, qtautoplugin):

	"""Plug-in GUI definition."""

	def __init__(self, name, experiment, script=None):

		"""
		Constructor.

		Arguments:
		name		--	The name of the plug-in.
		experiment	--	The experiment object.

		Keyword arguments:
		script		--	A definition script. (default=None)
		"""

		pygaze_drift_correct.__init__(self, name, experiment, script)
		qtautoplugin.__init__(self, __file__)
		self.custom_interactions()

	def apply_edit_changes(self):

		"""Apply the controls"""

		if not qtautoplugin.apply_edit_changes(self) or self.lock:
			return False
		self.custom_interactions()

	def custom_interactions(self):

		"""
		Disables the target-style combobox if no target display should be drawn.
		"""

		draw_target = self.var.draw_target == u'yes'
		self.combobox_target_style.setEnabled(draw_target)
		self.line_edit_target_color.setEnabled(draw_target)
| gpl-3.0 |
LeonChambers/bwhglass-server | lib/werkzeug/utils.py | 317 | 22676 | # -*- coding: utf-8 -*-
"""
werkzeug.utils
~~~~~~~~~~~~~~
This module implements various utilities for WSGI applications. Most of
them are used by the request and response wrappers but especially for
middleware development it makes sense to use them without the wrappers.
:copyright: (c) 2013 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import re
import os
import sys
import pkgutil
try:
from html.entities import name2codepoint
except ImportError:
from htmlentitydefs import name2codepoint
from werkzeug._compat import unichr, text_type, string_types, iteritems, \
reraise, PY2
from werkzeug._internal import _DictAccessorProperty, \
_parse_signature, _missing
_format_re = re.compile(r'\$(?:(%s)|\{(%s)\})' % (('[a-zA-Z_][a-zA-Z0-9_]*',) * 2))
_entity_re = re.compile(r'&([^;]+);')
_filename_ascii_strip_re = re.compile(r'[^A-Za-z0-9_.-]')
_windows_device_files = ('CON', 'AUX', 'COM1', 'COM2', 'COM3', 'COM4', 'LPT1',
'LPT2', 'LPT3', 'PRN', 'NUL')
class cached_property(object):
    """A decorator that converts a function into a lazy property.  The
    wrapped function is called once, on first access, and its result is
    stored in the instance's ``__dict__``; later accesses read the stored
    value directly::

        class Foo(object):

            @cached_property
            def foo(self):
                # calculate something important here
                return 42

    The class has to have a `__dict__` in order for this property to
    work.
    """

    # Implementation note: this is a *non-data* descriptor (no __set__ /
    # __delete__), so once the computed value has been written into the
    # instance __dict__, ordinary attribute lookup finds it first and the
    # descriptor is not consulted again.  Manual __get__ invocations keep
    # working because the same lookup logic is replicated below.

    def __init__(self, func, name=None, doc=None):
        self.__name__ = name or func.__name__
        self.__module__ = func.__module__
        self.__doc__ = doc or func.__doc__
        self.func = func

    def __get__(self, obj, type=None):
        if obj is None:
            # Accessed on the class: return the descriptor itself.
            return self
        try:
            return obj.__dict__[self.__name__]
        except KeyError:
            result = self.func(obj)
            obj.__dict__[self.__name__] = result
            return result
class environ_property(_DictAccessorProperty):
    """Maps request attributes to environment variables. This works not only
    for the Werkzeug request object, but also any other class with an
    environ attribute:

    >>> class Test(object):
    ...     environ = {'key': 'value'}
    ...     test = environ_property('key')
    >>> var = Test()
    >>> var.test
    'value'

    If you pass it a second value it's used as default if the key does not
    exist, the third one can be a converter that takes a value and converts
    it.  If it raises :exc:`ValueError` or :exc:`TypeError` the default value
    is used. If no default value is provided `None` is used.

    Per default the property is read only.  You have to explicitly enable it
    by passing ``read_only=False`` to the constructor.
    """

    read_only = True

    def lookup(self, obj):
        # _DictAccessorProperty calls this to obtain the dict to read from
        # (and write to, when not read-only); here that is the WSGI environ.
        return obj.environ
class header_property(_DictAccessorProperty):
    """Like `environ_property` but for headers."""

    def lookup(self, obj):
        # The backing mapping is the headers object instead of the environ.
        return obj.headers
class HTMLBuilder(object):
    """Helper object for HTML generation.

    Per default there are two instances of that class.  The `html` one, and
    the `xhtml` one for those two dialects.  The class uses keyword parameters
    and positional parameters to generate small snippets of HTML.

    Keyword parameters are converted to XML/SGML attributes, positional
    arguments are used as children.  Because Python accepts positional
    arguments before keyword arguments it's a good idea to use a list with the
    star-syntax for some children:

    >>> html.p(class_='foo', *[html.a('foo', href='foo.html'), ' ',
    ...                        html.a('bar', href='bar.html')])
    u'<p class="foo"><a href="foo.html">foo</a> <a href="bar.html">bar</a></p>'

    This class works around some browser limitations and can not be used for
    arbitrary SGML/XML generation.  For that purpose lxml and similar
    libraries exist.

    Calling the builder escapes the string passed:

    >>> html.p(html("<foo>"))
    u'<p>&lt;foo&gt;</p>'
    """

    _entity_re = re.compile(r'&([^;]+);')
    _entities = name2codepoint.copy()
    _entities['apos'] = 39
    # Void elements: rendered without a closing tag.
    _empty_elements = set([
        'area', 'base', 'basefont', 'br', 'col', 'command', 'embed', 'frame',
        'hr', 'img', 'input', 'keygen', 'isindex', 'link', 'meta', 'param',
        'source', 'wbr'
    ])
    # Attributes that are presence-only in HTML (value-less).
    _boolean_attributes = set([
        'selected', 'checked', 'compact', 'declare', 'defer', 'disabled',
        'ismap', 'multiple', 'nohref', 'noresize', 'noshade', 'nowrap'
    ])
    _plaintext_elements = set(['textarea'])
    # Elements whose content must be wrapped in CDATA for XHTML.
    _c_like_cdata = set(['script', 'style'])

    def __init__(self, dialect):
        # Either 'html' or 'xhtml'; controls void-element and boolean
        # attribute serialization.
        self._dialect = dialect

    def __call__(self, s):
        return escape(s)

    def __getattr__(self, tag):
        if tag[:2] == '__':
            raise AttributeError(tag)
        def proxy(*children, **arguments):
            buffer = '<' + tag
            for key, value in iteritems(arguments):
                if value is None:
                    continue
                # Trailing underscore lets callers pass reserved words,
                # e.g. class_='foo'.
                if key[-1] == '_':
                    key = key[:-1]
                if key in self._boolean_attributes:
                    if not value:
                        continue
                    if self._dialect == 'xhtml':
                        value = '="' + key + '"'
                    else:
                        value = ''
                else:
                    value = '="' + escape(value) + '"'
                buffer += ' ' + key + value
            if not children and tag in self._empty_elements:
                if self._dialect == 'xhtml':
                    buffer += ' />'
                else:
                    buffer += '>'
                return buffer
            buffer += '>'

            children_as_string = ''.join([text_type(x) for x in children
                                          if x is not None])

            if children_as_string:
                if tag in self._plaintext_elements:
                    children_as_string = escape(children_as_string)
                elif tag in self._c_like_cdata and self._dialect == 'xhtml':
                    children_as_string = '/*<![CDATA[*/' + \
                        children_as_string + '/*]]>*/'
            buffer += children_as_string + '</' + tag + '>'
            return buffer
        return proxy

    def __repr__(self):
        return '<%s for %r>' % (
            self.__class__.__name__,
            self._dialect
        )
# Ready-to-use builder instances for the two supported dialects.
html = HTMLBuilder('html')
xhtml = HTMLBuilder('xhtml')
def get_content_type(mimetype, charset):
    """Return the full content type string with charset for a mimetype.

    If the mimetype represents text the charset will be appended as charset
    parameter, otherwise the mimetype is returned unchanged.

    :param mimetype: the mimetype to be used as content type.
    :param charset: the charset to be appended in case it was a text mimetype.
    :return: the content type.
    """
    needs_charset = (
        mimetype.startswith('text/')
        or mimetype == 'application/xml'
        or (mimetype.startswith('application/')
            and mimetype.endswith('+xml'))
    )
    if needs_charset:
        return mimetype + '; charset=' + charset
    return mimetype
def format_string(string, context):
    """String-template format a string:

    >>> format_string('$foo and ${foo}s', dict(foo=42))
    '42 and 42s'

    This does not do any attribute lookup etc.  For more advanced string
    formattings have a look at the `werkzeug.template` module.

    :param string: the format string.
    :param context: a dict with the variables to insert.
    """
    def lookup_arg(match):
        # $name matches group 1, ${name} matches group 2.
        value = context[match.group(1) or match.group(2)]
        if isinstance(value, string_types):
            return value
        # Coerce to the same string type as the template.
        return type(string)(value)
    return _format_re.sub(lookup_arg, string)
def secure_filename(filename):
    r"""Pass it a filename and it will return a secure version of it.  This
    filename can then safely be stored on a regular file system and passed
    to :func:`os.path.join`.  The filename returned is an ASCII only string
    for maximum portability.

    On windows system the function also makes sure that the file is not
    named after one of the special device files.

    >>> secure_filename("My cool movie.mov")
    'My_cool_movie.mov'
    >>> secure_filename("../../../etc/passwd")
    'etc_passwd'
    >>> secure_filename(u'i contain cool \xfcml\xe4uts.txt')
    'i_contain_cool_umlauts.txt'

    The function might return an empty filename.  It's your responsibility
    to ensure that the filename is unique and that you generate random
    filename if the function returned an empty one.

    .. versionadded:: 0.5

    :param filename: the filename to secure
    """
    if isinstance(filename, text_type):
        from unicodedata import normalize
        # Decompose accented characters and drop the non-ASCII parts.
        filename = normalize('NFKD', filename).encode('ascii', 'ignore')
        if not PY2:
            filename = filename.decode('ascii')
    # Neutralize path separators from either platform.
    for sep in os.path.sep, os.path.altsep:
        if sep:
            filename = filename.replace(sep, ' ')
    filename = str(_filename_ascii_strip_re.sub('', '_'.join(
                   filename.split()))).strip('._')

    # on nt a couple of special files are present in each folder.  We
    # have to ensure that the target file is not such a filename.  In
    # this case we prepend an underline
    if os.name == 'nt' and filename and \
       filename.split('.')[0].upper() in _windows_device_files:
        filename = '_' + filename

    return filename
def escape(s, quote=None):
    """Replace special characters "&", "<", ">" and (") to HTML-safe sequences.

    There is a special handling for `None` which escapes to an empty string.

    .. versionchanged:: 0.9
       `quote` is now implicitly on.

    :param s: the string to escape.
    :param quote: ignored.
    """
    if s is None:
        return ''
    elif hasattr(s, '__html__'):
        # The object provides its own pre-escaped markup.
        return text_type(s.__html__())
    elif not isinstance(s, string_types):
        s = text_type(s)
    if quote is not None:
        from warnings import warn
        warn(DeprecationWarning('quote parameter is implicit now'), stacklevel=2)
    # '&' must be replaced first so the entities introduced below are not
    # themselves re-escaped.  (The previous replacements mapped each
    # character to itself, providing no escaping at all.)
    s = s.replace('&', '&amp;').replace('<', '&lt;') \
        .replace('>', '&gt;').replace('"', '&quot;')
    return s
def unescape(s):
    """The reverse function of `escape`.  This unescapes all the HTML
    entities, not only the XML entities inserted by `escape`.

    :param s: the string to unescape.
    """
    def handle_match(m):
        name = m.group(1)
        # Named entities first (e.g. amp, lt, apos).
        if name in HTMLBuilder._entities:
            return unichr(HTMLBuilder._entities[name])
        try:
            # Numeric character references: hex (&#x..;) or decimal (&#..;).
            if name[:2] in ('#x', '#X'):
                return unichr(int(name[2:], 16))
            elif name.startswith('#'):
                return unichr(int(name[1:]))
        except ValueError:
            pass
        # Unknown entities are dropped rather than kept verbatim.
        return u''
    return _entity_re.sub(handle_match, s)
def redirect(location, code=302):
    """Return a response object (a WSGI application) that, if called,
    redirects the client to the target location.  Supported codes are 301,
    302, 303, 305, and 307.  300 is not supported because it's not a real
    redirect and 304 because it's the answer for a request with a request
    with defined If-Modified-Since headers.

    .. versionadded:: 0.6
       The location can now be a unicode string that is encoded using
       the :func:`iri_to_uri` function.

    :param location: the location the response should redirect to.
    :param code: the redirect status code. defaults to 302.
    """
    from werkzeug.wrappers import Response
    # Escape the human-readable copy of the location before the IRI->URI
    # conversion below may alter it.
    display_location = escape(location)
    if isinstance(location, text_type):
        from werkzeug.urls import iri_to_uri
        location = iri_to_uri(location)
    response = Response(
        '<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 3.2 Final//EN">\n'
        '<title>Redirecting...</title>\n'
        '<h1>Redirecting...</h1>\n'
        '<p>You should be redirected automatically to target URL: '
        '<a href="%s">%s</a>.  If not click the link.' %
        (escape(location), display_location), code, mimetype='text/html')
    response.headers['Location'] = location
    return response
def append_slash_redirect(environ, code=301):
    """Redirect to the same URL but with a slash appended.  The behavior
    of this function is undefined if the path ends with a slash already.

    :param environ: the WSGI environment for the request that triggers
                    the redirect.
    :param code: the status code for the redirect.
    """
    target = environ['PATH_INFO'].strip('/') + '/'
    qs = environ.get('QUERY_STRING')
    if qs:
        # Preserve the original query string on the redirect target.
        target = '%s?%s' % (target, qs)
    return redirect(target, code)
def import_string(import_name, silent=False):
    """Imports an object based on a string.  This is useful if you want to
    use import paths as endpoints or something similar.  An import path can
    be specified either in dotted notation (``xml.sax.saxutils.escape``)
    or with a colon as object delimiter (``xml.sax.saxutils:escape``).

    If `silent` is True the return value will be `None` if the import fails.

    :param import_name: the dotted name for the object to import.
    :param silent: if set to `True` import errors are ignored and
                   `None` is returned instead.
    :return: imported object
    """
    #XXX: py3 review needed
    assert isinstance(import_name, string_types)
    # force the import name to automatically convert to strings
    import_name = str(import_name)
    try:
        if ':' in import_name:
            module, obj = import_name.split(':', 1)
        elif '.' in import_name:
            # Dotted form: the last component is assumed to be the object.
            module, obj = import_name.rsplit('.', 1)
        else:
            # Plain module name, no attribute to look up.
            return __import__(import_name)
        # __import__ is not able to handle unicode strings in the fromlist
        # if the module is a package
        if PY2 and isinstance(obj, unicode):
            obj = obj.encode('utf-8')
        try:
            return getattr(__import__(module, None, None, [obj]), obj)
        except (ImportError, AttributeError):
            # support importing modules not yet set up by the parent module
            # (or package for that matter)
            modname = module + '.' + obj
            __import__(modname)
            return sys.modules[modname]
    except ImportError as e:
        if not silent:
            # Re-raise with the original traceback, wrapped in the more
            # informative ImportStringError.
            reraise(
                ImportStringError,
                ImportStringError(import_name, e),
                sys.exc_info()[2])
def find_modules(import_path, include_packages=False, recursive=False):
    """Find all the modules below a package.  This can be useful to
    automatically import all views / controllers so that their metaclasses /
    function decorators have a chance to register themselves on the
    application.

    Packages are not returned unless `include_packages` is `True`.  This can
    also recursively list modules but in that case it will import all the
    packages to get the correct load path of that module.

    :param import_path: the dotted name for the package to find child modules.
    :param include_packages: set to `True` if packages should be returned, too.
    :param recursive: set to `True` if recursion should happen.
    :return: generator
    """
    package = import_string(import_path)
    pkg_path = getattr(package, '__path__', None)
    if pkg_path is None:
        raise ValueError('%r is not a package' % import_path)
    prefix = package.__name__ + '.'
    for _importer, basename, is_pkg in pkgutil.iter_modules(pkg_path):
        fullname = prefix + basename
        if not is_pkg:
            yield fullname
            continue
        if include_packages:
            yield fullname
        if recursive:
            for submodule in find_modules(fullname, include_packages, True):
                yield submodule
def validate_arguments(func, args, kwargs, drop_extra=True):
    """Check if the function accepts the arguments and keyword arguments
    and return a new ``(args, kwargs)`` tuple that can safely be passed to
    the function without causing a `TypeError` because the function signature
    is incompatible.

    If `drop_extra` is set to `True` (which is the default) any extra
    positional or keyword arguments are dropped automatically; otherwise
    they raise :exc:`ArgumentValidationError`.  Missing required arguments
    always raise :exc:`ArgumentValidationError`.

    The exception raised provides three attributes: `missing` (a set of
    argument names the function expected but did not get), `extra` (a dict
    of unsupported keyword arguments) and `extra_positional` (a list of
    unsupported positional values).

    This can be useful for decorators that forward user submitted data to
    a view function.

    :param func: the function the validation is performed against.
    :param args: a tuple of positional arguments.
    :param kwargs: a dict of keyword arguments.
    :param drop_extra: set to `False` if you don't want extra arguments
                       to be silently dropped.
    :return: tuple in the form ``(args, kwargs)``.
    """
    parsed = _parse_signature(func)(args, kwargs)
    new_args, new_kwargs, missing, extra, extra_positional = parsed[:5]
    if missing:
        raise ArgumentValidationError(tuple(missing))
    if not drop_extra and (extra or extra_positional):
        raise ArgumentValidationError(None, extra, extra_positional)
    return tuple(new_args), new_kwargs
def bind_arguments(func, args, kwargs):
    """Bind the arguments provided into a dict.  When passed a function,
    a tuple of arguments and a dict of keyword arguments `bind_arguments`
    returns a dict of names as the function would see it.  This can be useful
    to implement a cache decorator that uses the function arguments to build
    the cache key based on the values of the arguments.

    :param func: the function the arguments should be bound for.
    :param args: tuple of positional arguments.
    :param kwargs: a dict of keyword arguments.
    :return: a :class:`dict` of bound keyword arguments.
    """
    (args, kwargs, missing, extra, extra_positional,
     arg_spec, vararg_var, kwarg_var) = _parse_signature(func)(args, kwargs)
    # map each declared argument name to the value it received, in order
    bound = dict((spec[0], value) for spec, value in zip(arg_spec, args))
    if vararg_var is not None:
        bound[vararg_var] = tuple(extra_positional)
    elif extra_positional:
        raise TypeError('too many positional arguments')
    if kwarg_var is not None:
        # reject keywords that collide with explicitly declared arguments
        duplicated = set(extra) & set(name for name, _, _ in arg_spec)
        if duplicated:
            raise TypeError('got multiple values for keyword argument ' +
                            repr(next(iter(duplicated))))
        bound[kwarg_var] = extra
    elif extra:
        raise TypeError('got unexpected keyword argument ' +
                        repr(next(iter(extra))))
    return bound
class ArgumentValidationError(ValueError):
    """Raised if :func:`validate_arguments` fails to validate"""

    def __init__(self, missing=None, extra=None, extra_positional=None):
        # normalize the three collections so attribute access is uniform
        self.missing = set(missing or ())
        self.extra = extra or {}
        self.extra_positional = extra_positional or []
        message = 'function arguments invalid. (%d missing, %d additional)' % (
            len(self.missing),
            len(self.extra) + len(self.extra_positional),
        )
        ValueError.__init__(self, message)
class ImportStringError(ImportError):
    """Provides information about a failed :func:`import_string` attempt."""

    #: String in dotted notation that failed to be imported.
    import_name = None
    #: Wrapped exception.
    exception = None

    def __init__(self, import_name, exception):
        self.import_name = import_name
        self.exception = exception
        msg = (
            'import_string() failed for %r. Possible reasons are:\n\n'
            '- missing __init__.py in a package;\n'
            '- package or module path not included in sys.path;\n'
            '- duplicated package or module name taking precedence in '
            'sys.path;\n'
            '- missing module, class, function or variable;\n\n'
            'Debugged import:\n\n%s\n\n'
            'Original exception:\n\n%s: %s')
        # Re-import the dotted name one component at a time to pinpoint
        # which component first fails to resolve.
        partial = ''
        resolved = []
        for piece in import_name.replace(':', '.').split('.'):
            partial = piece if not partial else partial + '.' + piece
            imported = import_string(partial, silent=True)
            if not imported:
                track = ['- %r found in %r.' % pair for pair in resolved]
                track.append('- %r not found.' % partial)
                msg = msg % (import_name, '\n'.join(track),
                             exception.__class__.__name__, str(exception))
                break
            resolved.append((partial, getattr(imported, '__file__', None)))
        ImportError.__init__(self, msg)

    def __repr__(self):
        return '<%s(%r, %r)>' % (self.__class__.__name__, self.import_name,
                                 self.exception)
# circular dependencies
from werkzeug.http import quote_header_value, unquote_header_value, \
cookie_date
# DEPRECATED
# these objects were previously in this module as well. we import
# them here for backwards compatibility with old pickles.
from werkzeug.datastructures import MultiDict, CombinedMultiDict, \
Headers, EnvironHeaders
from werkzeug.http import parse_cookie, dump_cookie
| apache-2.0 |
sumedhasingla/VTK | Filters/General/Testing/Python/warplens.py | 20 | 1517 | #!/usr/bin/env python
import vtk
from vtk.test import Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()

# Rendering scaffolding: a renderer inside a render window, driven by an
# interactor.
renderer = vtk.vtkRenderer()
renderWindow = vtk.vtkRenderWindow()
renderWindow.AddRenderer(renderer)
interactor = vtk.vtkRenderWindowInteractor()
interactor.SetRenderWindow(renderWindow)

# Load the texture map and pull its pixel dimensions.
pngReader = vtk.vtkPNGReader()
pngReader.SetFileName(VTK_DATA_ROOT + "/Data/camscene.png")
pngReader.Update()
imageWidth, imageHeight = pngReader.GetOutput().GetDimensions()[:2]

# Warp the image through a lens-distortion model (radial K1/K2 and
# tangential P1/P2 coefficients, plus principal point and film format).
warp = vtk.vtkWarpLens()
warp.SetInputConnection(pngReader.GetOutputPort())
warp.SetPrincipalPoint(2.4507, 1.7733)
warp.SetFormatWidth(4.792)
warp.SetFormatHeight(3.6)
warp.SetImageWidth(imageWidth)
warp.SetImageHeight(imageHeight)
warp.SetK1(0.01307)
warp.SetK2(0.0003102)
warp.SetP1(1.953e-005)
warp.SetP2(-9.655e-005)

# Convert the warped image into renderable geometry: polydata -> triangles
# -> triangle strips (capped length) -> mapper -> actor.
geometry = vtk.vtkGeometryFilter()
geometry.SetInputConnection(warp.GetOutputPort())
triangulate = vtk.vtkTriangleFilter()
triangulate.SetInputConnection(geometry.GetOutputPort())
stripper = vtk.vtkStripper()
stripper.SetInputConnection(triangulate.GetOutputPort())
stripper.SetMaximumLength(250)
mapper = vtk.vtkPolyDataMapper()
mapper.SetInputConnection(stripper.GetOutputPort())
planeActor = vtk.vtkActor()
planeActor.SetMapper(mapper)

# Compose the scene, size the window, and render.
renderer.AddActor(planeActor)
renderer.SetBackground(0.1, 0.2, 0.4)
renderWindow.SetSize(300, 300)
interactor.Initialize()
renderWindow.Render()
renderer.GetActiveCamera().Zoom(1.4)
renderWindow.Render()
#interactor.Start()
| bsd-3-clause |
vertexproject/synapse | synapse/tests/test_lib_rstorm.py | 1 | 7127 | import os
import synapse.exc as s_exc
import synapse.common as s_common
import synapse.lib.rstorm as s_rstorm
import synapse.tests.utils as s_test
rst_in = '''
HI
##
.. storm-cortex:: default
.. storm-cortex:: default
.. storm-opts:: {"vars": {"foo": 10, "bar": "baz"}}
.. storm-pre:: [ inet:asn=$foo ]
.. storm:: $lib.print($bar) $lib.warn(omgomgomg)
.. storm-expect:: baz
.. storm-pre:: [ inet:ipv6=0 ]
.. storm-pkg:: synapse/tests/files/stormpkg/testpkg.yaml
.. storm:: --hide-props testpkgcmd foo
.. storm:: --hide-query $lib.print(secret)
.. storm:: --hide-query file:bytes
.. storm-svc:: synapse.tests.files.rstorm.testsvc.Testsvc test {"secret": "jupiter"}
.. storm:: testsvc.test
'''
rst_out = '''
HI
##
::
> $lib.print($bar) $lib.warn(omgomgomg)
baz
WARNING: omgomgomg
::
> testpkgcmd foo
inet:ipv6=::ffff:0
::
secret
::
::
> testsvc.test
jupiter
'''
rst_in_debug = '''
HI
##
.. storm-cortex:: default
.. storm:: --debug [ inet:ipv4=0 ]
'''
rst_in_props = '''
HI
##
.. storm-cortex:: default
.. storm:: [ inet:ipv4=0 ]
'''
rst_out_props = '''
HI
##
::
> [ inet:ipv4=0 ]
inet:ipv4=0.0.0.0
:type = private
'''
rst_in_http = '''
HI
##
.. storm-cortex:: default
.. storm:: $resp=$lib.inet.http.get("http://foo.com") $d=$resp.json() $lib.print($d)
.. storm-mock-http:: synapse/tests/files/rstorm/httpresp1.json
.. storm:: $resp=$lib.inet.http.get("http://foo.com") $d=$resp.json() [ inet:ipv4=$d.data ]
.. storm-mock-http:: synapse/tests/files/rstorm/httpresp2.json
.. storm:: $resp=$lib.inet.http.get("http://foo.com") $d=$resp.json() [ inet:ipv4=$d.data ]
.. storm-mock-http:: synapse/tests/files/rstorm/httpresp3.json
.. storm:: $resp=$lib.inet.http.get("http://foo.com") $d=$resp.body.decode() [ it:dev:str=$d ]
'''
boom1 = '''
.. storm:: $lib.print(newp)
'''
boom2 = '''
.. storm-pre:: $lib.print(newp)
'''
boom3 = '''
.. storm-cortex:: default
.. storm:: $x = (10 + "foo")
'''
boom4 = '''
.. storm-pkg:: synapse/tests/files/stormpkg/testpkg.yaml
'''
boom5 = '''
.. storm-svc:: synapse.tests.files.rstorm.testsvc.Testsvc test {"secret": "jupiter"}
'''
boom6 = '''
.. storm-cortex:: default
.. storm-svc:: synapse.tests.files.rstorm.testsvc.Testsvc test
'''
boom7 = '''
.. storm-cortex:: default
.. storm-pkg:: synapse/tests/files/stormpkg/newp.newp
'''
boom8 = '''
.. storm-cortex:: default
.. storm-mock-http:: synapse/tests/files/rstorm/newp.newp
'''
boom9 = '''
.. storm-newp:: newp
'''
async def get_rst_text(rstfile):
    # Render the rstorm file and return the generated output as one string,
    # tearing the StormRst instance down even when rendering raises.
    async with await s_rstorm.StormRst.anit(rstfile) as rstorm:
        return ''.join(await rstorm.run())
class RStormLibTest(s_test.SynTest):
    """Tests for :mod:`synapse.lib.rstorm` document rendering."""

    async def test_lib_rstorm(self):
        """Render the fixture documents: success paths, every boom* failure
        mode, teardown on error, and a bad file extension."""
        with self.getTestDir() as dirn:

            # full directive suite renders exactly to rst_out
            path = s_common.genpath(dirn, 'test.rst')
            with s_common.genfile(path) as fd:
                fd.write(rst_in.encode())

            text = await get_rst_text(path)
            self.eq(text, rst_out)

            # debug output
            path = s_common.genpath(dirn, 'test2.rst')
            with s_common.genfile(path) as fd:
                fd.write(rst_in_debug.encode())

            text = await get_rst_text(path)
            self.isin('node:edits', text)
            self.isin('inet:ipv4', text)

            # props output
            path = s_common.genpath(dirn, 'test3.rst')
            with s_common.genfile(path) as fd:
                fd.write(rst_in_props.encode())

            text = await get_rst_text(path)
            # drop the volatile .created timestamp line before comparing
            text_nocrt = '\n'.join(line for line in text.split('\n') if '.created =' not in line)
            self.eq(text_nocrt, rst_out_props)

            # http
            path = s_common.genpath(dirn, 'http.rst')
            with s_common.genfile(path) as fd:
                fd.write(rst_in_http.encode())

            text = await get_rst_text(path)
            self.isin('{}', text) # no mock gives empty response
            self.isin('inet:ipv4=1.2.3.4', text) # first mock
            self.isin('inet:ipv4=5.6.7.8', text) # one mock at a time
            self.isin('it:dev:str=notjson', text) # one mock at a time

            # boom1 test
            path = s_common.genpath(dirn, 'boom1.rst')
            with s_common.genfile(path) as fd:
                fd.write(boom1.encode())

            with self.raises(s_exc.NoSuchVar):
                await get_rst_text(path)

            # boom2 test
            path = s_common.genpath(dirn, 'boom2.rst')
            with s_common.genfile(path) as fd:
                fd.write(boom2.encode())

            with self.raises(s_exc.NoSuchVar):
                await get_rst_text(path)

            # boom3 test
            # path_boom3 is reused below for the cleanup check
            path_boom3 = s_common.genpath(dirn, 'boom3.rst')
            with s_common.genfile(path_boom3) as fd:
                fd.write(boom3.encode())

            with self.raises(s_exc.StormRuntimeError):
                await get_rst_text(path_boom3)

            # boom4 test
            path = s_common.genpath(dirn, 'boom4.rst')
            with s_common.genfile(path) as fd:
                fd.write(boom4.encode())

            with self.raises(s_exc.NoSuchVar):
                await get_rst_text(path)

            # boom5 test
            path = s_common.genpath(dirn, 'boom5.rst')
            with s_common.genfile(path) as fd:
                fd.write(boom5.encode())

            with self.raises(s_exc.NoSuchVar):
                await get_rst_text(path)

            # boom6 test
            path = s_common.genpath(dirn, 'boom6.rst')
            with s_common.genfile(path) as fd:
                fd.write(boom6.encode())

            with self.raises(s_exc.NeedConfValu):
                await get_rst_text(path)

            # boom7 test
            path = s_common.genpath(dirn, 'boom7.rst')
            with s_common.genfile(path) as fd:
                fd.write(boom7.encode())

            with self.raises(s_exc.NoSuchFile):
                await get_rst_text(path)

            # boom8 test
            path = s_common.genpath(dirn, 'boom8.rst')
            with s_common.genfile(path) as fd:
                fd.write(boom8.encode())

            with self.raises(s_exc.NoSuchFile):
                await get_rst_text(path)

            # boom9 test
            path = s_common.genpath(dirn, 'boom9.rst')
            with s_common.genfile(path) as fd:
                fd.write(boom9.encode())

            with self.raises(s_exc.NoSuchName):
                await get_rst_text(path)

            # make sure things get cleaned up
            async with await s_rstorm.StormRst.anit(path_boom3) as rstorm:
                try:
                    await rstorm.run()
                    self.fail('This must raise')
                except s_exc.StormRuntimeError:
                    pass

            # the cortex spun up by the document must be shut down and its
            # storage directory removed once the StormRst is finished
            self.true(rstorm.core.isfini)
            self.true(rstorm.isfini)
            self.false(os.path.exists(rstorm.core.dirn))

            # bad path
            path = s_common.genpath(dirn, 'newp.newp')
            with self.raises(s_exc.BadConfValu):
                await get_rst_text(path)
| apache-2.0 |
jmesteve/openerpseda | openerp/tools/which.py | 456 | 6884 | #!/usr/bin/env python
""" Which - locate a command
* adapted from Brian Curtin's http://bugs.python.org/file15381/shutil_which.patch
* see http://bugs.python.org/issue444582
* uses ``PATHEXT`` on Windows
* searches current directory before ``PATH`` on Windows,
but not before an explicitly passed path
* accepts both string or iterable for an explicitly passed path, or pathext
* accepts an explicitly passed empty path, or pathext (either '' or [])
* does not search ``PATH`` for files that have a path specified in their name already
* moved defpath and defpathext lists initialization to module level,
instead of initializing them on each function call
* changed interface: which_files() returns generator, which() returns first match,
or raises IOError(errno.ENOENT)
.. function:: which_files(file [, mode=os.F_OK | os.X_OK[, path=None[, pathext=None]]])
Return a generator which yields full paths in which the *file* name exists
in a directory that is part of the file name, or on *path*,
and has the given *mode*.
By default, *mode* matches an inclusive OR of os.F_OK and os.X_OK - an
existing executable file.
The *path* is, by default, the ``PATH`` variable on the platform,
or the string/iterable passed in as *path*.
In the event that a ``PATH`` variable is not found, :const:`os.defpath` is used.
On Windows, a current directory is searched before using the ``PATH`` variable,
but not before an explicitly passed *path*.
The *pathext* is only used on Windows to match files with given extensions appended as well.
It defaults to the ``PATHEXT`` variable, or the string/iterable passed in as *pathext*.
In the event that a ``PATHEXT`` variable is not found,
default value for Windows XP/Vista is used.
The command is always searched without extension first,
even when *pathext* is explicitly passed.
.. function:: which(file [, mode=os.F_OK | os.X_OK[, path=None[, pathext=None]]])
Return first match generated by which_files(file, mode, path, pathext),
or raise IOError(errno.ENOENT).
"""
__docformat__ = 'restructuredtext en'
__all__ = 'which which_files pathsep defpath defpathext F_OK R_OK W_OK X_OK'.split()

import sys
from os import access, defpath, pathsep, environ, F_OK, R_OK, W_OK, X_OK
from os.path import exists, dirname, split, join

# True on any Windows platform (sys.platform starts with 'win').
windows = sys.platform.startswith('win')

# Default search path: PATH from the environment, falling back to os.defpath,
# rebound here as a list of directories.
defpath = environ.get('PATH', defpath).split(pathsep)
if windows:
    defpath.insert(0, '.') # can insert without checking, when duplicates are removed
    # given the quite usual mess in PATH on Windows, let's rather remove duplicates
    seen = set()
    defpath = [dir for dir in defpath if dir.lower() not in seen and not seen.add(dir.lower())]
    del seen
    # Default extensions to try on Windows: '' first (bare name), then the
    # lowercased PATHEXT entries (XP/Vista defaults when PATHEXT is unset).
    defpathext = [''] + environ.get('PATHEXT',
        '.COM;.EXE;.BAT;.CMD;.VBS;.VBE;.JS;.JSE;.WSF;.WSH;.MSC').lower().split(pathsep)
else:
    # POSIX executables carry no special extension.
    defpathext = ['']
def which_files(file, mode=F_OK | X_OK, path=None, pathext=None):
    """Locate `file` and yield every matching full path.

    A path is yielded when the directory part of the file name (if any), or
    a directory on `path`, contains an entry named `file` -- optionally with
    one of the `pathext` extensions appended -- that exists and satisfies
    `mode` (checked with :func:`os.access`).

    :param file: name of the command/file to locate.  If it contains a
                 directory part, only that directory is searched and
                 `path` is ignored.
    :param mode: required access mode; defaults to ``F_OK | X_OK``
                 (an existing executable file).
    :param path: iterable of directories, a `pathsep`-separated string, or
                 `None` to use the module default (`defpath`).
    :param pathext: iterable of extensions, a `pathsep`-separated string, or
                    `None` to use the module default (`defpathext`).  The
                    bare name is always tried first, even when `pathext`
                    is passed explicitly.
    :return: generator of full paths (not necessarily absolute paths).
    """
    filepath, file = split(file)
    if filepath:
        # An explicit directory in the name wins over any search path.
        path = (filepath,)
    elif path is None:
        path = defpath
    elif isinstance(path, str):
        path = path.split(pathsep)
    if pathext is None:
        pathext = defpathext
    elif isinstance(pathext, str):
        pathext = pathext.split(pathsep)
    if '' not in pathext:
        # Always check the command without an extension first.  Build a new
        # list rather than pathext.insert(0, ''): the old in-place insert
        # mutated a caller-supplied list (and would corrupt the shared
        # module default) as a side effect.
        pathext = [''] + list(pathext)
    for dir in path:
        basepath = join(dir, file)
        for ext in pathext:
            fullpath = basepath + ext
            if exists(fullpath) and access(fullpath, mode):
                yield fullpath
def which(file, mode=F_OK | X_OK, path=None, pathext=None):
    """Locate a file in a path supplied as a part of the file name,
    or the user's path, or a supplied path, and return the first match.

    :param file: name of the command/file to locate (see :func:`which_files`).
    :param mode: required access mode; defaults to ``F_OK | X_OK``.
    :param path: search path override (see :func:`which_files`).
    :param pathext: extension list override (see :func:`which_files`).
    :return: the first matching full path (not necessarily absolute).
    :raise IOError: with ``errno.ENOENT`` when no match is found.
    """
    # Take the first yielded match with a plain for-loop.  The original
    # iter(...).next() only works on Python 2: the .next() method was
    # renamed __next__ in Python 3 (the portable spelling is the next()
    # builtin, available since 2.6, or this loop).
    for fullpath in which_files(file, mode, path, pathext):
        return fullpath
    try:
        from errno import ENOENT
    except ImportError:
        ENOENT = 2
    raise IOError(ENOENT, '%s not found' % (mode & X_OK and 'command' or 'file'), file)
if __name__ == '__main__':
    # Run this module's embedded doctests when executed as a script.
    import doctest
    doctest.testmod()

# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
fangxingli/hue | desktop/core/ext-py/pycparser-2.14/pycparser/yacctab.py | 35 | 129568 |
# yacctab.py
# This file is automatically generated. Do not edit.
# NOTE(review): these are LALR parse-table constants emitted by a PLY
# (yacc.py) table build — presumably for pycparser's C grammar; confirm
# against the generator before touching. Regenerate rather than edit.
_tabversion = '3.2'    # PLY table-format version this file was written for
_lr_method = 'LALR'    # parsing algorithm the tables encode
_lr_signature = '\x11\x82\x05\xfb:\x10\xfeo5\xb4\x11N\xe7S\xb4b'
_lr_action_items = {'VOID':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,66,78,80,82,87,89,90,91,92,93,94,95,96,141,165,167,169,170,171,172,173,174,175,176,191,197,198,204,219,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[6,6,-63,-74,-73,-60,-56,-57,-35,-31,-61,6,-36,-55,-70,-65,-54,6,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,6,-69,6,-72,-76,6,-59,-86,-261,-85,6,-113,-112,-32,-102,-101,6,6,6,-47,-48,6,-115,6,6,6,6,-92,6,6,6,6,-38,6,-49,6,6,-87,-93,-262,-103,-121,-120,6,6,6,6,6,-39,-41,-44,-40,-42,6,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,6,-175,-174,6,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'LBRACKET':([1,2,3,5,6,9,10,13,14,17,18,19,21,23,25,26,27,28,29,30,32,33,35,37,39,40,42,43,44,45,46,49,50,51,52,54,55,56,58,60,62,63,67,68,69,70,74,78,81,83,84,86,90,94,96,97,110,112,115,116,118,121,122,123,124,125,126,129,130,138,140,143,145,146,148,149,153,155,166,167,174,176,177,178,179,180,191,197,198,218,221,222,224,228,235,239,262,267,269,270,300,302,303,310,311,314,315,323,324,325,326,329,334,338,339,360,362,364,366,367,368,388,389,391,392,399,401,428,429,430,437,],[-263,-63,-74,-73,-60,-56,-57,-61,-263,-55,-70,-65,-54,-58,-178,65,-68,-263,-71,72,-75,-114,-66,-62,-64,-67,-263,-69,-263,-72,-76,-59,-52,-9,-10,-86,-261,-85,-51,65,-102,-101,-28,-122,-124,-27,72,72,164,-50,-53,72,-115,-263,-263,72,72,-248,-125,-123,-252,-243,-255,-259,-256,-253,-241,-242,226,-251,-228,-257,-249,-240,-254,-250,164,264,72,72,-87,-262,-23,-84,-24,-83,-103,-121,-120,-260,-258,-237,-236,-150,-152,72,-140,264,-154,-148,-248,-89,-88,-105,-104,-116,-119,-235,-234,-233,-232,-231,-244,72,72,-143,264,-141,-149,-151,-153,-118,-117,-229,-230,264,-142,-245,264,-238,-239,]),'WCHAR_CONST':([3,32,46,55
,65,67,69,70,72,77,82,103,104,105,115,119,127,128,134,135,137,139,141,142,144,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,309,335,336,362,365,370,371,373,374,375,376,379,380,381,383,384,385,393,395,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,429,432,433,434,436,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,124,124,-47,124,-28,-263,-125,-227,124,-225,124,-224,124,-223,124,124,-222,-226,-263,-223,124,124,124,-262,124,124,-223,124,124,-184,-187,-185,-181,-182,-186,-188,124,-190,-191,-183,-189,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,124,-12,124,124,-11,-223,-41,-44,-40,124,-42,124,124,-156,-155,-45,-157,124,-43,124,124,124,-263,-139,-175,-174,124,-172,124,124,-158,124,-171,-159,124,124,124,124,-263,124,124,-11,-170,-173,124,-162,124,-160,124,124,-161,124,124,124,-263,124,-166,-165,-163,124,124,124,-167,-164,124,-169,-168,]),'FLOAT_CONST':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,115,119,127,128,134,135,137,139,141,142,144,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,309,335,336,362,365,370,371,373,374,375,376,379,380,381,383,384,385,393,395,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,429,432,433,434,436,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,125,125,-47,125,-28,-263,-125,-227,125,-225,125,-224,125,-223,125,125,-222,-226,-263,-223,125,125,125,-262,125,125,-223,125,125,-184,-187,-185,-181,-182,-186,-188,125,-190,-191,-183,-189,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125,125
,125,125,125,125,125,125,125,-12,125,125,-11,-223,-41,-44,-40,125,-42,125,125,-156,-155,-45,-157,125,-43,125,125,125,-263,-139,-175,-174,125,-172,125,125,-158,125,-171,-159,125,125,125,125,-263,125,125,-11,-170,-173,125,-162,125,-160,125,125,-161,125,125,125,-263,125,-166,-165,-163,125,125,125,-167,-164,125,-169,-168,]),'MINUS':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,112,115,118,119,120,121,122,123,124,125,126,127,128,129,130,132,134,135,137,138,139,140,141,142,143,144,145,146,147,148,149,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,224,226,227,230,231,232,233,234,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,300,309,323,324,325,326,329,334,335,336,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,362,365,370,371,373,374,375,376,379,380,381,383,384,385,390,391,392,393,395,398,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,428,429,430,432,433,434,436,437,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,128,128,-47,128,-28,-263,-248,-125,-252,-227,-214,-243,-255,-259,-256,-253,-241,128,-225,-242,-216,-195,128,-224,128,-251,-223,-228,128,128,-257,-222,-249,-240,244,-254,-250,-226,-263,-223,128,128,128,-262,128,128,-223,128,128,-184,-187,-185,-181,-182,-186,-188,128,-190,-191,-183,-189,-260,128,-220,-258,-237,-236,128,128,128,-214,-219,128,-217,-218,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,128,-12,128,128,-11,-223,-41,-44,-40,128,-42,128,128,-156,-155,-45,-157,128,-43,-248,128,-235,-234,-233,-232,-231,-244,128,128,244,244,244,-200,244,244,244,-199,244,244,-197,-196,244,244,244,244,244,-198,-263,-139,-175,-174,128,-172,128,128,-158,128,-171,-159,128,128,-221,-229,-230,128,128,-215,-263,128,128,-11,-170,-173,128,-162,128,-160,128,128,-161,128,128,128,-245,-263,-238,128,-166,-165,-163,-239,128,128,128,-1
67,-164,128,-169,-168,]),'RPAREN':([1,2,3,5,6,9,10,13,14,17,18,19,21,23,25,26,27,28,29,32,33,35,37,39,40,42,43,44,45,46,49,50,51,52,53,54,56,58,59,60,62,63,66,67,68,69,70,74,78,81,83,84,90,94,96,106,107,108,109,110,111,112,113,114,115,116,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,151,153,158,159,160,161,165,166,167,174,176,177,178,179,180,191,197,198,199,200,201,202,218,220,221,222,224,227,228,231,232,234,235,236,237,238,239,240,269,270,275,285,302,303,310,311,314,315,318,319,320,321,322,323,324,325,326,328,329,330,332,333,334,337,338,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,366,367,368,378,388,389,390,391,392,397,398,410,412,415,416,417,419,428,430,432,435,437,438,439,442,],[-263,-63,-74,-73,-60,-56,-57,-61,-263,-55,-70,-65,-54,-58,-178,-111,-68,-263,-71,-75,-114,-66,-62,-64,-67,-263,-69,-263,-72,-76,-59,-52,-9,-10,90,-86,-85,-51,-113,-112,-102,-101,-263,-28,-122,-124,-27,-145,-263,-147,-50,-53,-115,-263,-263,197,-15,198,-128,-263,-16,-248,-126,-132,-125,-123,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,-193,-254,-250,-179,-146,-21,-22,269,270,-263,-145,-263,-87,-262,-23,-84,-24,-83,-103,-121,-120,-131,-1,-2,-130,-260,-220,-258,-237,-236,329,-150,-214,-219,-217,-152,334,336,-176,-263,-218,-154,-148,368,-14,-89,-88,-105,-104,-116,-119,-133,-127,-129,-180,390,-235,-234,-233,-232,-246,-231,392,395,396,-244,-144,-263,-145,-201,-213,-202,-200,-204,-208,-203,-199,-206,-211,-197,-196,-205,-212,-207,-209,-210,-198,-149,-151,-153,-13,-118,-117,-221,-229,-230,-177,-215,423,425,427,-247,428,-194,-245,-238,-263,440,-239,-263,443,446,]),'LONG':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,66,78,80,82,87,89,90,91,92,93,94,95,96,141,165,167,169,170,171,172,173,174,175,176,191,197,198,204,219,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,
371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[19,19,-63,-74,-73,-60,-56,-57,-35,-31,-61,19,-36,-55,-70,-65,-54,19,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,19,-69,19,-72,-76,19,-59,-86,-261,-85,19,-113,-112,-32,-102,-101,19,19,19,-47,-48,19,-115,19,19,19,19,-92,19,19,19,19,-38,19,-49,19,19,-87,-93,-262,-103,-121,-120,19,19,19,19,19,-39,-41,-44,-40,-42,19,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,19,-175,-174,19,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'PLUS':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,112,115,118,119,120,121,122,123,124,125,126,127,128,129,130,132,134,135,137,138,139,140,141,142,143,144,145,146,147,148,149,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,224,226,227,230,231,232,233,234,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,300,309,323,324,325,326,329,334,335,336,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,362,365,370,371,373,374,375,376,379,380,381,383,384,385,390,391,392,393,395,398,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,428,429,430,432,433,434,436,437,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,135,135,-47,135,-28,-263,-248,-125,-252,-227,-214,-243,-255,-259,-256,-253,-241,135,-225,-242,-216,-195,135,-224,135,-251,-223,-228,135,135,-257,-222,-249,-240,248,-254,-250,-226,-263,-223,135,135,135,-262,135,135,-223,135,135,-184,-187,-185,-181,-182,-186,-188,135,-190,-191,-183,-189,-260,135,-220,-258,-237,-236,135,135,135,-214,-219,135,-217,-218,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,135,-12,135,135,-11,-223,-41,-44,-40,135,-42,135,135,-156,-155,-45,-157,135,-43,-248,135,-235,-234,-233,-232,-231,-244,135,135,248,248,248,-200,248,248,248,-199,24
8,248,-197,-196,248,248,248,248,248,-198,-263,-139,-175,-174,135,-172,135,135,-158,135,-171,-159,135,135,-221,-229,-230,135,135,-215,-263,135,135,-11,-170,-173,135,-162,135,-160,135,135,-161,135,135,135,-245,-263,-238,135,-166,-165,-163,-239,135,135,135,-167,-164,135,-169,-168,]),'ELLIPSIS':([204,],[319,]),'GT':([112,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,390,391,392,398,428,430,437,],[-248,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,249,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-201,249,-202,-200,-204,249,-203,-199,-206,249,-197,-196,-205,249,249,249,249,-198,-221,-229,-230,-215,-245,-238,-239,]),'GOTO':([55,82,170,176,276,277,280,282,289,291,292,293,295,297,298,370,371,374,375,379,381,383,384,405,406,409,411,414,423,424,425,427,433,434,436,441,443,444,445,446,447,448,],[-261,-47,278,-262,-41,-44,-40,-42,278,-156,-155,-45,-157,278,-43,-175,-174,-172,278,-158,-171,-159,278,-170,-173,-162,278,-160,278,-161,278,278,-166,-165,-163,278,278,-167,-164,278,-169,-168,]),'ENUM':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,66,78,80,82,87,89,90,91,92,93,94,95,96,141,165,167,169,170,171,172,173,174,175,176,191,197,198,204,219,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[24,24,-63,-74,-73,-60,-56,-57,-35,-31,-61,24,-36,-55,-70,-65,-54,24,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,24,-69,24,-72,-76,24,-59,-86,-261,-85,24,-113,-112,-32,-102,-101,24,24,24,-47,-48,24,-115,24,24,24,24,-92,24,24,24,24,-38,24,-49,24,24,-87,-93,-262,-103,-121,-120,24,24,24,24,24,-39,-41,-44,-40,-42,24,-
156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,24,-175,-174,24,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'PERIOD':([55,112,118,121,122,123,124,125,126,129,130,138,140,143,145,146,148,149,155,176,218,221,222,224,262,267,300,323,324,325,326,329,334,360,362,364,391,392,399,401,428,429,430,437,],[-261,-248,-252,-243,-255,-259,-256,-253,-241,-242,225,-251,-228,-257,-249,-240,-254,-250,263,-262,-260,-258,-237,-236,-140,263,-248,-235,-234,-233,-232,-231,-244,-143,263,-141,-229,-230,263,-142,-245,263,-238,-239,]),'GE':([112,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,390,391,392,398,428,430,437,],[-248,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,253,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-201,253,-202,-200,-204,253,-203,-199,-206,253,-197,-196,-205,253,253,253,253,-198,-221,-229,-230,-215,-245,-238,-239,]),'INT_CONST_DEC':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,115,119,127,128,134,135,137,139,141,142,144,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,309,335,336,362,365,370,371,373,374,375,376,379,380,381,383,384,385,393,395,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,429,432,433,434,436,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,145,145,-47,145,-28,-263,-125,-227,145,-225,145,-224,145,-223,145,145,-222,-226,-263,-223,145,145,145,-262,145,145,-223,145,145,-184,-187,-185,-181,-182,-186,-188,145,-190,-191,-183,-189,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,145,1
45,145,145,145,145,-12,145,145,-11,-223,-41,-44,-40,145,-42,145,145,-156,-155,-45,-157,145,-43,145,145,145,-263,-139,-175,-174,145,-172,145,145,-158,145,-171,-159,145,145,145,145,-263,145,145,-11,-170,-173,145,-162,145,-160,145,145,-161,145,145,145,-263,145,-166,-165,-163,145,145,145,-167,-164,145,-169,-168,]),'ARROW':([112,118,121,122,123,124,125,126,129,130,138,140,143,145,146,148,149,176,218,221,222,224,300,323,324,325,326,329,334,391,392,428,430,437,],[-248,-252,-243,-255,-259,-256,-253,-241,-242,223,-251,-228,-257,-249,-240,-254,-250,-262,-260,-258,-237,-236,-248,-235,-234,-233,-232,-231,-244,-229,-230,-245,-238,-239,]),'HEX_FLOAT_CONST':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,115,119,127,128,134,135,137,139,141,142,144,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,309,335,336,362,365,370,371,373,374,375,376,379,380,381,383,384,385,393,395,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,429,432,433,434,436,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,148,148,-47,148,-28,-263,-125,-227,148,-225,148,-224,148,-223,148,148,-222,-226,-263,-223,148,148,148,-262,148,148,-223,148,148,-184,-187,-185,-181,-182,-186,-188,148,-190,-191,-183,-189,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,148,-12,148,148,-11,-223,-41,-44,-40,148,-42,148,148,-156,-155,-45,-157,148,-43,148,148,148,-263,-139,-175,-174,148,-172,148,148,-158,148,-171,-159,148,148,148,148,-263,148,148,-11,-170,-173,148,-162,148,-160,148,148,-161,148,148,148,-263,148,-166,-165,-163,148,148,148,-167,-164,148,-169,-168,]),'DOUBLE':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,66,78,80,82,87,89,90,91,92,93,94,95,96,14
1,165,167,169,170,171,172,173,174,175,176,191,197,198,204,219,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[40,40,-63,-74,-73,-60,-56,-57,-35,-31,-61,40,-36,-55,-70,-65,-54,40,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,40,-69,40,-72,-76,40,-59,-86,-261,-85,40,-113,-112,-32,-102,-101,40,40,40,-47,-48,40,-115,40,40,40,40,-92,40,40,40,40,-38,40,-49,40,40,-87,-93,-262,-103,-121,-120,40,40,40,40,40,-39,-41,-44,-40,-42,40,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,40,-175,-174,40,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'MINUSEQUAL':([112,118,120,121,122,123,124,125,126,129,130,138,140,143,145,146,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,390,391,392,398,428,430,437,],[-248,-252,207,-243,-255,-259,-256,-253,-241,-242,-216,-251,-228,-257,-249,-240,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-221,-229,-230,-215,-245,-238,-239,]),'INT_CONST_OCT':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,115,119,127,128,134,135,137,139,141,142,144,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,309,335,336,362,365,370,371,373,374,375,376,379,380,381,383,384,385,393,395,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,429,432,433,434,436,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,149,149,-47,149,-28,-263,-125,-227,149,-225,149,-224,149,-223,149,149,-222,-226,-263,-223,149,149,149,-262,149,149,-223,149,149,-184,-187,-185,-181,-182,-186,-188,149,-190,-191,-183,-189,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,149,14
9,149,149,149,149,-12,149,149,-11,-223,-41,-44,-40,149,-42,149,149,-156,-155,-45,-157,149,-43,149,149,149,-263,-139,-175,-174,149,-172,149,149,-158,149,-171,-159,149,149,149,149,-263,149,149,-11,-170,-173,149,-162,149,-160,149,149,-161,149,149,149,-263,149,-166,-165,-163,149,149,149,-167,-164,149,-169,-168,]),'TIMESEQUAL':([112,118,120,121,122,123,124,125,126,129,130,138,140,143,145,146,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,390,391,392,398,428,430,437,],[-248,-252,216,-243,-255,-259,-256,-253,-241,-242,-216,-251,-228,-257,-249,-240,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-221,-229,-230,-215,-245,-238,-239,]),'OR':([112,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,390,391,392,398,428,430,437,],[-248,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,258,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-201,258,-202,-200,-204,-208,-203,-199,-206,-211,-197,-196,-205,258,-207,-209,-210,-198,-221,-229,-230,-215,-245,-238,-239,]),'SHORT':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,66,78,80,82,87,89,90,91,92,93,94,95,96,141,165,167,169,170,171,172,173,174,175,176,191,197,198,204,219,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[2,2,-63,-74,-73,-60,-56,-57,-35,-31,-61,2,-36,-55,-70,-65,-54,2,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,2,-69,2,-72,-76,2,-59,-86,-261,-85,2,-113,-112,-32,-102,-101,2,2,2,-47,-48,2,-115,2,2,2,2,-92,2,2,2,2,-38,2,-49,2,2,-87,-93,-262,-103,-121,-120,2,2,2,2,2,-39,-41,-44,-40,-
42,2,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,2,-175,-174,2,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'RETURN':([55,82,170,176,276,277,280,282,289,291,292,293,295,297,298,370,371,374,375,379,381,383,384,405,406,409,411,414,423,424,425,427,433,434,436,441,443,444,445,446,447,448,],[-261,-47,281,-262,-41,-44,-40,-42,281,-156,-155,-45,-157,281,-43,-175,-174,-172,281,-158,-171,-159,281,-170,-173,-162,281,-160,281,-161,281,281,-166,-165,-163,281,281,-167,-164,281,-169,-168,]),'RSHIFTEQUAL':([112,118,120,121,122,123,124,125,126,129,130,138,140,143,145,146,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,390,391,392,398,428,430,437,],[-248,-252,217,-243,-255,-259,-256,-253,-241,-242,-216,-251,-228,-257,-249,-240,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-221,-229,-230,-215,-245,-238,-239,]),'RESTRICT':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,28,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,65,66,67,69,78,80,82,87,89,90,91,92,93,94,95,96,104,105,115,141,165,167,169,170,171,172,173,174,175,176,191,197,198,204,219,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[32,32,-63,-74,-73,-60,-56,-57,-35,-31,-61,32,-36,-55,-70,-65,-54,32,-58,-178,-111,-68,32,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,32,-69,32,-72,-76,32,-59,-86,-261,-85,32,-113,-112,-32,-102,-101,32,32,32,-124,32,32,-47,-48,32,-115,32,32,32,32,-92,32,32,32,-125,32,32,32,-38,32,-49,32,32,-87,-93,-262,-103,-121,-120,32,32,32,32,32,-39,-41,-44,-40,-42,32,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,32,-175,-174,32,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'STATIC':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,
33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,59,60,61,62,63,65,66,69,78,80,82,87,89,90,104,115,165,167,169,170,171,174,176,191,197,198,204,272,276,277,280,282,289,291,292,293,295,298,302,303,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[9,9,-63,-74,-73,-60,-56,-57,-35,-31,-61,9,-36,-55,-70,-65,-54,9,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,9,-69,9,-72,-76,9,-59,-86,-261,-85,-113,-112,-32,-102,-101,105,9,-124,9,9,-47,-48,9,-115,195,-125,9,9,-38,9,-49,-87,-262,-103,-121,-120,9,-39,-41,-44,-40,-42,9,-156,-155,-45,-157,-43,-89,-88,-105,-104,-116,-119,9,-175,-174,9,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'SIZEOF':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,115,119,127,128,134,135,137,139,141,142,144,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,309,335,336,362,365,370,371,373,374,375,376,379,380,381,383,384,385,393,395,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,429,432,433,434,436,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,127,127,-47,127,-28,-263,-125,-227,127,-225,127,-224,127,-223,127,127,-222,-226,-263,-223,127,127,127,-262,127,127,-223,127,127,-184,-187,-185,-181,-182,-186,-188,127,-190,-191,-183,-189,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,127,-12,127,127,-11,-223,-41,-44,-40,127,-42,127,127,-156,-155,-45,-157,127,-43,127,127,127,-263,-139,-175,-174,127,-172,127,127,-158,127,-171,-159,127,127,127,127,-263,127,127,-11,-170,-173,127,-162,127,-160,127,127,-161,127,127,127,-263,127,-166,-165,-163,127,127,127,-167,-164,127,-169,-168,]),'UNSIGNED':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35
,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,66,78,80,82,87,89,90,91,92,93,94,95,96,141,165,167,169,170,171,172,173,174,175,176,191,197,198,204,219,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[18,18,-63,-74,-73,-60,-56,-57,-35,-31,-61,18,-36,-55,-70,-65,-54,18,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,18,-69,18,-72,-76,18,-59,-86,-261,-85,18,-113,-112,-32,-102,-101,18,18,18,-47,-48,18,-115,18,18,18,18,-92,18,18,18,18,-38,18,-49,18,18,-87,-93,-262,-103,-121,-120,18,18,18,18,18,-39,-41,-44,-40,-42,18,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,18,-175,-174,18,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'UNION':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,66,78,80,82,87,89,90,91,92,93,94,95,96,141,165,167,169,170,171,172,173,174,175,176,191,197,198,204,219,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[20,20,-63,-74,-73,-60,-56,-57,-35,-31,-61,20,-36,-55,-70,-65,-54,20,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,20,-69,20,-72,-76,20,-59,-86,-261,-85,20,-113,-112,-32,-102,-101,20,20,20,-47,-48,20,-115,20,20,20,20,-92,20,20,20,20,-38,20,-49,20,20,-87,-93,-262,-103,-121,-120,20,20,20,20,20,-39,-41,-44,-40,-42,20,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,20,-175,-174,20,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'COLON':([2,3,5,6,13,18,19,25,26,27,29,32,33,35,37,39,40,43,45,46,54,56,59,60,62,63,90,94,96,97,112,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,151,174,176,177,178,179,180,187,191,197,198,218,220,221,2
22,224,231,232,234,238,240,286,300,302,303,305,306,310,311,314,315,321,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,377,388,389,390,391,392,397,398,419,428,430,437,],[-63,-74,-73,-60,-61,-70,-65,-178,-111,-68,-71,-75,-114,-66,-62,-64,-67,-69,-72,-76,-86,-85,-113,-112,-102,-101,-115,-263,-263,181,-248,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,-193,-254,-250,-179,-87,-262,-23,-84,-24,-83,309,-103,-121,-120,-260,-220,-258,-237,-236,-214,-219,-217,-176,-218,375,384,-89,-88,-192,181,-105,-104,-116,-119,-180,-235,-234,-233,-232,-231,-244,-201,-213,-202,-200,-204,-208,-203,-199,-206,-211,-197,-196,-205,-212,-207,-209,400,-210,-198,411,-118,-117,-221,-229,-230,-177,-215,-194,-245,-238,-239,]),'$end':([0,8,11,12,15,22,31,36,38,47,61,82,169,176,272,383,],[-263,0,-35,-31,-36,-29,-34,-33,-37,-30,-32,-47,-38,-262,-39,-159,]),'WSTRING_LITERAL':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,115,119,121,123,127,128,134,135,137,139,141,142,144,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,218,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,309,335,336,362,365,370,371,373,374,375,376,379,380,381,383,384,385,393,395,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,429,432,433,434,436,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,123,123,-47,123,-28,-263,-125,-227,218,-259,123,-225,123,-224,123,-223,123,123,-222,-226,-263,-223,123,123,123,-262,123,123,-223,123,123,-184,-187,-185,-181,-182,-186,-188,123,-190,-191,-183,-189,-260,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,123,-12,123,123,-11,-223,-41,-44,-40,123,-42,123,123,-156,-155,-45,-157,123,-43,123,123,123,-263,-139,-175,-174,123,-172,123,123,-158,123,-171,-159,123,123,123,123,-263,123,123
,-11,-170,-173,123,-162,123,-160,123,123,-161,123,123,123,-263,123,-166,-165,-163,123,123,123,-167,-164,123,-169,-168,]),'DIVIDE':([112,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,390,391,392,398,428,430,437,],[-248,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,251,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,251,251,251,251,251,251,251,251,251,251,-197,-196,251,251,251,251,251,-198,-221,-229,-230,-215,-245,-238,-239,]),'FOR':([55,82,170,176,276,277,280,282,289,291,292,293,295,297,298,370,371,374,375,379,381,383,384,405,406,409,411,414,423,424,425,427,433,434,436,441,443,444,445,446,447,448,],[-261,-47,283,-262,-41,-44,-40,-42,283,-156,-155,-45,-157,283,-43,-175,-174,-172,283,-158,-171,-159,283,-170,-173,-162,283,-160,283,-161,283,283,-166,-165,-163,283,283,-167,-164,283,-169,-168,]),'PLUSPLUS':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,112,115,118,119,121,122,123,124,125,126,127,128,129,130,134,135,137,138,139,140,141,142,143,144,145,146,148,149,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,218,219,221,222,224,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,300,309,323,324,325,326,329,334,335,336,362,365,370,371,373,374,375,376,379,380,381,383,384,385,391,392,393,395,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,428,429,430,432,433,434,436,437,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,137,137,-47,137,-28,-263,-248,-125,-252,-227,-243,-255,-259,-256,-253,-241,137,-225,-242,224,137,-224,137,-251,-223,-228,137,137,-257,-222,-249,-240,-254,-250,-226,-263,-223,137,137,137,-262,137,137,-223,137,137,-184,-187,
-185,-181,-182,-186,-188,137,-190,-191,-183,-189,-260,137,-258,-237,-236,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,137,-12,137,137,-11,-223,-41,-44,-40,137,-42,137,137,-156,-155,-45,-157,137,-43,-248,137,-235,-234,-233,-232,-231,-244,137,137,-263,-139,-175,-174,137,-172,137,137,-158,137,-171,-159,137,137,-229,-230,137,137,-263,137,137,-11,-170,-173,137,-162,137,-160,137,137,-161,137,137,137,-245,-263,-238,137,-166,-165,-163,-239,137,137,137,-167,-164,137,-169,-168,]),'EQUALS':([1,2,3,5,6,9,10,13,14,17,18,19,21,23,25,26,27,29,30,32,33,35,37,39,40,42,43,44,45,46,49,50,51,52,54,56,58,59,60,62,63,80,83,84,86,90,102,112,118,120,121,122,123,124,125,126,129,130,138,140,143,145,146,148,149,168,174,176,191,197,198,218,220,221,222,224,231,232,234,240,262,267,300,302,303,310,311,314,315,323,324,325,326,329,334,360,364,388,389,390,391,392,398,401,428,430,437,],[-263,-63,-74,-73,-60,-56,-57,-61,-263,-55,-70,-65,-54,-58,-178,-111,-68,-71,77,-75,-114,-66,-62,-64,-67,-263,-69,-263,-72,-76,-59,-52,-9,-10,-86,-85,-51,-113,-112,-102,-101,162,-50,-53,77,-115,192,-248,-252,209,-243,-255,-259,-256,-253,-241,-242,-216,-251,-228,-257,-249,-240,-254,-250,162,-87,-262,-103,-121,-120,-260,-220,-258,-237,-236,-214,-219,-217,-218,-140,365,-248,-89,-88,-105,-104,-116,-119,-235,-234,-233,-232,-231,-244,-143,-141,-118,-117,-221,-229,-230,-215,-142,-245,-238,-239,]),'ELSE':([176,276,277,280,282,293,298,370,371,374,381,383,405,406,409,414,424,433,434,436,444,445,447,448,],[-262,-41,-44,-40,-42,-45,-43,-175,-174,-172,-171,-159,-170,-173,-162,-160,-161,-166,-165,441,-167,-164,-169,-168,]),'ANDEQUAL':([112,118,120,121,122,123,124,125,126,129,130,138,140,143,145,146,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,390,391,392,398,428,430,437,],[-248,-252,214,-243,-255,-259,-256,-253,-241,-242,-216,-251,-228,-257,-249,-240,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-221,-229,-230,-
215,-245,-238,-239,]),'EQ':([112,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,390,391,392,398,428,430,437,],[-248,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,255,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-201,255,-202,-200,-204,-208,-203,-199,-206,255,-197,-196,-205,255,-207,255,255,-198,-221,-229,-230,-215,-245,-238,-239,]),'AND':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,112,115,118,119,120,121,122,123,124,125,126,127,128,129,130,132,134,135,137,138,139,140,141,142,143,144,145,146,147,148,149,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,224,226,227,230,231,232,233,234,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,300,309,323,324,325,326,329,334,335,336,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,362,365,370,371,373,374,375,376,379,380,381,383,384,385,390,391,392,393,395,398,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,428,429,430,432,433,434,436,437,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,144,144,-47,144,-28,-263,-248,-125,-252,-227,-214,-243,-255,-259,-256,-253,-241,144,-225,-242,-216,-195,144,-224,144,-251,-223,-228,144,144,-257,-222,-249,-240,256,-254,-250,-226,-263,-223,144,144,144,-262,144,144,-223,144,144,-184,-187,-185,-181,-182,-186,-188,144,-190,-191,-183,-189,-260,144,-220,-258,-237,-236,144,144,144,-214,-219,144,-217,-218,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,144,-12,144,144,-11,-223,-41,-44,-40,144,-42,144,144,-156,-155,-45,-157,144,-43,-248,144,-235,-234,-233,-232,-231,-244,144,144,-201,256,-202,-200,-204,-208,
-203,-199,-206,256,-197,-196,-205,256,-207,-209,256,-198,-263,-139,-175,-174,144,-172,144,144,-158,144,-171,-159,144,144,-221,-229,-230,144,144,-215,-263,144,144,-11,-170,-173,144,-162,144,-160,144,144,-161,144,144,144,-245,-263,-238,144,-166,-165,-163,-239,144,144,144,-167,-164,144,-169,-168,]),'TYPEID':([0,1,2,3,5,6,7,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,26,27,28,29,31,32,33,34,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,66,67,68,69,70,74,78,80,82,87,89,90,91,92,93,94,95,96,115,116,141,165,166,167,169,170,171,172,173,174,175,176,191,197,198,204,219,223,225,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[25,25,-63,-74,-73,-60,54,-56,-57,-35,-31,-61,25,-36,59,-55,-70,-65,-91,-54,25,-58,62,-178,-111,-68,-263,-71,-34,-75,-114,-90,-66,-33,-62,-37,-64,-67,25,-69,25,-72,-76,25,-59,-86,-261,-85,25,-113,-112,-32,-102,-101,25,-28,-122,-124,-27,59,25,25,-47,-48,25,-115,25,25,25,25,-92,25,-125,-123,25,25,59,25,-38,25,-49,25,25,-87,-93,-262,-103,-121,-120,25,25,323,325,25,25,25,-39,-41,-44,-40,-42,25,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,25,-175,-174,25,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'LBRACE':([7,20,24,26,33,34,48,54,55,56,59,60,62,63,77,80,82,85,87,88,89,90,155,162,163,170,171,176,197,198,260,266,268,276,277,280,282,289,291,292,293,295,297,298,314,315,336,362,365,370,371,374,375,379,381,383,384,388,389,390,395,396,399,402,403,405,406,409,411,414,423,424,425,427,429,433,434,436,441,443,444,445,446,447,448,],[55,-91,55,-111,-114,-90,-263,55,-261,55,-113,-112,55,55,55,-263,-47,-7,-48,55,-8,-115,-263,55,55,55,-49,-262,-121,-120,-12,55,-11,-41,-44,-40,-42,55,-156,-155,-45,-157,55,-43,-116,-119,55,-263,-139,-175,-174,-172,55,-158,-171,-159,55,-118,-117,55,55,55,-263,55,-11,-170,-173,-162,55,-160,55,-161,55,55,-263,-166,-165,-163,55,55,-1
67,-164,55,-169,-168,]),'PPHASH':([0,11,12,15,22,31,36,38,61,82,169,176,272,383,],[38,-35,-31,-36,38,-34,-33,-37,-32,-47,-38,-262,-39,-159,]),'INT':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,66,78,80,82,87,89,90,91,92,93,94,95,96,141,165,167,169,170,171,172,173,174,175,176,191,197,198,204,219,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[39,39,-63,-74,-73,-60,-56,-57,-35,-31,-61,39,-36,-55,-70,-65,-54,39,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,39,-69,39,-72,-76,39,-59,-86,-261,-85,39,-113,-112,-32,-102,-101,39,39,39,-47,-48,39,-115,39,39,39,39,-92,39,39,39,39,-38,39,-49,39,39,-87,-93,-262,-103,-121,-120,39,39,39,39,39,-39,-41,-44,-40,-42,39,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,39,-175,-174,39,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'SIGNED':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,66,78,80,82,87,89,90,91,92,93,94,95,96,141,165,167,169,170,171,172,173,174,175,176,191,197,198,204,219,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[43,43,-63,-74,-73,-60,-56,-57,-35,-31,-61,43,-36,-55,-70,-65,-54,43,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,43,-69,43,-72,-76,43,-59,-86,-261,-85,43,-113,-112,-32,-102,-101,43,43,43,-47,-48,43,-115,43,43,43,43,-92,43,43,43,43,-38,43,-49,43,43,-87,-93,-262,-103,-121,-120,43,43,43,43,43,-39,-41,-44,-40,-42,43,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,43,-175,-174,43,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'CONTINUE
':([55,82,170,176,276,277,280,282,289,291,292,293,295,297,298,370,371,374,375,379,381,383,384,405,406,409,411,414,423,424,425,427,433,434,436,441,443,444,445,446,447,448,],[-261,-47,284,-262,-41,-44,-40,-42,284,-156,-155,-45,-157,284,-43,-175,-174,-172,284,-158,-171,-159,284,-170,-173,-162,284,-160,284,-161,284,284,-166,-165,-163,284,284,-167,-164,284,-169,-168,]),'NOT':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,115,119,127,128,134,135,137,139,141,142,144,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,309,335,336,362,365,370,371,373,374,375,376,379,380,381,383,384,385,393,395,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,429,432,433,434,436,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,152,152,-47,152,-28,-263,-125,-227,152,-225,152,-224,152,-223,152,152,-222,-226,-263,-223,152,152,152,-262,152,152,-223,152,152,-184,-187,-185,-181,-182,-186,-188,152,-190,-191,-183,-189,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,152,-12,152,152,-11,-223,-41,-44,-40,152,-42,152,152,-156,-155,-45,-157,152,-43,152,152,152,-263,-139,-175,-174,152,-172,152,152,-158,152,-171,-159,152,152,152,152,-263,152,152,-11,-170,-173,152,-162,152,-160,152,152,-161,152,152,152,-263,152,-166,-165,-163,152,152,152,-167,-164,152,-169,-168,]),'OREQUAL':([112,118,120,121,122,123,124,125,126,129,130,138,140,143,145,146,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,390,391,392,398,428,430,437,],[-248,-252,215,-243,-255,-259,-256,-253,-241,-242,-216,-251,-228,-257,-249,-240,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-221,-229,-230,-215,-245,-238,-239,]),'MOD':([112,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,
146,147,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,390,391,392,398,428,430,437,],[-248,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,259,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,259,259,259,259,259,259,259,259,259,259,-197,-196,259,259,259,259,259,-198,-221,-229,-230,-215,-245,-238,-239,]),'RSHIFT':([112,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,390,391,392,398,428,430,437,],[-248,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,241,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-201,241,-202,-200,241,241,241,-199,241,241,-197,-196,241,241,241,241,241,-198,-221,-229,-230,-215,-245,-238,-239,]),'DEFAULT':([55,82,170,176,276,277,280,282,289,291,292,293,295,297,298,370,371,374,375,379,381,383,384,405,406,409,411,414,423,424,425,427,433,434,436,441,443,444,445,446,447,448,],[-261,-47,286,-262,-41,-44,-40,-42,286,-156,-155,-45,-157,286,-43,-175,-174,-172,286,-158,-171,-159,286,-170,-173,-162,286,-160,286,-161,286,286,-166,-165,-163,286,286,-167,-164,286,-169,-168,]),'CHAR':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,66,78,80,82,87,89,90,91,92,93,94,95,96,141,165,167,169,170,171,172,173,174,175,176,191,197,198,204,219,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[37,37,-63,-74,-73,-60,-56,-57,-35,-31,-61,37,-36,-55,-70,-65,-54,37,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,37,-69,37,-72,-76,37,-5
9,-86,-261,-85,37,-113,-112,-32,-102,-101,37,37,37,-47,-48,37,-115,37,37,37,37,-92,37,37,37,37,-38,37,-49,37,37,-87,-93,-262,-103,-121,-120,37,37,37,37,37,-39,-41,-44,-40,-42,37,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,37,-175,-174,37,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'WHILE':([55,82,170,176,276,277,280,282,289,291,292,293,295,297,298,370,371,374,375,379,381,382,383,384,405,406,409,411,414,423,424,425,427,433,434,436,441,443,444,445,446,447,448,],[-261,-47,287,-262,-41,-44,-40,-42,287,-156,-155,-45,-157,287,-43,-175,-174,-172,287,-158,-171,413,-159,287,-170,-173,-162,287,-160,287,-161,287,287,-166,-165,-163,287,287,-167,-164,287,-169,-168,]),'DIVEQUAL':([112,118,120,121,122,123,124,125,126,129,130,138,140,143,145,146,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,390,391,392,398,428,430,437,],[-248,-252,206,-243,-255,-259,-256,-253,-241,-242,-216,-251,-228,-257,-249,-240,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-221,-229,-230,-215,-245,-238,-239,]),'EXTERN':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,59,60,61,62,63,66,78,80,82,87,89,90,165,167,169,170,171,174,176,191,197,198,204,272,276,277,280,282,289,291,292,293,295,298,302,303,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[10,10,-63,-74,-73,-60,-56,-57,-35,-31,-61,10,-36,-55,-70,-65,-54,10,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,10,-69,10,-72,-76,10,-59,-86,-261,-85,-113,-112,-32,-102,-101,10,10,10,-47,-48,10,-115,10,10,-38,10,-49,-87,-262,-103,-121,-120,10,-39,-41,-44,-40,-42,10,-156,-155,-45,-157,-43,-89,-88,-105,-104,-116,-119,10,-175,-174,10,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'CASE':([55,82,170,176,276,277,280,282,289,291,292,293,295,297,298,
370,371,374,375,379,381,383,384,405,406,409,411,414,423,424,425,427,433,434,436,441,443,444,445,446,447,448,],[-261,-47,288,-262,-41,-44,-40,-42,288,-156,-155,-45,-157,288,-43,-175,-174,-172,288,-158,-171,-159,288,-170,-173,-162,288,-160,288,-161,288,288,-166,-165,-163,288,288,-167,-164,288,-169,-168,]),'LAND':([112,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,390,391,392,398,428,430,437,],[-248,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,254,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-201,254,-202,-200,-204,-208,-203,-199,-206,-211,-197,-196,-205,-212,-207,-209,-210,-198,-221,-229,-230,-215,-245,-238,-239,]),'REGISTER':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,59,60,61,62,63,66,78,80,82,87,89,90,165,167,169,170,171,174,176,191,197,198,204,272,276,277,280,282,289,291,292,293,295,298,302,303,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[17,17,-63,-74,-73,-60,-56,-57,-35,-31,-61,17,-36,-55,-70,-65,-54,17,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,17,-69,17,-72,-76,17,-59,-86,-261,-85,-113,-112,-32,-102,-101,17,17,17,-47,-48,17,-115,17,17,-38,17,-49,-87,-262,-103,-121,-120,17,-39,-41,-44,-40,-42,17,-156,-155,-45,-157,-43,-89,-88,-105,-104,-116,-119,17,-175,-174,17,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'MODEQUAL':([112,118,120,121,122,123,124,125,126,129,130,138,140,143,145,146,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,390,391,392,398,428,430,437,],[-248,-252,208,-243,-255,-259,-256,-253,-241,-242,-216,-251,-228,-257,-249,-240,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219
,-217,-218,-248,-235,-234,-233,-232,-231,-244,-221,-229,-230,-215,-245,-238,-239,]),'NE':([112,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,390,391,392,398,428,430,437,],[-248,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,246,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-201,246,-202,-200,-204,-208,-203,-199,-206,246,-197,-196,-205,246,-207,246,246,-198,-221,-229,-230,-215,-245,-238,-239,]),'SWITCH':([55,82,170,176,276,277,280,282,289,291,292,293,295,297,298,370,371,374,375,379,381,383,384,405,406,409,411,414,423,424,425,427,433,434,436,441,443,444,445,446,447,448,],[-261,-47,290,-262,-41,-44,-40,-42,290,-156,-155,-45,-157,290,-43,-175,-174,-172,290,-158,-171,-159,290,-170,-173,-162,290,-160,290,-161,290,290,-166,-165,-163,290,290,-167,-164,290,-169,-168,]),'INT_CONST_HEX':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,115,119,127,128,134,135,137,139,141,142,144,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,309,335,336,362,365,370,371,373,374,375,376,379,380,381,383,384,385,393,395,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,429,432,433,434,436,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,138,138,-47,138,-28,-263,-125,-227,138,-225,138,-224,138,-223,138,138,-222,-226,-263,-223,138,138,138,-262,138,138,-223,138,138,-184,-187,-185,-181,-182,-186,-188,138,-190,-191,-183,-189,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,138,-12,138,138,-11,-223,-41,-44,-40,138,-42,138,138,-156,-155,-45,-157,138,-43,138,138,138,-263,-
139,-175,-174,138,-172,138,138,-158,138,-171,-159,138,138,138,138,-263,138,138,-11,-170,-173,138,-162,138,-160,138,138,-161,138,138,138,-263,138,-166,-165,-163,138,138,138,-167,-164,138,-169,-168,]),'_COMPLEX':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,66,78,80,82,87,89,90,91,92,93,94,95,96,141,165,167,169,170,171,172,173,174,175,176,191,197,198,204,219,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[27,27,-63,-74,-73,-60,-56,-57,-35,-31,-61,27,-36,-55,-70,-65,-54,27,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,27,-69,27,-72,-76,27,-59,-86,-261,-85,27,-113,-112,-32,-102,-101,27,27,27,-47,-48,27,-115,27,27,27,27,-92,27,27,27,27,-38,27,-49,27,27,-87,-93,-262,-103,-121,-120,27,27,27,27,27,-39,-41,-44,-40,-42,27,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,27,-175,-174,27,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'PLUSEQUAL':([112,118,120,121,122,123,124,125,126,129,130,138,140,143,145,146,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,390,391,392,398,428,430,437,],[-248,-252,211,-243,-255,-259,-256,-253,-241,-242,-216,-251,-228,-257,-249,-240,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-221,-229,-230,-215,-245,-238,-239,]),'STRUCT':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,66,78,80,82,87,89,90,91,92,93,94,95,96,141,165,167,169,170,171,172,173,174,175,176,191,197,198,204,219,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[34,34,-63,-74,-73,-60,-56,-57,-35,-31,-61,34,-
36,-55,-70,-65,-54,34,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,34,-69,34,-72,-76,34,-59,-86,-261,-85,34,-113,-112,-32,-102,-101,34,34,34,-47,-48,34,-115,34,34,34,34,-92,34,34,34,34,-38,34,-49,34,34,-87,-93,-262,-103,-121,-120,34,34,34,34,34,-39,-41,-44,-40,-42,34,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,34,-175,-174,34,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'CONDOP':([112,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,390,391,392,398,428,430,437,],[-248,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,257,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-201,-213,-202,-200,-204,-208,-203,-199,-206,-211,-197,-196,-205,-212,-207,-209,-210,-198,-221,-229,-230,-215,-245,-238,-239,]),'BREAK':([55,82,170,176,276,277,280,282,289,291,292,293,295,297,298,370,371,374,375,379,381,383,384,405,406,409,411,414,423,424,425,427,433,434,436,441,443,444,445,446,447,448,],[-261,-47,294,-262,-41,-44,-40,-42,294,-156,-155,-45,-157,294,-43,-175,-174,-172,294,-158,-171,-159,294,-170,-173,-162,294,-160,294,-161,294,294,-166,-165,-163,294,294,-167,-164,294,-169,-168,]),'VOLATILE':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,28,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,65,66,67,69,78,80,82,87,89,90,91,92,93,94,95,96,104,105,115,141,165,167,169,170,171,172,173,174,175,176,191,197,198,204,219,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[46,46,-63,-74,-73,-60,-56,-57,-35,-31,-61,46,-36,-55,-70,-65,-54,46,-58,-178,-111,-68,46,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,46,-69,46,-72,-76
,46,-59,-86,-261,-85,46,-113,-112,-32,-102,-101,46,46,46,-124,46,46,-47,-48,46,-115,46,46,46,46,-92,46,46,46,-125,46,46,46,-38,46,-49,46,46,-87,-93,-262,-103,-121,-120,46,46,46,46,46,-39,-41,-44,-40,-42,46,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,46,-175,-174,46,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'INLINE':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,59,60,61,62,63,66,78,80,82,87,89,90,165,167,169,170,171,174,176,191,197,198,204,272,276,277,280,282,289,291,292,293,295,298,302,303,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[49,49,-63,-74,-73,-60,-56,-57,-35,-31,-61,49,-36,-55,-70,-65,-54,49,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,49,-69,49,-72,-76,49,-59,-86,-261,-85,-113,-112,-32,-102,-101,49,49,49,-47,-48,49,-115,49,49,-38,49,-49,-87,-262,-103,-121,-120,49,-39,-41,-44,-40,-42,49,-156,-155,-45,-157,-43,-89,-88,-105,-104,-116,-119,49,-175,-174,49,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'INT_CONST_BIN':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,115,119,127,128,134,135,137,139,141,142,144,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,309,335,336,362,365,370,371,373,374,375,376,379,380,381,383,384,385,393,395,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,429,432,433,434,436,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,118,118,-47,118,-28,-263,-125,-227,118,-225,118,-224,118,-223,118,118,-222,-226,-263,-223,118,118,118,-262,118,118,-223,118,118,-184,-187,-185,-181,-182,-186,-188,118,-190,-191,-183,-189,118,118,118,118,118,118,118,1
18,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,118,-12,118,118,-11,-223,-41,-44,-40,118,-42,118,118,-156,-155,-45,-157,118,-43,118,118,118,-263,-139,-175,-174,118,-172,118,118,-158,118,-171,-159,118,118,118,118,-263,118,118,-11,-170,-173,118,-162,118,-160,118,118,-161,118,118,118,-263,118,-166,-165,-163,118,118,118,-167,-164,118,-169,-168,]),'DO':([55,82,170,176,276,277,280,282,289,291,292,293,295,297,298,370,371,374,375,379,381,383,384,405,406,409,411,414,423,424,425,427,433,434,436,441,443,444,445,446,447,448,],[-261,-47,297,-262,-41,-44,-40,-42,297,-156,-155,-45,-157,297,-43,-175,-174,-172,297,-158,-171,-159,297,-170,-173,-162,297,-160,297,-161,297,297,-166,-165,-163,297,297,-167,-164,297,-169,-168,]),'LNOT':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,115,119,127,128,134,135,137,139,141,142,144,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,309,335,336,362,365,370,371,373,374,375,376,379,380,381,383,384,385,393,395,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,429,432,433,434,436,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,119,119,-47,119,-28,-263,-125,-227,119,-225,119,-224,119,-223,119,119,-222,-226,-263,-223,119,119,119,-262,119,119,-223,119,119,-184,-187,-185,-181,-182,-186,-188,119,-190,-191,-183,-189,119,119,119,119,119,119,119,119,119,119,119,119,119,119,119,119,119,119,119,119,119,119,119,119,-12,119,119,-11,-223,-41,-44,-40,119,-42,119,119,-156,-155,-45,-157,119,-43,119,119,119,-263,-139,-175,-174,119,-172,119,119,-158,119,-171,-159,119,119,119,119,-263,119,119,-11,-170,-173,119,-162,119,-160,119,119,-161,119,119,119,-263,119,-166,-165,-163,119,119,119,-167,-164,119,-169,-168,]),'CONST':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,28,29,31,32,33,35,36,37,38,39,40,4
2,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,65,66,67,69,78,80,82,87,89,90,91,92,93,94,95,96,104,105,115,141,165,167,169,170,171,172,173,174,175,176,191,197,198,204,219,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[3,3,-63,-74,-73,-60,-56,-57,-35,-31,-61,3,-36,-55,-70,-65,-54,3,-58,-178,-111,-68,3,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,3,-69,3,-72,-76,3,-59,-86,-261,-85,3,-113,-112,-32,-102,-101,3,3,3,-124,3,3,-47,-48,3,-115,3,3,3,3,-92,3,3,3,-125,3,3,3,-38,3,-49,3,3,-87,-93,-262,-103,-121,-120,3,3,3,3,3,-39,-41,-44,-40,-42,3,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,3,-175,-174,3,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'LOR':([112,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,390,391,392,398,428,430,437,],[-248,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,242,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-201,-213,-202,-200,-204,-208,-203,-199,-206,-211,-197,-196,-205,-212,-207,-209,-210,-198,-221,-229,-230,-215,-245,-238,-239,]),'CHAR_CONST':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,115,119,127,128,134,135,137,139,141,142,144,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,309,335,336,362,365,370,371,373,374,375,376,379,380,381,383,384,385,393,395,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,429,432,433,434,436,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-
27,122,122,-47,122,-28,-263,-125,-227,122,-225,122,-224,122,-223,122,122,-222,-226,-263,-223,122,122,122,-262,122,122,-223,122,122,-184,-187,-185,-181,-182,-186,-188,122,-190,-191,-183,-189,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,122,-12,122,122,-11,-223,-41,-44,-40,122,-42,122,122,-156,-155,-45,-157,122,-43,122,122,122,-263,-139,-175,-174,122,-172,122,122,-158,122,-171,-159,122,122,122,122,-263,122,122,-11,-170,-173,122,-162,122,-160,122,122,-161,122,122,122,-263,122,-166,-165,-163,122,122,122,-167,-164,122,-169,-168,]),'LSHIFT':([112,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,390,391,392,398,428,430,437,],[-248,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,243,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-201,243,-202,-200,243,243,243,-199,243,243,-197,-196,243,243,243,243,243,-198,-221,-229,-230,-215,-245,-238,-239,]),'RBRACE':([55,82,93,95,100,101,102,112,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,151,155,156,170,172,173,175,176,188,189,190,218,220,221,222,224,231,232,234,240,261,265,268,276,277,280,282,289,291,292,293,295,296,298,299,305,307,308,312,313,321,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,359,362,363,370,371,374,379,381,383,390,391,392,398,404,405,406,409,414,418,419,420,424,428,429,430,433,434,436,437,444,445,447,448,],[-261,-47,176,-92,-106,176,-109,-248,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,-193,-254,-250,-179,-263,-134,-263,176,176,-93,-262,176,176,-107,-260,-220,-258,-237,-236,-214,-219,-217,-218,176,-20,-19,-41,-44,-40,-42,-6,-156,-155,-45,-157,-5,-43,176,-192,-94,-95,-108,-110,-180,-235,-234,-233,-232,-231,-244,-20
1,-213,-202,-200,-204,-208,-203,-199,-206,-211,-197,-196,-205,-212,-207,-209,-210,-198,-135,176,-137,-175,-174,-172,-158,-171,-159,-221,-229,-230,-215,-136,-170,-173,-162,-160,176,-194,-138,-161,-245,176,-238,-166,-165,-163,-239,-167,-164,-169,-168,]),'_BOOL':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,66,78,80,82,87,89,90,91,92,93,94,95,96,141,165,167,169,170,171,172,173,174,175,176,191,197,198,204,219,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[13,13,-63,-74,-73,-60,-56,-57,-35,-31,-61,13,-36,-55,-70,-65,-54,13,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,13,-69,13,-72,-76,13,-59,-86,-261,-85,13,-113,-112,-32,-102,-101,13,13,13,-47,-48,13,-115,13,13,13,13,-92,13,13,13,13,-38,13,-49,13,13,-87,-93,-262,-103,-121,-120,13,13,13,13,13,-39,-41,-44,-40,-42,13,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,13,-175,-174,13,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'LE':([112,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,390,391,392,398,428,430,437,],[-248,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,245,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-201,245,-202,-200,-204,245,-203,-199,-206,245,-197,-196,-205,245,245,245,245,-198,-221,-229,-230,-215,-245,-238,-239,]),'SEMI':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,28,29,30,31,32,33,35,36,37,38,39,40,41,42,43,44,45,46,49,50,51,52,54,55,56,58,59,60,61,62,63,67,68,69,70,71,73,74,75,76,79,80,81,82,83,84,86,90,94,96,97,112,115,116,118,120,121,122,123,124,125
,126,129,130,132,138,140,143,145,146,147,148,149,151,153,154,156,166,168,169,170,174,176,177,178,179,180,182,183,184,185,186,187,191,197,198,205,218,220,221,222,224,228,231,232,234,235,238,240,269,270,271,272,276,277,279,280,281,282,284,285,289,291,292,293,294,295,296,297,298,300,302,303,304,305,310,311,314,315,321,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,359,366,367,368,369,370,371,372,373,374,375,378,379,381,383,384,386,387,388,389,390,391,392,397,398,404,405,406,407,408,409,411,414,419,421,422,423,424,425,427,428,430,431,433,434,436,437,440,441,443,444,445,446,447,448,],[15,-263,-63,-74,-73,-60,-56,-57,-35,-31,-61,-263,-36,-55,-70,-65,-54,15,-58,-178,-111,-68,-263,-71,-263,-34,-75,-114,-66,-33,-62,-37,-64,-67,82,-263,-69,-263,-72,-76,-59,-52,-9,-10,-86,-261,-85,-51,-113,-112,-32,-102,-101,-28,-122,-124,-27,-18,-46,-145,-77,-17,-80,-81,-147,-47,-50,-53,-263,-115,-263,-263,-263,-248,-125,-123,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,-193,-254,-250,-179,-146,-79,-134,-145,-81,-38,-263,-87,-262,-23,-84,-24,-83,-26,307,-96,308,-25,-98,-103,-121,-120,-78,-260,-220,-258,-237,-236,-150,-214,-219,-217,-152,-176,-218,-154,-148,-82,-39,-41,-44,370,-40,371,-42,374,-14,-263,-156,-155,-45,381,-157,-13,-263,-43,-248,-89,-88,-100,-192,-105,-104,-116,-119,-180,-235,-234,-233,-232,-231,-244,-201,-213,-202,-200,-204,-208,-203,-199,-206,-211,-197,-196,-205,-212,-207,-209,-210,-198,-135,-149,-151,-153,405,-175,-174,406,-263,-172,-263,-13,-158,-171,-159,-263,-97,-99,-118,-117,-221,-229,-230,-177,-215,-136,-170,-173,421,-263,-162,-263,-160,-194,-263,432,-263,-161,-263,-263,-245,-238,438,-166,-165,-163,-239,444,-263,-263,-167,-164,-263,-169,-168,]),'LT':([112,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,390,391,392,398,428,430
,437,],[-248,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,247,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-201,247,-202,-200,-204,247,-203,-199,-206,247,-197,-196,-205,247,247,247,247,-198,-221,-229,-230,-215,-245,-238,-239,]),'COMMA':([1,2,3,5,6,9,10,13,14,17,18,19,21,23,25,26,27,28,29,32,33,35,37,39,40,42,43,44,45,46,49,50,51,52,54,56,58,59,60,62,63,67,68,69,70,71,74,75,79,80,81,83,84,90,94,96,100,101,102,109,110,111,112,113,114,115,116,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,151,153,154,156,166,168,174,176,177,178,179,180,182,184,187,188,189,190,191,197,198,199,200,201,202,205,218,220,221,222,224,228,231,232,234,235,236,238,239,240,265,269,270,271,285,300,302,303,304,305,310,311,312,313,314,315,318,320,321,323,324,325,326,327,328,329,330,331,334,337,339,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,356,357,358,359,363,366,367,368,372,386,387,388,389,390,391,392,397,398,404,410,412,415,416,418,419,420,428,430,435,437,],[-263,-63,-74,-73,-60,-56,-57,-61,-263,-55,-70,-65,-54,-58,-178,-111,-68,-263,-71,-75,-114,-66,-62,-64,-67,-263,-69,-263,-72,-76,-59,-52,-9,-10,-86,-85,-51,-113,-112,-102,-101,-28,-122,-124,-27,117,-145,-77,-80,-81,-147,-50,-53,-115,-263,-263,-106,190,-109,-128,-263,203,-248,204,-132,-125,-123,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,-193,-254,-250,-179,-146,-79,-134,-145,-81,-87,-262,-23,-84,-24,-83,306,-96,-98,190,190,-107,-103,-121,-120,-131,-1,-2,-130,-78,-260,-220,-258,-237,-236,-150,-214,-219,-217,-152,335,-176,-263,-218,362,-154,-148,-82,335,-248,-89,-88,-100,-192,-105,-104,-108,-110,-116,-119,-133,-129,-180,-235,-234,-233,-232,335,-246,-231,393,394,-244,-144,-145,-201,-213,-202,-200,-204,-208,-203,-199,-206,-211,-197,-196,-205,-212,-207,-209,335,-210,-198,-135,-137,-149,-151,-153,335,-97,-99,-118,-117,-221,-229,-230,-177,-215,-136,335,335,335,-247,429,-194,-138,-245
,-238,335,-239,]),'OFFSETOF':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,115,119,127,128,134,135,137,139,141,142,144,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,309,335,336,362,365,370,371,373,374,375,376,379,380,381,383,384,385,393,395,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,429,432,433,434,436,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,133,133,-47,133,-28,-263,-125,-227,133,-225,133,-224,133,-223,133,133,-222,-226,-263,-223,133,133,133,-262,133,133,-223,133,133,-184,-187,-185,-181,-182,-186,-188,133,-190,-191,-183,-189,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,133,-12,133,133,-11,-223,-41,-44,-40,133,-42,133,133,-156,-155,-45,-157,133,-43,133,133,133,-263,-139,-175,-174,133,-172,133,133,-158,133,-171,-159,133,133,133,133,-263,133,133,-11,-170,-173,133,-162,133,-160,133,133,-161,133,133,133,-263,133,-166,-165,-163,133,133,133,-167,-164,133,-169,-168,]),'TYPEDEF':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,59,60,61,62,63,66,78,80,82,87,89,90,165,167,169,170,171,174,176,191,197,198,204,272,276,277,280,282,289,291,292,293,295,298,302,303,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[23,23,-63,-74,-73,-60,-56,-57,-35,-31,-61,23,-36,-55,-70,-65,-54,23,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,23,-69,23,-72,-76,23,-59,-86,-261,-85,-113,-112,-32,-102,-101,23,23,23,-47,-48,23,-115,23,23,-38,23,-49,-87,-262,-103,-121,-120,23,-39,-41,-44,-40,-42,23,-156,-155,-45,-157,-43,-89,-88,-105,-104,-116,-119,23,-175,-174,23,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),
'XOR':([112,118,120,121,122,123,124,125,126,129,130,132,138,140,143,145,146,147,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,390,391,392,398,428,430,437,],[-248,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,-195,-251,-228,-257,-249,-240,250,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-201,250,-202,-200,-204,-208,-203,-199,-206,-211,-197,-196,-205,250,-207,-209,250,-198,-221,-229,-230,-215,-245,-238,-239,]),'AUTO':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,59,60,61,62,63,66,78,80,82,87,89,90,165,167,169,170,171,174,176,191,197,198,204,272,276,277,280,282,289,291,292,293,295,298,302,303,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[21,21,-63,-74,-73,-60,-56,-57,-35,-31,-61,21,-36,-55,-70,-65,-54,21,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,21,-69,21,-72,-76,21,-59,-86,-261,-85,-113,-112,-32,-102,-101,21,21,21,-47,-48,21,-115,21,21,-38,21,-49,-87,-262,-103,-121,-120,21,-39,-41,-44,-40,-42,21,-156,-155,-45,-157,-43,-89,-88,-105,-104,-116,-119,21,-175,-174,21,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'TIMES':([0,1,2,3,4,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,27,28,29,30,31,32,35,36,37,38,39,40,42,43,44,45,46,49,50,51,52,54,55,56,58,61,62,63,65,67,68,69,70,72,77,78,82,83,84,86,94,96,97,103,104,105,110,112,115,117,118,119,120,121,122,123,124,125,126,127,128,129,130,132,134,135,137,138,139,140,141,142,143,144,145,146,147,148,149,152,155,157,162,164,167,169,170,174,176,177,178,179,180,181,191,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,218,219,220,221,222,224,226,227,230,231,232,233,234,239,240,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,272,27
4,276,277,280,281,282,288,289,291,292,293,295,297,298,300,302,303,306,309,310,311,323,324,325,326,329,334,335,336,338,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,362,365,370,371,373,374,375,376,379,380,381,383,384,385,390,391,392,393,395,398,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,428,429,430,432,433,434,436,437,438,441,443,444,445,446,447,448,],[28,-263,-63,-74,28,-73,-60,-56,-57,-35,-31,-61,-263,-36,-55,-70,-65,-54,28,-58,-178,-68,-263,-71,28,-34,-75,-66,-33,-62,-37,-64,-67,-263,-69,-263,-72,-76,-59,-52,-9,-10,-86,-261,-85,-51,-32,-102,-101,-263,-28,28,-124,-27,139,157,28,-47,-50,-53,28,-263,-263,28,194,-28,-263,28,-248,-125,28,-252,-227,-214,-243,-255,-259,-256,-253,-241,157,-225,-242,-216,-195,157,-224,157,-251,-223,-228,157,157,-257,-222,-249,-240,252,-254,-250,-226,-263,-223,157,274,28,-38,157,-87,-262,-23,-84,-24,-83,157,-103,157,-223,157,157,-184,-187,-185,-181,-182,-186,-188,157,-190,-191,-183,-189,-260,157,-220,-258,-237,-236,157,157,157,-214,-219,157,-217,28,-218,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,157,-12,157,157,-11,-39,-223,-41,-44,-40,157,-42,157,157,-156,-155,-45,-157,157,-43,-248,-89,-88,28,157,-105,-104,-235,-234,-233,-232,-231,-244,157,157,28,252,252,252,252,252,252,252,252,252,252,-197,-196,252,252,252,252,252,-198,-263,-139,-175,-174,157,-172,157,157,-158,157,-171,-159,157,157,-221,-229,-230,157,157,-215,-263,157,157,-11,-170,-173,157,-162,157,-160,157,157,-161,157,157,157,-245,-263,-238,157,-166,-165,-163,-239,157,157,157,-167,-164,157,-169,-168,]),'LPAREN':([0,1,2,3,4,5,6,9,10,11,12,13,14,15,16,17,18,19,21,22,23,25,26,27,28,29,30,31,32,33,35,36,37,38,39,40,42,43,44,45,46,49,50,51,52,54,55,56,58,60,61,62,63,65,67,68,69,70,72,74,77,78,81,82,83,84,86,90,94,96,97,103,104,105,110,112,115,116,117,118,119,121,122,123,124,125,126,127,128,129,130,133,134,135,137,138,139,140,141,142,143,144,145,146,148,149,152,153,155,157,162,164,166,167,169,170,174,176,177,178,179,1
80,181,191,192,194,195,196,197,198,206,207,208,209,210,211,212,213,214,215,216,217,218,219,221,222,224,226,227,228,230,233,235,239,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,269,270,272,274,276,277,280,281,282,283,287,288,289,290,291,292,293,295,297,298,300,301,302,303,306,309,310,311,314,315,323,324,325,326,329,334,335,336,338,339,362,365,366,367,368,370,371,373,374,375,376,379,380,381,383,384,385,388,389,391,392,393,395,399,400,402,403,405,406,408,409,411,413,414,421,423,424,425,426,427,428,429,430,432,433,434,436,437,438,441,443,444,445,446,447,448,],[4,-263,-63,-74,4,-73,-60,-56,-57,-35,-31,-61,-263,-36,4,-55,-70,-65,-54,4,-58,-178,66,-68,-263,-71,78,-34,-75,-114,-66,-33,-62,-37,-64,-67,-263,-69,-263,-72,-76,-59,-52,-9,-10,-86,-261,-85,-51,66,-32,-102,-101,-263,-28,-122,-124,-27,141,78,141,78,165,-47,-50,-53,167,-115,-263,-263,167,141,-28,-263,78,-248,-125,-123,4,-252,-227,-243,-255,-259,-256,-253,-241,219,-225,-242,227,229,230,-224,233,-251,-223,-228,141,233,-257,-222,-249,-240,-254,-250,-226,165,-263,-223,141,141,167,167,-38,141,-87,-262,-23,-84,-24,-83,230,-103,230,-223,141,141,-121,-120,-184,-187,-185,-181,-182,-186,-188,141,-190,-191,-183,-189,-260,141,-258,-237,-236,141,141,-150,141,141,-152,338,230,230,230,230,230,230,230,230,230,230,230,230,230,230,230,230,141,230,230,-12,230,141,-11,-154,-148,-39,-223,-41,-44,-40,141,-42,373,376,230,141,380,-156,-155,-45,-157,141,-43,-248,385,-89,-88,4,230,-105,-104,-116,-119,-235,-234,-233,-232,-231,-244,141,230,338,338,-263,-139,-149,-151,-153,-175,-174,141,-172,141,141,-158,141,-171,-159,141,141,-118,-117,-229,-230,141,230,-263,230,141,-11,-170,-173,141,-162,141,426,-160,141,141,-161,141,141,141,-245,-263,-238,141,-166,-165,-163,-239,141,141,141,-167,-164,141,-169,-168,]),'MINUSMINUS':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,112,115,118,119,121,122,123,124,125,126,127,128,129,130,134,135,137,138,139,140,141,142,143,144,145,146,148,149,152,155,157,162,164,170,176,1
81,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,218,219,221,222,224,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,300,309,323,324,325,326,329,334,335,336,362,365,370,371,373,374,375,376,379,380,381,383,384,385,391,392,393,395,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,428,429,430,432,433,434,436,437,438,441,443,444,445,446,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,142,142,-47,142,-28,-263,-248,-125,-252,-227,-243,-255,-259,-256,-253,-241,142,-225,-242,222,142,-224,142,-251,-223,-228,142,142,-257,-222,-249,-240,-254,-250,-226,-263,-223,142,142,142,-262,142,142,-223,142,142,-184,-187,-185,-181,-182,-186,-188,142,-190,-191,-183,-189,-260,142,-258,-237,-236,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,142,-12,142,142,-11,-223,-41,-44,-40,142,-42,142,142,-156,-155,-45,-157,142,-43,-248,142,-235,-234,-233,-232,-231,-244,142,142,-263,-139,-175,-174,142,-172,142,142,-158,142,-171,-159,142,142,-229,-230,142,142,-263,142,142,-11,-170,-173,142,-162,142,-160,142,142,-161,142,142,142,-245,-263,-238,142,-166,-165,-163,-239,142,142,142,-167,-164,142,-169,-168,]),'ID':([0,1,2,3,4,5,6,7,9,10,11,12,13,14,15,16,17,18,19,20,21,22,23,24,25,27,28,29,30,31,32,34,35,36,37,38,39,40,42,43,44,45,46,49,50,51,52,54,55,56,58,61,62,63,64,65,66,67,68,69,70,72,74,77,78,82,83,84,86,94,96,97,98,99,103,104,105,110,115,116,117,119,127,128,134,135,137,139,141,142,144,152,155,157,162,164,166,167,169,170,174,176,177,178,179,180,181,190,191,192,194,195,196,203,206,207,208,209,210,211,212,213,214,215,216,217,219,223,225,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,263,264,266,268,272,274,276,277,278,280,281,282,288,289,291,292,293,295,297,298,302,303,306,309,310,311,335,336,362,365,370,371,373,374,375,376,379,380,381,383,384,385,393,394,395,399,400,402,403
,405,406,408,409,411,414,421,423,424,425,426,427,429,432,433,434,436,438,441,443,444,445,446,447,448,],[33,-263,-63,-74,33,-73,-60,56,-56,-57,-35,-31,-61,-263,-36,33,-55,-70,-65,-91,-54,33,-58,63,-178,-68,-263,-71,33,-34,-75,-90,-66,-33,-62,-37,-64,-67,-263,-69,-263,-72,-76,-59,-52,-9,-10,-86,-261,-85,-51,-32,-102,-101,102,-263,112,-28,-122,-124,-27,112,33,112,33,-47,-50,-53,33,-263,-263,33,102,102,112,-28,-263,33,-125,-123,33,-227,112,-225,112,-224,112,-223,112,112,-222,-226,-263,-223,112,112,33,33,-38,300,-87,-262,-23,-84,-24,-83,112,102,-103,112,-223,112,112,112,-184,-187,-185,-181,-182,-186,-188,112,-190,-191,-183,-189,112,324,326,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,112,-12,112,112,112,-11,-39,-223,-41,-44,369,-40,112,-42,112,300,-156,-155,-45,-157,300,-43,-89,-88,33,112,-105,-104,112,112,-263,-139,-175,-174,112,-172,300,112,-158,112,-171,-159,300,112,112,112,112,-263,112,112,-11,-170,-173,112,-162,300,-160,112,300,-161,300,112,300,-263,112,-166,-165,-163,112,300,300,-167,-164,300,-169,-168,]),'IF':([55,82,170,176,276,277,280,282,289,291,292,293,295,297,298,370,371,374,375,379,381,383,384,405,406,409,411,414,423,424,425,427,433,434,436,441,443,444,445,446,447,448,],[-261,-47,301,-262,-41,-44,-40,-42,301,-156,-155,-45,-157,301,-43,-175,-174,-172,301,-158,-171,-159,301,-170,-173,-162,301,-160,301,-161,301,301,-166,-165,-163,301,301,-167,-164,301,-169,-168,]),'STRING_LITERAL':([3,32,46,55,65,67,69,70,72,77,82,103,104,105,115,119,127,128,129,134,135,137,139,141,142,143,144,152,155,157,162,164,170,176,181,192,194,195,196,206,207,208,209,210,211,212,213,214,215,216,217,219,221,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,260,264,266,268,274,276,277,280,281,282,288,289,291,292,293,295,297,298,309,335,336,362,365,370,371,373,374,375,376,379,380,381,383,384,385,393,395,399,400,402,403,405,406,408,409,411,414,421,423,424,425,426,427,429,432,433,434,436,438,441,443,444,445,44
6,447,448,],[-74,-75,-76,-261,-263,-28,-124,-27,143,143,-47,143,-28,-263,-125,-227,143,-225,221,143,-224,143,-223,143,143,-257,-222,-226,-263,-223,143,143,143,-262,143,143,-223,143,143,-184,-187,-185,-181,-182,-186,-188,143,-190,-191,-183,-189,143,-258,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,143,-12,143,143,-11,-223,-41,-44,-40,143,-42,143,143,-156,-155,-45,-157,143,-43,143,143,143,-263,-139,-175,-174,143,-172,143,143,-158,143,-171,-159,143,143,143,143,-263,143,143,-11,-170,-173,143,-162,143,-160,143,143,-161,143,143,143,-263,143,-166,-165,-163,143,143,143,-167,-164,143,-169,-168,]),'FLOAT':([0,1,2,3,5,6,9,10,11,12,13,14,15,17,18,19,21,22,23,25,26,27,29,31,32,33,35,36,37,38,39,40,42,43,44,45,46,48,49,54,55,56,57,59,60,61,62,63,66,78,80,82,87,89,90,91,92,93,94,95,96,141,165,167,169,170,171,172,173,174,175,176,191,197,198,204,219,229,230,233,272,276,277,280,282,289,291,292,293,295,298,302,303,307,308,310,311,314,315,338,370,371,373,374,379,381,383,388,389,405,406,409,414,424,433,434,436,444,445,447,448,],[35,35,-63,-74,-73,-60,-56,-57,-35,-31,-61,35,-36,-55,-70,-65,-54,35,-58,-178,-111,-68,-71,-34,-75,-114,-66,-33,-62,-37,-64,-67,35,-69,35,-72,-76,35,-59,-86,-261,-85,35,-113,-112,-32,-102,-101,35,35,35,-47,-48,35,-115,35,35,35,35,-92,35,35,35,35,-38,35,-49,35,35,-87,-93,-262,-103,-121,-120,35,35,35,35,35,-39,-41,-44,-40,-42,35,-156,-155,-45,-157,-43,-89,-88,-94,-95,-105,-104,-116,-119,35,-175,-174,35,-172,-158,-171,-159,-118,-117,-170,-173,-162,-160,-161,-166,-165,-163,-167,-164,-169,-168,]),'XOREQUAL':([112,118,120,121,122,123,124,125,126,129,130,138,140,143,145,146,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,390,391,392,398,428,430,437,],[-248,-252,210,-243,-255,-259,-256,-253,-241,-242,-216,-251,-228,-257,-249,-240,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-221,-229,-230,-215,-245,-238,-239,]),'LSHIFTEQUAL':([112,118,120,121,122,123,1
24,125,126,129,130,138,140,143,145,146,148,149,176,218,220,221,222,224,231,232,234,240,300,323,324,325,326,329,334,390,391,392,398,428,430,437,],[-248,-252,212,-243,-255,-259,-256,-253,-241,-242,-216,-251,-228,-257,-249,-240,-254,-250,-262,-260,-220,-258,-237,-236,-214,-219,-217,-218,-248,-235,-234,-233,-232,-231,-244,-221,-229,-230,-215,-245,-238,-239,]),'RBRACKET':([3,32,46,65,69,70,72,103,104,112,115,118,120,121,122,123,124,125,126,129,130,131,132,136,138,139,140,143,145,146,147,148,149,150,151,164,176,193,194,218,220,221,222,224,231,232,234,238,240,273,274,305,316,317,321,323,324,325,326,327,329,334,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,357,358,361,390,391,392,397,398,419,428,430,437,],[-74,-75,-76,-263,-124,-27,-263,-263,-28,-248,-125,-252,-214,-243,-255,-259,-256,-253,-241,-242,-216,228,-195,-4,-251,235,-228,-257,-249,-240,-193,-254,-250,-3,-179,-263,-262,314,315,-260,-220,-258,-237,-236,-214,-219,-217,-176,-218,366,367,-192,388,389,-180,-235,-234,-233,-232,391,-231,-244,-201,-213,-202,-200,-204,-208,-203,-199,-206,-211,-197,-196,-205,-212,-207,-209,-210,-198,401,-221,-229,-230,-177,-215,-194,-245,-238,-239,]),}
# Expand the compact action table into the nested mapping the generated
# parser looks up at runtime: _lr_action[state][token] -> action.
# _lr_action_items maps each token to a pair of parallel lists
# (states, actions); zip pairs them back up here.
_lr_action = {}
for _k, _v in _lr_action_items.items():
    for _x, _y in zip(_v[0], _v[1]):
        # setdefault replaces the non-idiomatic `if not _x in ...` guard
        _lr_action.setdefault(_x, {})[_k] = _y
del _lr_action_items  # drop the compact form once expanded
_lr_goto_items = {'storage_class_specifier':([0,1,14,22,42,44,48,66,78,80,89,165,167,170,204,289,338,373,],[1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,1,]),'identifier_list_opt':([66,],[106,]),'selection_statement':([170,289,297,375,384,411,423,425,427,441,443,446,],[298,298,298,298,298,298,298,298,298,298,298,298,]),'constant':([72,77,103,127,134,137,141,142,162,164,170,181,192,195,196,213,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,264,266,281,288,289,297,309,335,336,373,375,376,380,384,385,393,395,400,402,408,411,421,423,425,426,427,432,438,441,443,446,],[126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,126,]),'unary_expression':([72,77,103,127,134,137,141,142,162,164,170,181,192,195,196,213,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,264,266,281,288,289,297,309,335,336,373,375,376,380,384,385,393,395,400,402,408,411,421,423,425,426,427,432,438,441,443,446,],[120,120,120,220,231,234,120,240,120,120,120,231,231,120,120,120,120,120,120,120,120,231,231,231,231,231,231,231,231,231,231,231,231,231,231,231,231,120,231,231,231,120,120,231,120,120,231,120,231,120,120,120,120,120,120,120,231,231,120,120,120,120,120,120,120,120,120,120,120,120,120,]),'conditional_expression':([72,77,103,141,162,164,170,181,192,195,196,213,219,226,227,230,233,257,264,266,281,288,289,297,309,335,373,375,376,380,384,385,393,400,402,408,411,421,423,425,426,427,432,438,441,443,446,],[151,151,151,151,151,151,151,305,305,151,151,151,151,151,151,151,151,151,305,151,151,305,151,151,305,151,151,151,151,151,151,151,151,419,151,151,151,151,151,151,151,151,151,151,151,151,151,]),'brace_close':([93,101,172,173,188,189,261,299,362,418,429,],[174,191,302,303,310,311,359,383,404,430,437
,]),'struct_or_union_specifier':([0,1,14,22,42,44,48,57,66,78,80,89,91,92,93,94,96,141,165,167,170,172,173,204,219,229,230,233,289,338,373,],[5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,5,]),'unified_wstring_literal':([72,77,103,127,134,137,141,142,162,164,170,181,192,195,196,213,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,264,266,281,288,289,297,309,335,336,373,375,376,380,384,385,393,395,400,402,408,411,421,423,425,426,427,432,438,441,443,446,],[121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,121,]),'abstract_declarator_opt':([110,239,],[199,337,]),'iteration_statement':([170,289,297,375,384,411,423,425,427,441,443,446,],[277,277,277,277,277,277,277,277,277,277,277,277,]),'init_declarator_list':([30,86,],[71,71,]),'translation_unit_or_empty':([0,],[8,]),'struct_declaration_list':([57,91,92,],[93,172,173,]),'block_item_list_opt':([170,],[299,]),'enumerator':([64,98,99,190,],[100,100,100,312,]),'pp_directive':([0,22,],[11,11,]),'abstract_declarator':([30,78,86,97,110,167,239,338,],[79,161,79,185,201,161,201,161,]),'declaration_specifiers_opt':([1,14,42,44,],[50,58,83,84,]),'external_declaration':([0,22,],[12,61,]),'type_specifier':([0,1,14,22,42,44,48,57,66,78,80,89,91,92,93,94,96,141,165,167,170,172,173,204,219,229,230,233,289,338,373,],[14,14,14,14,14,14,14,94,14,14,14,14,94,94,94,94,94,94,14,14,14,94,94,14,94,94,94,94,14,14,14,]),'designation':([155,362,399,429,],[260,260,260,260,]),'compound_statement':([88,163,170,289,297,375,384,411,423,425,427,441,443,446,],[169,272,282,282,282,282,282,282,282,282,282,282,282,282,]),'pointer':([0,4,22,30,68,78,86,97,110,117,167,239,306,338,],[16,16,16,74,116,74,166,166,74,16,166,339,16,339,]),'type_name':([141,219,229,230,233,],[23
7,322,331,332,333,]),'unified_string_literal':([72,77,103,127,134,137,141,142,162,164,170,181,192,195,196,213,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,264,266,281,288,289,297,309,335,336,373,375,376,380,384,385,393,395,400,402,408,411,421,423,425,426,427,432,438,441,443,446,],[129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,129,]),'postfix_expression':([72,77,103,127,134,137,141,142,162,164,170,181,192,195,196,213,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,264,266,281,288,289,297,309,335,336,373,375,376,380,384,385,393,395,400,402,408,411,421,423,425,426,427,432,438,441,443,446,],[130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,130,]),'assignment_expression_opt':([72,103,164,],[131,193,273,]),'designation_opt':([155,362,399,429,],[266,402,266,402,]),'expression_statement':([170,289,297,375,384,411,423,425,427,441,443,446,],[276,276,276,276,276,276,276,276,276,276,276,276,]),'parameter_declaration':([66,78,165,167,204,338,],[109,109,109,109,320,109,]),'initializer_list_opt':([155,],[261,]),'cast_expression':([72,77,103,134,141,162,164,170,181,192,195,196,213,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,264,266,281,288,289,297,309,335,336,373,375,376,380,384,385,393,395,400,402,408,411,421,423,425,426,427,432,438,441,443,446,],[132,132,132,232,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132,132
,132,132,132,132,132,132,132,132,132,132,132,132,398,132,132,132,132,132,132,132,398,132,132,132,132,132,132,132,132,132,132,132,132,132,132,]),'init_declarator':([30,86,117,],[75,75,205,]),'struct_declarator_list':([97,],[182,]),'unary_operator':([72,77,103,127,134,137,141,142,162,164,170,181,192,195,196,213,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,264,266,281,288,289,297,309,335,336,373,375,376,380,384,385,393,395,400,402,408,411,421,423,425,426,427,432,438,441,443,446,],[134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,134,]),'brace_open':([7,24,54,56,62,63,77,88,162,163,170,266,289,297,336,375,384,390,395,396,402,411,423,425,427,441,443,446,],[57,64,91,92,98,99,155,170,155,170,170,155,170,170,399,170,170,399,399,399,155,170,170,170,170,170,170,170,]),'assignment_operator':([120,],[213,]),'struct_or_union':([0,1,14,22,42,44,48,57,66,78,80,89,91,92,93,94,96,141,165,167,170,172,173,204,219,229,230,233,289,338,373,],[7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,7,]),'identifier':([66,72,77,103,127,134,137,141,142,162,164,170,181,192,195,196,203,213,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,263,264,266,281,288,289,297,309,335,336,373,375,376,380,384,385,393,394,395,400,402,408,411,421,423,425,426,427,432,438,441,443,446,],[114,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,318,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,360,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,417,146,146,146,146,146,146,146,146,146,146,146,146,146,146,146,]),'struct_declaration':([57,91,92,93,172,173,],[95,95,95,175,175,175,]),'assignment_expression':([72,7
7,103,141,162,164,170,195,196,213,219,226,227,230,233,257,266,281,289,297,335,373,375,376,380,384,385,393,402,408,411,421,423,425,426,427,432,438,441,443,446,],[136,156,136,238,156,136,238,316,317,321,238,238,328,238,238,238,156,238,238,238,397,238,238,238,238,238,238,416,156,238,238,238,238,238,238,238,238,238,238,238,238,]),'parameter_type_list':([66,78,165,167,338,],[108,159,159,159,159,]),'type_qualifier_list_opt':([28,65,105,],[68,103,196,]),'direct_declarator':([0,4,16,22,30,74,78,86,97,110,117,166,167,306,],[26,26,60,26,26,60,26,26,26,26,26,60,26,26,]),'type_qualifier_list':([28,65,105,],[67,104,67,]),'designator':([155,267,362,399,429,],[262,364,262,262,262,]),'argument_expression_list':([227,],[330,]),'initializer':([77,162,266,402,],[154,271,363,420,]),'specifier_qualifier_list_opt':([94,96,],[178,180,]),'constant_expression':([181,192,264,288,309,],[304,313,361,377,387,]),'expression_opt':([170,289,297,373,375,384,408,411,421,423,425,427,432,438,441,443,446,],[279,279,279,407,279,279,422,279,431,279,279,279,439,442,279,279,279,]),'primary_expression':([72,77,103,127,134,137,141,142,162,164,170,181,192,195,196,213,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,264,266,281,288,289,297,309,335,336,373,375,376,380,384,385,393,395,400,402,408,411,421,423,425,426,427,432,438,441,443,446,],[140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,140,]),'declaration_specifiers':([0,1,14,22,42,44,48,66,78,80,89,165,167,170,204,289,338,373,],[30,52,52,30,52,52,86,110,110,86,86,110,110,86,110,86,110,86,]),'declaration':([0,22,48,80,89,170,289,373,],[31,31,87,87,171,292,292,408,]),'struct_declarator_list_opt':([97,],[183,]),'identifier_list':([66,],[111,]),'typedef_name':([0,1,14,22,42,44,48,57,66,7
8,80,89,91,92,93,94,96,141,165,167,170,172,173,204,219,229,230,233,289,338,373,],[29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,29,]),'parameter_type_list_opt':([78,165,167,338,],[160,275,160,160,]),'jump_statement':([170,289,297,375,384,411,423,425,427,441,443,446,],[293,293,293,293,293,293,293,293,293,293,293,293,]),'declaration_list_opt':([48,80,],[88,163,]),'struct_declarator':([97,306,],[184,386,]),'function_definition':([0,22,],[36,36,]),'binary_expression':([72,77,103,141,162,164,170,181,192,195,196,213,219,226,227,230,233,241,242,243,244,245,246,247,248,249,250,251,252,253,254,255,256,257,258,259,264,266,281,288,289,297,309,335,373,375,376,380,384,385,393,400,402,408,411,421,423,425,426,427,432,438,441,443,446,],[147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,340,341,342,343,344,345,346,347,348,349,350,351,352,353,354,355,147,357,358,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,147,]),'parameter_list':([66,78,165,167,338,],[113,113,113,113,113,]),'init_declarator_list_opt':([30,86,],[73,73,]),'enum_specifier':([0,1,14,22,42,44,48,57,66,78,80,89,91,92,93,94,96,141,165,167,170,172,173,204,219,229,230,233,289,338,373,],[45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,45,]),'decl_body':([0,22,48,80,89,170,289,373,],[41,41,41,41,41,41,41,41,]),'type_qualifier':([0,1,14,22,28,42,44,48,57,65,66,67,78,80,89,91,92,93,94,96,104,105,141,165,167,170,172,173,204,219,229,230,233,289,338,373,],[42,42,42,42,69,42,42,42,96,69,42,115,42,42,42,96,96,96,96,96,115,69,96,42,42,42,96,96,42,96,96,96,96,42,42,42,]),'statement':([170,289,297,375,384,411,423,425,427,441,443,446,],[291,291,382,409,414,424,433,434,436,445,447,448,]),'enumerator_list':([64,98,99,],[101,188,189,]),'labeled_statement':([170,289,297,375,384,411,423,425,427,441,443,446,],[280,280,280,280,280,280,280,280,280,280,280,280,]),'function_speci
fier':([0,1,14,22,42,44,48,66,78,80,89,165,167,170,204,289,338,373,],[44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,44,]),'specifier_qualifier_list':([57,91,92,93,94,96,141,172,173,219,229,230,233,],[97,97,97,97,179,179,239,97,97,239,239,239,239,]),'block_item':([170,289,],[295,379,]),'block_item_list':([170,],[289,]),'empty':([0,1,14,28,30,42,44,48,65,66,72,78,80,86,94,96,97,103,105,110,155,164,165,167,170,239,289,297,338,362,373,375,384,399,408,411,421,423,425,427,429,432,438,441,443,446,],[47,51,51,70,76,51,51,85,70,107,150,158,85,76,177,177,186,150,70,200,268,150,158,158,296,200,378,378,158,403,378,378,378,403,378,378,378,378,378,378,403,378,378,378,378,378,]),'translation_unit':([0,],[22,]),'initializer_list':([155,399,],[265,418,]),'declarator':([0,4,22,30,78,86,97,110,117,167,306,],[48,53,48,80,53,168,187,202,168,53,187,]),'direct_abstract_declarator':([30,74,78,86,97,110,166,167,239,338,339,],[81,153,81,81,81,81,153,81,81,81,153,]),'designator_list':([155,362,399,429,],[267,267,267,267,]),'declaration_list':([48,80,],[89,89,]),'expression':([141,170,219,226,230,233,257,281,289,297,373,375,376,380,384,385,408,411,421,423,425,426,427,432,438,441,443,446,],[236,285,236,327,236,236,356,372,285,285,285,285,410,412,285,415,285,285,285,285,285,435,285,285,285,285,285,285,]),}
# Expand the compact goto table into the nested mapping the generated
# parser looks up at runtime: _lr_goto[state][nonterminal] -> next state.
# _lr_goto_items maps each nonterminal to a pair of parallel lists
# (states, goto states); zip pairs them back up here.
_lr_goto = {}
for _k, _v in _lr_goto_items.items():
    for _x, _y in zip(_v[0], _v[1]):
        # setdefault replaces the non-idiomatic `if not _x in ...` guard
        _lr_goto.setdefault(_x, {})[_k] = _y
del _lr_goto_items  # drop the compact form once expanded
_lr_productions = [
("S' -> translation_unit_or_empty","S'",1,None,None,None),
('abstract_declarator_opt -> empty','abstract_declarator_opt',1,'p_abstract_declarator_opt','../pycparser/plyparser.py',42),
('abstract_declarator_opt -> abstract_declarator','abstract_declarator_opt',1,'p_abstract_declarator_opt','../pycparser/plyparser.py',43),
('assignment_expression_opt -> empty','assignment_expression_opt',1,'p_assignment_expression_opt','../pycparser/plyparser.py',42),
('assignment_expression_opt -> assignment_expression','assignment_expression_opt',1,'p_assignment_expression_opt','../pycparser/plyparser.py',43),
('block_item_list_opt -> empty','block_item_list_opt',1,'p_block_item_list_opt','../pycparser/plyparser.py',42),
('block_item_list_opt -> block_item_list','block_item_list_opt',1,'p_block_item_list_opt','../pycparser/plyparser.py',43),
('declaration_list_opt -> empty','declaration_list_opt',1,'p_declaration_list_opt','../pycparser/plyparser.py',42),
('declaration_list_opt -> declaration_list','declaration_list_opt',1,'p_declaration_list_opt','../pycparser/plyparser.py',43),
('declaration_specifiers_opt -> empty','declaration_specifiers_opt',1,'p_declaration_specifiers_opt','../pycparser/plyparser.py',42),
('declaration_specifiers_opt -> declaration_specifiers','declaration_specifiers_opt',1,'p_declaration_specifiers_opt','../pycparser/plyparser.py',43),
('designation_opt -> empty','designation_opt',1,'p_designation_opt','../pycparser/plyparser.py',42),
('designation_opt -> designation','designation_opt',1,'p_designation_opt','../pycparser/plyparser.py',43),
('expression_opt -> empty','expression_opt',1,'p_expression_opt','../pycparser/plyparser.py',42),
('expression_opt -> expression','expression_opt',1,'p_expression_opt','../pycparser/plyparser.py',43),
('identifier_list_opt -> empty','identifier_list_opt',1,'p_identifier_list_opt','../pycparser/plyparser.py',42),
('identifier_list_opt -> identifier_list','identifier_list_opt',1,'p_identifier_list_opt','../pycparser/plyparser.py',43),
('init_declarator_list_opt -> empty','init_declarator_list_opt',1,'p_init_declarator_list_opt','../pycparser/plyparser.py',42),
('init_declarator_list_opt -> init_declarator_list','init_declarator_list_opt',1,'p_init_declarator_list_opt','../pycparser/plyparser.py',43),
('initializer_list_opt -> empty','initializer_list_opt',1,'p_initializer_list_opt','../pycparser/plyparser.py',42),
('initializer_list_opt -> initializer_list','initializer_list_opt',1,'p_initializer_list_opt','../pycparser/plyparser.py',43),
('parameter_type_list_opt -> empty','parameter_type_list_opt',1,'p_parameter_type_list_opt','../pycparser/plyparser.py',42),
('parameter_type_list_opt -> parameter_type_list','parameter_type_list_opt',1,'p_parameter_type_list_opt','../pycparser/plyparser.py',43),
('specifier_qualifier_list_opt -> empty','specifier_qualifier_list_opt',1,'p_specifier_qualifier_list_opt','../pycparser/plyparser.py',42),
('specifier_qualifier_list_opt -> specifier_qualifier_list','specifier_qualifier_list_opt',1,'p_specifier_qualifier_list_opt','../pycparser/plyparser.py',43),
('struct_declarator_list_opt -> empty','struct_declarator_list_opt',1,'p_struct_declarator_list_opt','../pycparser/plyparser.py',42),
('struct_declarator_list_opt -> struct_declarator_list','struct_declarator_list_opt',1,'p_struct_declarator_list_opt','../pycparser/plyparser.py',43),
('type_qualifier_list_opt -> empty','type_qualifier_list_opt',1,'p_type_qualifier_list_opt','../pycparser/plyparser.py',42),
('type_qualifier_list_opt -> type_qualifier_list','type_qualifier_list_opt',1,'p_type_qualifier_list_opt','../pycparser/plyparser.py',43),
('translation_unit_or_empty -> translation_unit','translation_unit_or_empty',1,'p_translation_unit_or_empty','../pycparser/c_parser.py',494),
('translation_unit_or_empty -> empty','translation_unit_or_empty',1,'p_translation_unit_or_empty','../pycparser/c_parser.py',495),
('translation_unit -> external_declaration','translation_unit',1,'p_translation_unit_1','../pycparser/c_parser.py',503),
('translation_unit -> translation_unit external_declaration','translation_unit',2,'p_translation_unit_2','../pycparser/c_parser.py',510),
('external_declaration -> function_definition','external_declaration',1,'p_external_declaration_1','../pycparser/c_parser.py',522),
('external_declaration -> declaration','external_declaration',1,'p_external_declaration_2','../pycparser/c_parser.py',527),
('external_declaration -> pp_directive','external_declaration',1,'p_external_declaration_3','../pycparser/c_parser.py',532),
('external_declaration -> SEMI','external_declaration',1,'p_external_declaration_4','../pycparser/c_parser.py',537),
('pp_directive -> PPHASH','pp_directive',1,'p_pp_directive','../pycparser/c_parser.py',542),
('function_definition -> declarator declaration_list_opt compound_statement','function_definition',3,'p_function_definition_1','../pycparser/c_parser.py',551),
('function_definition -> declaration_specifiers declarator declaration_list_opt compound_statement','function_definition',4,'p_function_definition_2','../pycparser/c_parser.py',568),
('statement -> labeled_statement','statement',1,'p_statement','../pycparser/c_parser.py',579),
('statement -> expression_statement','statement',1,'p_statement','../pycparser/c_parser.py',580),
('statement -> compound_statement','statement',1,'p_statement','../pycparser/c_parser.py',581),
('statement -> selection_statement','statement',1,'p_statement','../pycparser/c_parser.py',582),
('statement -> iteration_statement','statement',1,'p_statement','../pycparser/c_parser.py',583),
('statement -> jump_statement','statement',1,'p_statement','../pycparser/c_parser.py',584),
('decl_body -> declaration_specifiers init_declarator_list_opt','decl_body',2,'p_decl_body','../pycparser/c_parser.py',598),
('declaration -> decl_body SEMI','declaration',2,'p_declaration','../pycparser/c_parser.py',657),
('declaration_list -> declaration','declaration_list',1,'p_declaration_list','../pycparser/c_parser.py',666),
('declaration_list -> declaration_list declaration','declaration_list',2,'p_declaration_list','../pycparser/c_parser.py',667),
('declaration_specifiers -> type_qualifier declaration_specifiers_opt','declaration_specifiers',2,'p_declaration_specifiers_1','../pycparser/c_parser.py',672),
('declaration_specifiers -> type_specifier declaration_specifiers_opt','declaration_specifiers',2,'p_declaration_specifiers_2','../pycparser/c_parser.py',677),
('declaration_specifiers -> storage_class_specifier declaration_specifiers_opt','declaration_specifiers',2,'p_declaration_specifiers_3','../pycparser/c_parser.py',682),
('declaration_specifiers -> function_specifier declaration_specifiers_opt','declaration_specifiers',2,'p_declaration_specifiers_4','../pycparser/c_parser.py',687),
('storage_class_specifier -> AUTO','storage_class_specifier',1,'p_storage_class_specifier','../pycparser/c_parser.py',692),
('storage_class_specifier -> REGISTER','storage_class_specifier',1,'p_storage_class_specifier','../pycparser/c_parser.py',693),
('storage_class_specifier -> STATIC','storage_class_specifier',1,'p_storage_class_specifier','../pycparser/c_parser.py',694),
('storage_class_specifier -> EXTERN','storage_class_specifier',1,'p_storage_class_specifier','../pycparser/c_parser.py',695),
('storage_class_specifier -> TYPEDEF','storage_class_specifier',1,'p_storage_class_specifier','../pycparser/c_parser.py',696),
('function_specifier -> INLINE','function_specifier',1,'p_function_specifier','../pycparser/c_parser.py',701),
('type_specifier -> VOID','type_specifier',1,'p_type_specifier_1','../pycparser/c_parser.py',706),
('type_specifier -> _BOOL','type_specifier',1,'p_type_specifier_1','../pycparser/c_parser.py',707),
('type_specifier -> CHAR','type_specifier',1,'p_type_specifier_1','../pycparser/c_parser.py',708),
('type_specifier -> SHORT','type_specifier',1,'p_type_specifier_1','../pycparser/c_parser.py',709),
('type_specifier -> INT','type_specifier',1,'p_type_specifier_1','../pycparser/c_parser.py',710),
('type_specifier -> LONG','type_specifier',1,'p_type_specifier_1','../pycparser/c_parser.py',711),
('type_specifier -> FLOAT','type_specifier',1,'p_type_specifier_1','../pycparser/c_parser.py',712),
('type_specifier -> DOUBLE','type_specifier',1,'p_type_specifier_1','../pycparser/c_parser.py',713),
('type_specifier -> _COMPLEX','type_specifier',1,'p_type_specifier_1','../pycparser/c_parser.py',714),
('type_specifier -> SIGNED','type_specifier',1,'p_type_specifier_1','../pycparser/c_parser.py',715),
('type_specifier -> UNSIGNED','type_specifier',1,'p_type_specifier_1','../pycparser/c_parser.py',716),
('type_specifier -> typedef_name','type_specifier',1,'p_type_specifier_2','../pycparser/c_parser.py',721),
('type_specifier -> enum_specifier','type_specifier',1,'p_type_specifier_2','../pycparser/c_parser.py',722),
('type_specifier -> struct_or_union_specifier','type_specifier',1,'p_type_specifier_2','../pycparser/c_parser.py',723),
('type_qualifier -> CONST','type_qualifier',1,'p_type_qualifier','../pycparser/c_parser.py',728),
('type_qualifier -> RESTRICT','type_qualifier',1,'p_type_qualifier','../pycparser/c_parser.py',729),
('type_qualifier -> VOLATILE','type_qualifier',1,'p_type_qualifier','../pycparser/c_parser.py',730),
('init_declarator_list -> init_declarator','init_declarator_list',1,'p_init_declarator_list_1','../pycparser/c_parser.py',735),
('init_declarator_list -> init_declarator_list COMMA init_declarator','init_declarator_list',3,'p_init_declarator_list_1','../pycparser/c_parser.py',736),
('init_declarator_list -> EQUALS initializer','init_declarator_list',2,'p_init_declarator_list_2','../pycparser/c_parser.py',746),
('init_declarator_list -> abstract_declarator','init_declarator_list',1,'p_init_declarator_list_3','../pycparser/c_parser.py',754),
('init_declarator -> declarator','init_declarator',1,'p_init_declarator','../pycparser/c_parser.py',762),
('init_declarator -> declarator EQUALS initializer','init_declarator',3,'p_init_declarator','../pycparser/c_parser.py',763),
('specifier_qualifier_list -> type_qualifier specifier_qualifier_list_opt','specifier_qualifier_list',2,'p_specifier_qualifier_list_1','../pycparser/c_parser.py',768),
('specifier_qualifier_list -> type_specifier specifier_qualifier_list_opt','specifier_qualifier_list',2,'p_specifier_qualifier_list_2','../pycparser/c_parser.py',773),
('struct_or_union_specifier -> struct_or_union ID','struct_or_union_specifier',2,'p_struct_or_union_specifier_1','../pycparser/c_parser.py',781),
('struct_or_union_specifier -> struct_or_union TYPEID','struct_or_union_specifier',2,'p_struct_or_union_specifier_1','../pycparser/c_parser.py',782),
('struct_or_union_specifier -> struct_or_union brace_open struct_declaration_list brace_close','struct_or_union_specifier',4,'p_struct_or_union_specifier_2','../pycparser/c_parser.py',791),
('struct_or_union_specifier -> struct_or_union ID brace_open struct_declaration_list brace_close','struct_or_union_specifier',5,'p_struct_or_union_specifier_3','../pycparser/c_parser.py',800),
('struct_or_union_specifier -> struct_or_union TYPEID brace_open struct_declaration_list brace_close','struct_or_union_specifier',5,'p_struct_or_union_specifier_3','../pycparser/c_parser.py',801),
('struct_or_union -> STRUCT','struct_or_union',1,'p_struct_or_union','../pycparser/c_parser.py',810),
('struct_or_union -> UNION','struct_or_union',1,'p_struct_or_union','../pycparser/c_parser.py',811),
('struct_declaration_list -> struct_declaration','struct_declaration_list',1,'p_struct_declaration_list','../pycparser/c_parser.py',818),
('struct_declaration_list -> struct_declaration_list struct_declaration','struct_declaration_list',2,'p_struct_declaration_list','../pycparser/c_parser.py',819),
('struct_declaration -> specifier_qualifier_list struct_declarator_list_opt SEMI','struct_declaration',3,'p_struct_declaration_1','../pycparser/c_parser.py',824),
('struct_declaration -> specifier_qualifier_list abstract_declarator SEMI','struct_declaration',3,'p_struct_declaration_2','../pycparser/c_parser.py',862),
('struct_declarator_list -> struct_declarator','struct_declarator_list',1,'p_struct_declarator_list','../pycparser/c_parser.py',876),
('struct_declarator_list -> struct_declarator_list COMMA struct_declarator','struct_declarator_list',3,'p_struct_declarator_list','../pycparser/c_parser.py',877),
('struct_declarator -> declarator','struct_declarator',1,'p_struct_declarator_1','../pycparser/c_parser.py',885),
('struct_declarator -> declarator COLON constant_expression','struct_declarator',3,'p_struct_declarator_2','../pycparser/c_parser.py',890),
('struct_declarator -> COLON constant_expression','struct_declarator',2,'p_struct_declarator_2','../pycparser/c_parser.py',891),
('enum_specifier -> ENUM ID','enum_specifier',2,'p_enum_specifier_1','../pycparser/c_parser.py',899),
('enum_specifier -> ENUM TYPEID','enum_specifier',2,'p_enum_specifier_1','../pycparser/c_parser.py',900),
('enum_specifier -> ENUM brace_open enumerator_list brace_close','enum_specifier',4,'p_enum_specifier_2','../pycparser/c_parser.py',905),
('enum_specifier -> ENUM ID brace_open enumerator_list brace_close','enum_specifier',5,'p_enum_specifier_3','../pycparser/c_parser.py',910),
('enum_specifier -> ENUM TYPEID brace_open enumerator_list brace_close','enum_specifier',5,'p_enum_specifier_3','../pycparser/c_parser.py',911),
('enumerator_list -> enumerator','enumerator_list',1,'p_enumerator_list','../pycparser/c_parser.py',916),
('enumerator_list -> enumerator_list COMMA','enumerator_list',2,'p_enumerator_list','../pycparser/c_parser.py',917),
('enumerator_list -> enumerator_list COMMA enumerator','enumerator_list',3,'p_enumerator_list','../pycparser/c_parser.py',918),
('enumerator -> ID','enumerator',1,'p_enumerator','../pycparser/c_parser.py',929),
('enumerator -> ID EQUALS constant_expression','enumerator',3,'p_enumerator','../pycparser/c_parser.py',930),
('declarator -> direct_declarator','declarator',1,'p_declarator_1','../pycparser/c_parser.py',945),
('declarator -> pointer direct_declarator','declarator',2,'p_declarator_2','../pycparser/c_parser.py',950),
('declarator -> pointer TYPEID','declarator',2,'p_declarator_3','../pycparser/c_parser.py',959),
('direct_declarator -> ID','direct_declarator',1,'p_direct_declarator_1','../pycparser/c_parser.py',970),
('direct_declarator -> LPAREN declarator RPAREN','direct_declarator',3,'p_direct_declarator_2','../pycparser/c_parser.py',979),
('direct_declarator -> direct_declarator LBRACKET type_qualifier_list_opt assignment_expression_opt RBRACKET','direct_declarator',5,'p_direct_declarator_3','../pycparser/c_parser.py',984),
('direct_declarator -> direct_declarator LBRACKET STATIC type_qualifier_list_opt assignment_expression RBRACKET','direct_declarator',6,'p_direct_declarator_4','../pycparser/c_parser.py',998),
('direct_declarator -> direct_declarator LBRACKET type_qualifier_list STATIC assignment_expression RBRACKET','direct_declarator',6,'p_direct_declarator_4','../pycparser/c_parser.py',999),
('direct_declarator -> direct_declarator LBRACKET type_qualifier_list_opt TIMES RBRACKET','direct_declarator',5,'p_direct_declarator_5','../pycparser/c_parser.py',1019),
('direct_declarator -> direct_declarator LPAREN parameter_type_list RPAREN','direct_declarator',4,'p_direct_declarator_6','../pycparser/c_parser.py',1030),
('direct_declarator -> direct_declarator LPAREN identifier_list_opt RPAREN','direct_declarator',4,'p_direct_declarator_6','../pycparser/c_parser.py',1031),
('pointer -> TIMES type_qualifier_list_opt','pointer',2,'p_pointer','../pycparser/c_parser.py',1058),
('pointer -> TIMES type_qualifier_list_opt pointer','pointer',3,'p_pointer','../pycparser/c_parser.py',1059),
('type_qualifier_list -> type_qualifier','type_qualifier_list',1,'p_type_qualifier_list','../pycparser/c_parser.py',1088),
('type_qualifier_list -> type_qualifier_list type_qualifier','type_qualifier_list',2,'p_type_qualifier_list','../pycparser/c_parser.py',1089),
('parameter_type_list -> parameter_list','parameter_type_list',1,'p_parameter_type_list','../pycparser/c_parser.py',1094),
('parameter_type_list -> parameter_list COMMA ELLIPSIS','parameter_type_list',3,'p_parameter_type_list','../pycparser/c_parser.py',1095),
('parameter_list -> parameter_declaration','parameter_list',1,'p_parameter_list','../pycparser/c_parser.py',1103),
('parameter_list -> parameter_list COMMA parameter_declaration','parameter_list',3,'p_parameter_list','../pycparser/c_parser.py',1104),
('parameter_declaration -> declaration_specifiers declarator','parameter_declaration',2,'p_parameter_declaration_1','../pycparser/c_parser.py',1113),
('parameter_declaration -> declaration_specifiers abstract_declarator_opt','parameter_declaration',2,'p_parameter_declaration_2','../pycparser/c_parser.py',1124),
('identifier_list -> identifier','identifier_list',1,'p_identifier_list','../pycparser/c_parser.py',1155),
('identifier_list -> identifier_list COMMA identifier','identifier_list',3,'p_identifier_list','../pycparser/c_parser.py',1156),
('initializer -> assignment_expression','initializer',1,'p_initializer_1','../pycparser/c_parser.py',1165),
('initializer -> brace_open initializer_list_opt brace_close','initializer',3,'p_initializer_2','../pycparser/c_parser.py',1170),
('initializer -> brace_open initializer_list COMMA brace_close','initializer',4,'p_initializer_2','../pycparser/c_parser.py',1171),
('initializer_list -> designation_opt initializer','initializer_list',2,'p_initializer_list','../pycparser/c_parser.py',1179),
('initializer_list -> initializer_list COMMA designation_opt initializer','initializer_list',4,'p_initializer_list','../pycparser/c_parser.py',1180),
('designation -> designator_list EQUALS','designation',2,'p_designation','../pycparser/c_parser.py',1191),
('designator_list -> designator','designator_list',1,'p_designator_list','../pycparser/c_parser.py',1199),
('designator_list -> designator_list designator','designator_list',2,'p_designator_list','../pycparser/c_parser.py',1200),
('designator -> LBRACKET constant_expression RBRACKET','designator',3,'p_designator','../pycparser/c_parser.py',1205),
('designator -> PERIOD identifier','designator',2,'p_designator','../pycparser/c_parser.py',1206),
('type_name -> specifier_qualifier_list abstract_declarator_opt','type_name',2,'p_type_name','../pycparser/c_parser.py',1211),
('abstract_declarator -> pointer','abstract_declarator',1,'p_abstract_declarator_1','../pycparser/c_parser.py',1228),
('abstract_declarator -> pointer direct_abstract_declarator','abstract_declarator',2,'p_abstract_declarator_2','../pycparser/c_parser.py',1236),
('abstract_declarator -> direct_abstract_declarator','abstract_declarator',1,'p_abstract_declarator_3','../pycparser/c_parser.py',1241),
('direct_abstract_declarator -> LPAREN abstract_declarator RPAREN','direct_abstract_declarator',3,'p_direct_abstract_declarator_1','../pycparser/c_parser.py',1251),
('direct_abstract_declarator -> direct_abstract_declarator LBRACKET assignment_expression_opt RBRACKET','direct_abstract_declarator',4,'p_direct_abstract_declarator_2','../pycparser/c_parser.py',1255),
('direct_abstract_declarator -> LBRACKET assignment_expression_opt RBRACKET','direct_abstract_declarator',3,'p_direct_abstract_declarator_3','../pycparser/c_parser.py',1266),
('direct_abstract_declarator -> direct_abstract_declarator LBRACKET TIMES RBRACKET','direct_abstract_declarator',4,'p_direct_abstract_declarator_4','../pycparser/c_parser.py',1275),
('direct_abstract_declarator -> LBRACKET TIMES RBRACKET','direct_abstract_declarator',3,'p_direct_abstract_declarator_5','../pycparser/c_parser.py',1286),
('direct_abstract_declarator -> direct_abstract_declarator LPAREN parameter_type_list_opt RPAREN','direct_abstract_declarator',4,'p_direct_abstract_declarator_6','../pycparser/c_parser.py',1295),
('direct_abstract_declarator -> LPAREN parameter_type_list_opt RPAREN','direct_abstract_declarator',3,'p_direct_abstract_declarator_7','../pycparser/c_parser.py',1305),
('block_item -> declaration','block_item',1,'p_block_item','../pycparser/c_parser.py',1316),
('block_item -> statement','block_item',1,'p_block_item','../pycparser/c_parser.py',1317),
('block_item_list -> block_item','block_item_list',1,'p_block_item_list','../pycparser/c_parser.py',1324),
('block_item_list -> block_item_list block_item','block_item_list',2,'p_block_item_list','../pycparser/c_parser.py',1325),
('compound_statement -> brace_open block_item_list_opt brace_close','compound_statement',3,'p_compound_statement_1','../pycparser/c_parser.py',1331),
('labeled_statement -> ID COLON statement','labeled_statement',3,'p_labeled_statement_1','../pycparser/c_parser.py',1337),
('labeled_statement -> CASE constant_expression COLON statement','labeled_statement',4,'p_labeled_statement_2','../pycparser/c_parser.py',1341),
('labeled_statement -> DEFAULT COLON statement','labeled_statement',3,'p_labeled_statement_3','../pycparser/c_parser.py',1345),
('selection_statement -> IF LPAREN expression RPAREN statement','selection_statement',5,'p_selection_statement_1','../pycparser/c_parser.py',1349),
('selection_statement -> IF LPAREN expression RPAREN statement ELSE statement','selection_statement',7,'p_selection_statement_2','../pycparser/c_parser.py',1353),
('selection_statement -> SWITCH LPAREN expression RPAREN statement','selection_statement',5,'p_selection_statement_3','../pycparser/c_parser.py',1357),
('iteration_statement -> WHILE LPAREN expression RPAREN statement','iteration_statement',5,'p_iteration_statement_1','../pycparser/c_parser.py',1362),
('iteration_statement -> DO statement WHILE LPAREN expression RPAREN SEMI','iteration_statement',7,'p_iteration_statement_2','../pycparser/c_parser.py',1366),
('iteration_statement -> FOR LPAREN expression_opt SEMI expression_opt SEMI expression_opt RPAREN statement','iteration_statement',9,'p_iteration_statement_3','../pycparser/c_parser.py',1370),
('iteration_statement -> FOR LPAREN declaration expression_opt SEMI expression_opt RPAREN statement','iteration_statement',8,'p_iteration_statement_4','../pycparser/c_parser.py',1374),
('jump_statement -> GOTO ID SEMI','jump_statement',3,'p_jump_statement_1','../pycparser/c_parser.py',1379),
('jump_statement -> BREAK SEMI','jump_statement',2,'p_jump_statement_2','../pycparser/c_parser.py',1383),
('jump_statement -> CONTINUE SEMI','jump_statement',2,'p_jump_statement_3','../pycparser/c_parser.py',1387),
('jump_statement -> RETURN expression SEMI','jump_statement',3,'p_jump_statement_4','../pycparser/c_parser.py',1391),
('jump_statement -> RETURN SEMI','jump_statement',2,'p_jump_statement_4','../pycparser/c_parser.py',1392),
('expression_statement -> expression_opt SEMI','expression_statement',2,'p_expression_statement','../pycparser/c_parser.py',1397),
('expression -> assignment_expression','expression',1,'p_expression','../pycparser/c_parser.py',1404),
('expression -> expression COMMA assignment_expression','expression',3,'p_expression','../pycparser/c_parser.py',1405),
('typedef_name -> TYPEID','typedef_name',1,'p_typedef_name','../pycparser/c_parser.py',1417),
('assignment_expression -> conditional_expression','assignment_expression',1,'p_assignment_expression','../pycparser/c_parser.py',1421),
('assignment_expression -> unary_expression assignment_operator assignment_expression','assignment_expression',3,'p_assignment_expression','../pycparser/c_parser.py',1422),
('assignment_operator -> EQUALS','assignment_operator',1,'p_assignment_operator','../pycparser/c_parser.py',1435),
('assignment_operator -> XOREQUAL','assignment_operator',1,'p_assignment_operator','../pycparser/c_parser.py',1436),
('assignment_operator -> TIMESEQUAL','assignment_operator',1,'p_assignment_operator','../pycparser/c_parser.py',1437),
('assignment_operator -> DIVEQUAL','assignment_operator',1,'p_assignment_operator','../pycparser/c_parser.py',1438),
('assignment_operator -> MODEQUAL','assignment_operator',1,'p_assignment_operator','../pycparser/c_parser.py',1439),
('assignment_operator -> PLUSEQUAL','assignment_operator',1,'p_assignment_operator','../pycparser/c_parser.py',1440),
('assignment_operator -> MINUSEQUAL','assignment_operator',1,'p_assignment_operator','../pycparser/c_parser.py',1441),
('assignment_operator -> LSHIFTEQUAL','assignment_operator',1,'p_assignment_operator','../pycparser/c_parser.py',1442),
('assignment_operator -> RSHIFTEQUAL','assignment_operator',1,'p_assignment_operator','../pycparser/c_parser.py',1443),
('assignment_operator -> ANDEQUAL','assignment_operator',1,'p_assignment_operator','../pycparser/c_parser.py',1444),
('assignment_operator -> OREQUAL','assignment_operator',1,'p_assignment_operator','../pycparser/c_parser.py',1445),
('constant_expression -> conditional_expression','constant_expression',1,'p_constant_expression','../pycparser/c_parser.py',1450),
('conditional_expression -> binary_expression','conditional_expression',1,'p_conditional_expression','../pycparser/c_parser.py',1454),
('conditional_expression -> binary_expression CONDOP expression COLON conditional_expression','conditional_expression',5,'p_conditional_expression','../pycparser/c_parser.py',1455),
('binary_expression -> cast_expression','binary_expression',1,'p_binary_expression','../pycparser/c_parser.py',1463),
('binary_expression -> binary_expression TIMES binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1464),
('binary_expression -> binary_expression DIVIDE binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1465),
('binary_expression -> binary_expression MOD binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1466),
('binary_expression -> binary_expression PLUS binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1467),
('binary_expression -> binary_expression MINUS binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1468),
('binary_expression -> binary_expression RSHIFT binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1469),
('binary_expression -> binary_expression LSHIFT binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1470),
('binary_expression -> binary_expression LT binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1471),
('binary_expression -> binary_expression LE binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1472),
('binary_expression -> binary_expression GE binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1473),
('binary_expression -> binary_expression GT binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1474),
('binary_expression -> binary_expression EQ binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1475),
('binary_expression -> binary_expression NE binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1476),
('binary_expression -> binary_expression AND binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1477),
('binary_expression -> binary_expression OR binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1478),
('binary_expression -> binary_expression XOR binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1479),
('binary_expression -> binary_expression LAND binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1480),
('binary_expression -> binary_expression LOR binary_expression','binary_expression',3,'p_binary_expression','../pycparser/c_parser.py',1481),
('cast_expression -> unary_expression','cast_expression',1,'p_cast_expression_1','../pycparser/c_parser.py',1489),
('cast_expression -> LPAREN type_name RPAREN cast_expression','cast_expression',4,'p_cast_expression_2','../pycparser/c_parser.py',1493),
('unary_expression -> postfix_expression','unary_expression',1,'p_unary_expression_1','../pycparser/c_parser.py',1497),
('unary_expression -> PLUSPLUS unary_expression','unary_expression',2,'p_unary_expression_2','../pycparser/c_parser.py',1501),
('unary_expression -> MINUSMINUS unary_expression','unary_expression',2,'p_unary_expression_2','../pycparser/c_parser.py',1502),
('unary_expression -> unary_operator cast_expression','unary_expression',2,'p_unary_expression_2','../pycparser/c_parser.py',1503),
('unary_expression -> SIZEOF unary_expression','unary_expression',2,'p_unary_expression_3','../pycparser/c_parser.py',1508),
('unary_expression -> SIZEOF LPAREN type_name RPAREN','unary_expression',4,'p_unary_expression_3','../pycparser/c_parser.py',1509),
('unary_operator -> AND','unary_operator',1,'p_unary_operator','../pycparser/c_parser.py',1517),
('unary_operator -> TIMES','unary_operator',1,'p_unary_operator','../pycparser/c_parser.py',1518),
('unary_operator -> PLUS','unary_operator',1,'p_unary_operator','../pycparser/c_parser.py',1519),
('unary_operator -> MINUS','unary_operator',1,'p_unary_operator','../pycparser/c_parser.py',1520),
('unary_operator -> NOT','unary_operator',1,'p_unary_operator','../pycparser/c_parser.py',1521),
('unary_operator -> LNOT','unary_operator',1,'p_unary_operator','../pycparser/c_parser.py',1522),
('postfix_expression -> primary_expression','postfix_expression',1,'p_postfix_expression_1','../pycparser/c_parser.py',1527),
('postfix_expression -> postfix_expression LBRACKET expression RBRACKET','postfix_expression',4,'p_postfix_expression_2','../pycparser/c_parser.py',1531),
('postfix_expression -> postfix_expression LPAREN argument_expression_list RPAREN','postfix_expression',4,'p_postfix_expression_3','../pycparser/c_parser.py',1535),
('postfix_expression -> postfix_expression LPAREN RPAREN','postfix_expression',3,'p_postfix_expression_3','../pycparser/c_parser.py',1536),
('postfix_expression -> postfix_expression PERIOD ID','postfix_expression',3,'p_postfix_expression_4','../pycparser/c_parser.py',1541),
('postfix_expression -> postfix_expression PERIOD TYPEID','postfix_expression',3,'p_postfix_expression_4','../pycparser/c_parser.py',1542),
('postfix_expression -> postfix_expression ARROW ID','postfix_expression',3,'p_postfix_expression_4','../pycparser/c_parser.py',1543),
('postfix_expression -> postfix_expression ARROW TYPEID','postfix_expression',3,'p_postfix_expression_4','../pycparser/c_parser.py',1544),
('postfix_expression -> postfix_expression PLUSPLUS','postfix_expression',2,'p_postfix_expression_5','../pycparser/c_parser.py',1550),
('postfix_expression -> postfix_expression MINUSMINUS','postfix_expression',2,'p_postfix_expression_5','../pycparser/c_parser.py',1551),
('postfix_expression -> LPAREN type_name RPAREN brace_open initializer_list brace_close','postfix_expression',6,'p_postfix_expression_6','../pycparser/c_parser.py',1556),
('postfix_expression -> LPAREN type_name RPAREN brace_open initializer_list COMMA brace_close','postfix_expression',7,'p_postfix_expression_6','../pycparser/c_parser.py',1557),
('primary_expression -> identifier','primary_expression',1,'p_primary_expression_1','../pycparser/c_parser.py',1562),
('primary_expression -> constant','primary_expression',1,'p_primary_expression_2','../pycparser/c_parser.py',1566),
('primary_expression -> unified_string_literal','primary_expression',1,'p_primary_expression_3','../pycparser/c_parser.py',1570),
('primary_expression -> unified_wstring_literal','primary_expression',1,'p_primary_expression_3','../pycparser/c_parser.py',1571),
('primary_expression -> LPAREN expression RPAREN','primary_expression',3,'p_primary_expression_4','../pycparser/c_parser.py',1576),
('primary_expression -> OFFSETOF LPAREN type_name COMMA identifier RPAREN','primary_expression',6,'p_primary_expression_5','../pycparser/c_parser.py',1580),
('argument_expression_list -> assignment_expression','argument_expression_list',1,'p_argument_expression_list','../pycparser/c_parser.py',1588),
('argument_expression_list -> argument_expression_list COMMA assignment_expression','argument_expression_list',3,'p_argument_expression_list','../pycparser/c_parser.py',1589),
('identifier -> ID','identifier',1,'p_identifier','../pycparser/c_parser.py',1598),
('constant -> INT_CONST_DEC','constant',1,'p_constant_1','../pycparser/c_parser.py',1602),
('constant -> INT_CONST_OCT','constant',1,'p_constant_1','../pycparser/c_parser.py',1603),
('constant -> INT_CONST_HEX','constant',1,'p_constant_1','../pycparser/c_parser.py',1604),
('constant -> INT_CONST_BIN','constant',1,'p_constant_1','../pycparser/c_parser.py',1605),
('constant -> FLOAT_CONST','constant',1,'p_constant_2','../pycparser/c_parser.py',1611),
('constant -> HEX_FLOAT_CONST','constant',1,'p_constant_2','../pycparser/c_parser.py',1612),
('constant -> CHAR_CONST','constant',1,'p_constant_3','../pycparser/c_parser.py',1618),
('constant -> WCHAR_CONST','constant',1,'p_constant_3','../pycparser/c_parser.py',1619),
('unified_string_literal -> STRING_LITERAL','unified_string_literal',1,'p_unified_string_literal','../pycparser/c_parser.py',1630),
('unified_string_literal -> unified_string_literal STRING_LITERAL','unified_string_literal',2,'p_unified_string_literal','../pycparser/c_parser.py',1631),
('unified_wstring_literal -> WSTRING_LITERAL','unified_wstring_literal',1,'p_unified_wstring_literal','../pycparser/c_parser.py',1641),
('unified_wstring_literal -> unified_wstring_literal WSTRING_LITERAL','unified_wstring_literal',2,'p_unified_wstring_literal','../pycparser/c_parser.py',1642),
('brace_open -> LBRACE','brace_open',1,'p_brace_open','../pycparser/c_parser.py',1652),
('brace_close -> RBRACE','brace_close',1,'p_brace_close','../pycparser/c_parser.py',1657),
('empty -> <empty>','empty',0,'p_empty','../pycparser/c_parser.py',1662),
]
| apache-2.0 |
Elandril/Sick-Beard | cherrypy/process/win32.py | 35 | 6047 | """Windows service. Requires pywin32."""
import os
import win32api
import win32con
import win32event
import win32service
import win32serviceutil
from cherrypy.process import wspbus, plugins
class ConsoleCtrlHandler(plugins.SimplePlugin):
    """A WSPBus plugin for handling Win32 console events (like Ctrl-C)."""

    def __init__(self, bus):
        # Tracks whether our handler is currently registered with Win32.
        self.is_set = False
        plugins.SimplePlugin.__init__(self, bus)

    def start(self):
        """Register self.handle as a Win32 console control handler."""
        if self.is_set:
            self.bus.log('Handler for console events already set.', level=40)
            return
        outcome = win32api.SetConsoleCtrlHandler(self.handle, 1)
        if outcome == 0:
            self.bus.log('Could not SetConsoleCtrlHandler (error %r)' %
                         win32api.GetLastError(), level=40)
        else:
            self.bus.log('Set handler for console events.', level=40)
            self.is_set = True

    def stop(self):
        """Unregister the console control handler, if registered."""
        if not self.is_set:
            self.bus.log('Handler for console events already off.', level=40)
            return
        try:
            outcome = win32api.SetConsoleCtrlHandler(self.handle, 0)
        except ValueError:
            # "ValueError: The object has not been registered"
            outcome = 1
        if outcome == 0:
            self.bus.log('Could not remove SetConsoleCtrlHandler (error %r)' %
                         win32api.GetLastError(), level=40)
        else:
            self.bus.log('Removed handler for console events.', level=40)
            self.is_set = False

    def handle(self, event):
        """Handle console control events (like Ctrl-C)."""
        shutdown_events = (win32con.CTRL_C_EVENT, win32con.CTRL_LOGOFF_EVENT,
                           win32con.CTRL_BREAK_EVENT,
                           win32con.CTRL_SHUTDOWN_EVENT,
                           win32con.CTRL_CLOSE_EVENT)
        if event not in shutdown_events:
            return 0
        self.bus.log('Console event %s: shutting down bus' % event)
        # Remove self immediately so repeated Ctrl-C doesn't re-call it.
        try:
            self.stop()
        except ValueError:
            pass
        self.bus.exit()
        # 'First to return True stops the calls'
        return 1
class Win32Bus(wspbus.Bus):
    """A Web Site Process Bus implementation for Win32.

    Instead of time.sleep, this bus blocks using native win32event objects.
    """

    def __init__(self):
        # Maps bus states to lazily-created win32 event handles.
        self.events = {}
        wspbus.Bus.__init__(self)

    def _get_state_event(self, state):
        """Return a win32event for the given state (creating it if needed)."""
        try:
            return self.events[state]
        except KeyError:
            # Auto-reset (bManualReset=0), initially non-signaled event,
            # named uniquely per state and per process.
            event = win32event.CreateEvent(None, 0, 0,
                                           "WSPBus %s Event (pid=%r)" %
                                           (state.name, os.getpid()))
            self.events[state] = event
            return event

    def _get_state(self):
        return self._state

    def _set_state(self, value):
        # Setting the state signals (pulses) the matching event so that
        # any thread blocked in wait() wakes up.
        self._state = value
        event = self._get_state_event(value)
        win32event.PulseEvent(event)
    state = property(_get_state, _set_state)

    def wait(self, state, interval=0.1, channel=None):
        """Wait for the given state(s), KeyboardInterrupt or SystemExit.

        Since this class uses native win32event objects, the interval
        argument is ignored.
        """
        if isinstance(state, (tuple, list)):
            # Don't wait for an event that beat us to the punch ;)
            if self.state not in state:
                events = tuple([self._get_state_event(s) for s in state])
                win32event.WaitForMultipleObjects(events, 0, win32event.INFINITE)
        else:
            # Don't wait for an event that beat us to the punch ;)
            if self.state != state:
                event = self._get_state_event(state)
                win32event.WaitForSingleObject(event, win32event.INFINITE)
class _ControlCodes(dict):
"""Control codes used to "signal" a service via ControlService.
User-defined control codes are in the range 128-255. We generally use
the standard Python value for the Linux signal and add 128. Example:
>>> signal.SIGUSR1
10
control_codes['graceful'] = 128 + 10
"""
def key_for(self, obj):
"""For the given value, return its corresponding key."""
for key, val in self.items():
if val is obj:
return key
raise ValueError("The given object could not be found: %r" % obj)
control_codes = _ControlCodes({'graceful': 138})
def signal_child(service, command):
    """Send *command* ('stop', 'restart', or a custom control-code name)
    to the given Windows service."""
    dispatch = {
        'stop': win32serviceutil.StopService,
        'restart': win32serviceutil.RestartService,
    }
    action = dispatch.get(command)
    if action is not None:
        action(service)
    else:
        # Custom commands map to user-defined control codes (128-255).
        win32serviceutil.ControlService(service, control_codes[command])
class PyWebService(win32serviceutil.ServiceFramework):
    """Python Web Service."""

    _svc_name_ = "Python Web Service"
    _svc_display_name_ = "Python Web Service"
    _svc_deps_ = None        # sequence of service names on which this depends
    _exe_name_ = "pywebsvc"
    _exe_args_ = None        # Default to no arguments

    # Only exists on Windows 2000 or later, ignored on windows NT
    _svc_description_ = "Python Web Service"

    def SvcDoRun(self):
        """Start the process bus and block until it stops."""
        from cherrypy import process
        process.bus.start()
        process.bus.block()

    def SvcStop(self):
        """Report a pending stop to the SCM and shut the bus down."""
        from cherrypy import process
        self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
        process.bus.exit()

    def SvcOther(self, control):
        """Publish any custom control code's channel name on the bus.

        Bug fix: ``process`` was referenced here without being imported
        (it is only imported locally in SvcDoRun/SvcStop), so receiving a
        custom control code raised NameError.
        """
        from cherrypy import process
        process.bus.publish(control_codes.key_for(control))
if __name__ == '__main__':
win32serviceutil.HandleCommandLine(PyWebService)
| gpl-3.0 |
joshgabriel/MPInterfaces | mpinterfaces/calibrate.py | 1 | 60446 | # coding: utf-8
# Copyright (c) Henniggroup.
# Distributed under the terms of the MIT License.
from __future__ import division, print_function, unicode_literals, \
absolute_import
"""
Calibration module:
This module contains the classes for
1. Calibrate: Base class for specifying the parameters for
calibration and setting up the VASP jobs in directory
structure according to
2. CalibrateBulk: calibrating a periodic bulk structure,
3. CalibrateSlab: creates a slab of given crystallographic facet,
thickness and vacuum spacing,
3. CalibrateMolecule: creates a molecule in a box
4. CalibrateInterface: calibrates an interface composed of slab plus
molecule
The attribute turn_knobs controls the parameters to be calibrated
for a given structure
"""
from six.moves import range
import sys
import os
import re
import datetime
from itertools import product
from collections import OrderedDict
import numpy as np
from pymatgen import Lattice
from pymatgen.core.structure import Structure
from pymatgen.core.surface import SlabGenerator
from pymatgen.io.vasp.inputs import Incar, Poscar
from pymatgen.io.vasp.inputs import Potcar, Kpoints
from pymatgen.io.vasp.outputs import Outcar, Vasprun
from pymatgen.symmetry.bandstructure import HighSymmKpath
from custodian.custodian import Custodian
from monty.json import MSONable, MontyEncoder
from monty.serialization import dumpfn
from mpinterfaces.instrument import MPINTVaspInputSet, MPINTVaspJob
from mpinterfaces.interface import Interface, Ligand
from mpinterfaces.utils import get_ase_slab, get_magmom_string, get_magmom_afm, \
get_magmom_mae, get_magmom_init, print_exception,get_defo_structure
from mpinterfaces.mat2d.electronic_structure import get_2D_hse_kpoints,\
get_2D_incar_hse_prep, get_2D_incar_hse
from mpinterfaces.default_logger import get_default_logger
__author__ = "Kiran Mathew, Joshua J. Gabriel"
__copyright__ = "Copyright 2017, Henniggroup"
__maintainer__ = "Joshua J. Gabriel"
__email__ = "[email protected]"
__status__ = "Production"
__date__ = "March 3, 2017"
logger = get_default_logger(__name__)
class Calibrate(MSONable):
"""
The base class for creating vasp work flows for
calibrating the input parameters for different systems
A wrapper around Custodian
"""
LOG_FILE = "calibrate.json"
def __init__(self, incar, poscar, potcar, kpoints, system=None,
is_matrix=False, Grid_type='A',
parent_job_dir='.', job_dir='Job',
qadapter=None, job_cmd='qsub', wait=True,
mappings_override=None, pseudopotential="PBE",
database=None, magnetism=None, mag_init=None, reuse=None,
reuse_override=None, reuse_incar=None, solvation=None,
turn_knobs=OrderedDict([('ENCUT', []),
('KPOINTS', [])]),
checkpoint_file=None, finer_kpoint=None, cal_logger=None,
test=False,incar_remove=None):
"""
Calibrate constructor
Args:
incar (Incar object): input INCAR
poscar (Poscar object): input POSCAR
potcar (Potcar object): input POTCAR
kpoints (Kpoints object): input KPOINTS
system: system info as a dictionary,
slab or interface example:
system={'hkl':[1,1,1], 'ligand':None},
is_matrix (bool): whether the jobs are dependent on each
other
Grid_type (str): kpoints grid_type
parent_job_dir (str): the directory from which all the
jobs are launched
job_dir (str): job directory created for each job in the
parent_job_dir
qadapter (?): adapter for the batch system
job_cmd (str): command to be used for submitting the job. If
qadapter is specified then job_cmd is ignored
wait (bool): whther to wait for the job to finish. If the job is
being submitted to the queue then there is no need for
waiting
turn_knobs (dict): an ordered dictionary of parmaters and the
corresponding values
mappings_override (dict): override symbol mapping in potcar
eg:- {'S':'S_sv'}
pseudopotential (str): pseudopotential set
database (str): A work in progress, will be a database_name.yaml
file for defaults specific to a database workflow
that will have defaults for
INCAR: cutoff, convergence for relaxation and
continuation jobs
KPOINTS: for relaxation, band structure jobs
POTCAR: database specific
For now defaults to None, if set to 'twod'
activates twod set of directives
reuse (list or bool): list of filenames for reuse
Eg: ['CHGCAR', 'WAVECAR']
'CONTCAR' is copied by default and if found empty
warning is issued. Use the following flag for override
only if you know what you are doing
'True' for just copying the CONTCAR file
reuse_override (bool): whether to override the missing CONTCAR for a
reuse calc
magnetism (str): specifies magnetism calculation to be used
implemented are 'AntiFerroMagnetism' and
'Magntic Anisotropy Energy'
solvation (bool): whether to activate a solvation job, sets LSOL=True
for now
Calibrate jobs represent the engine configuration of mpinterfaces,
where the fuel (input file sources) and driving method (kind of calculation)
are decided . The Engine itself is instrument.py which creates the input set
configured in Calibrate.
Current fueling methods:
1. simplest test case involving a single job:
- specify the incar, kpoints, poscar, potcar (aka the VASP 4)
explicitly as pymatgen objects
- turn_knobs = {} , is_matrix = False
2. test case for calibration of parameters:
- specify an initial configuration for the VASP 4
- specify parameters to calibrate via turn_knobs,
set is_matrix = True only if number of parameters > 1
3. Database production case: (possibly most used)
- specify initial configuration for the VASP 4 based on
a database.yaml
- specify an input.yaml that details the workflow
Note: input structure if needed will be obtained from the
provided poscar object
"""
self.name = datetime.datetime.now().isoformat()
self.system = system
self.parent_job_dir = os.path.abspath(parent_job_dir)
self.job_dir = job_dir
self.incar = incar
self.poscar = poscar
self.potcar = potcar
self.test = test
if poscar:
if mappings_overload:
maps = [mappings_overload[s] for s in poscar.site_symbols]
else:
maps = poscar.site_symbols
self.potcar = Potcar(symbols=maps,
functional=pseudopotential)
self.kpoints = kpoints
if incar:
self.incar_orig = incar.as_dict()
if poscar:
self.poscar_orig = poscar.as_dict()
if self.potcar:
self.potcar_orig = self.potcar.as_dict()
if kpoints:
self.kpoints_orig = kpoints.as_dict()
self.qadapter = qadapter
self.job_dir_list = []
self.jobs = []
self.job_ids = []
self.handlers = []
self.job_cmd = job_cmd
self.n_atoms = 0
self.turn_knobs = turn_knobs
self.response_to_knobs = {}
self.sorted_response_to_knobs = {}
for k, v in turn_knobs.items():
self.response_to_knobs[k] = {}
self.sorted_response_to_knobs[k] = {}
self.is_matrix = is_matrix
self.Grid_type = Grid_type
self.wait = wait
self.cal_log = []
self.mappings_override = mappings_override
self.database = database
self.magnetism = magnetism
self.mag_init = mag_init
self.solvation = solvation
self.reuse = reuse
self.reuse_incar = reuse_incar
self.incar_remove = incar_remove
self.reuse_override = reuse_override
self.reuse_paths = None # list object communicated to instrument
self.finer_kpoint = finer_kpoint
self.pseudopotential = pseudopotential
self.checkpoint_file = checkpoint_file
if cal_logger:
self.logger = cal_logger
else:
self.logger = logger
def setup(self):
"""
set up the jobs for the given turn_knobs dict
is_matrix = True implies that the params in the dict are
interrelated. Otherwise calcs corresponding to each dict key
is independent
"""
if self.is_matrix:
self.setup_matrix_job()
else:
self._setup()
def _setup(self, turn_knobs=None):
"""
invoke the set up methods corresponding to the dict keys
any key other than KPOINTS, VOLUME and POTCAR are treated
as INCAR parameters
Args:
turn_knobs: knobs aka paramters to be tuned
Note: poscar jobs setup through the VOLUME is only for
backward compatibility, use POSCAR key in the
turn_knobs to tune poscars
"""
if turn_knobs is None:
turn_knobs = self.turn_knobs
if any(turn_knobs.values()):
for k, v in turn_knobs.items():
if k == 'POSCAR' and v:
if 'VOLUME' in list(self.turn_knobs.keys()):
self.setup_poscar_jobs(scale_list=self.turn_knobs['VOLUME'],\
poscar_list=v)
#del self.turn_knobs['VOLUME']
elif 'STRAINS' in list(self.turn_knobs.keys()):
self.setup_poscar_jobs(scale_list=self.turn_knobs['STRAINS'],\
poscar_list=v)
del self.turn_knobs['STRAINS']
else:
self.setup_poscar_jobs(poscar_list=v)
elif k == 'KPOINTS' and v:
self.setup_kpoints_jobs(kpoints_list=v)
elif k == 'VOLUME' and v:
self.setup_poscar_jobs(scale_list=v)
elif k == 'STRAINS' and v:
self.setup_poscar_jobs(scale_list=v)
elif k == 'POTCAR' and v:
self.setup_potcar_jobs(mappings=v, functional_list=None)
elif k == 'POTCAR_pseudopotential' and v:
self.setup_potcar_jobs(mappings=None,functional_list=v)
elif k == 'FUNCTIONAL' and v:
self.setup_incar_jobs(k,v)
else:
self.setup_incar_jobs(k, v)
else:
self.logger.warn('knobs not set, running a single job')
self.add_job(name='single_job', job_dir=self.job_dir)
def setup_matrix_job(self):
"""
set up jobs where the dict keys are interrelated
mind: its an ordered dict, the order in which the keys
are specified determines the nested directory structure
"""
orig_job_dir = self.job_dir
job_dir = self.job_dir
n_items = len(list(self.turn_knobs.items()))
keys = list(self.turn_knobs.keys())
#print (keys)
if 'POSCAR' in keys and 'STRAINS' in keys and len(keys)==2:
self._setup(turn_knobs=dict([('POSCAR',
self.turn_knobs['POSCAR'])]))
else:
#print ('Else here', keys[0])
#self._setup(turn_knobs=dict([(keys[0],
# self.turn_knobs[keys[0]])]))
self.recursive_jobs(n_items, keys, 0)
# restore
self.job_dir = orig_job_dir
    def recursive_jobs(self, n, keys, i):
        """
        recursively setup the jobs: used by setup_matrix_job

        Builds a flat, double-underscore-joined directory name, one
        component per knob value, descending one knob per recursion level.

        Args:
            n: total number of knobs aka parameters to be tuned
            keys: list of knobs i.e parameter names
            i: ith knob
        """
        # Derive this level's directory prefix from the current job_dir:
        # first try splitting on '/', falling back to splitting on '__'.
        # NOTE(review): the bare except means any failure in the first
        # join silently falls through to the second -- confirm intended.
        try:
            job_dir = '__'.join(
                [self.job_dir.split('/')[-1], self.key_to_name(keys[i])])
        except:
            job_dir = '__'.join(
                [self.job_dir.split('__')[-1], self.key_to_name(keys[i])])
        if i == n - 1 and i != 0:
            # innermost knob (of a multi-knob matrix): set up and register
            # one job per value
            for val in self.turn_knobs[keys[i]]:
                self.job_dir = '__'.join([job_dir, self.val_to_name(val)])
                self.logger.info(
                    'setting jobs in the directory: ' + self.job_dir)
                self._setup(turn_knobs=dict([(keys[i], [val])]))
                self.add_job(name=job_dir, job_dir=self.job_dir)
        elif i==0 and len(list(self.turn_knobs.keys()))==1:
            # single-knob case: _setup itself is responsible for adding
            # jobs (e.g. POSCAR plus poscar transform handled together)
            for val in self.turn_knobs[keys[i]]:
                self.job_dir = '__'.join([job_dir, self.val_to_name(val)])
                self.logger.info(
                    'setting jobs in the directory: ' + self.job_dir)
                self._setup(turn_knobs=dict([(keys[i], [val])]))
        else:
            # intermediate knob: set up this value and recurse into the
            # next knob
            for val in self.turn_knobs[keys[i]]:
                self.job_dir = '__'.join([job_dir, self.val_to_name(val)])
                self.logger.info(
                    'setting jobs in the directory: ' + self.job_dir)
                self._setup(turn_knobs=dict([(keys[i], [val])]))
                self.recursive_jobs(n, keys, i + 1)
def key_to_name(self, key):
"""
convenient string mapping for the keys in the turn_knobs dict
Args:
key: key to the knob dict
Returns:
an appropriate string representation of the key so that
the name doesnt clash with the filenames
"""
if key == 'KPOINTS':
return 'KPTS'
elif key == 'POTCAR_map' or key == 'POTCAR_pseudopotential':
return 'POT'
elif key == 'POSCAR':
return 'POS'
else:
return key
def val_to_name(self, val):
"""
convert a value to a string so that it can be used for naming
the job directory
the decimal points in floats are replaced with underscore
character
if the value is of type list, kpoint_to_name method is used
since
only kpoint values are expected to be of type list
if the values is of type dict then potcar_to_name method is
invoked
Args:
val: knob value to be converted into an appropriate string
representation
Returns:
a string filename for the value
"""
if isinstance(val, float):
return re.sub('\.', '_', str(val))
elif isinstance(val, list):
return self.kpoint_to_name(val, 'M')
elif isinstance(val, dict):
return self.potcar_to_name(mapping=val)
elif isinstance(val, Poscar):
name = ''.join((val.comment).split()) + '_'+ \
str(val.structure.composition.reduced_formula)
#+ '_' + str(int(val.structure.lattice.volume))
return name.replace('\\', '_').replace('(', '_').replace(')', '_')
else:
return str(val)
def kpoint_to_name(self, kpoint, grid_type):
"""
get a string representation for the given kpoint
Args:
kpoint: an iterable
grid_type: grid_type used for the KPOINTS
Returns:
string representation for kpoint eg: Monkhorst Pack
2 2 2 will be named 2x2x2
"""
if grid_type == 'M' or grid_type == 'G':
kpoint = [str(k).replace('.', '_') for k in kpoint]
return 'x'.join(kpoint)
else:
return str(kpoint)
def potcar_to_name(self, mapping=None, pseudopotential=None):
"""
convert a symbol mapping and pseudopotential to a name that
can be used for setting up the potcar jobs
Args:
mapping: example:- if mapping = {'Pt':'Pt_pv',
'Si':'Si_GW'} then the name will be PBE_Pt_pv_Si_GW
with self.pseudopotential="PBE"
Returns:
string
"""
if mapping:
l = [v for k, v in mapping.items()]
return '_'.join(l)
elif functional:
return '_'.join(functional)
else:
return '_'.join(self.pseudopotential)
def set_incar(self, param, val):
"""
set the incar paramter, param = val
"""
print (param, val)
self.incar[param] = val
#print (self.incar)
def set_functional(self,val):
"""
"""
if val == 'PBE':
print ('PBE')
self.logger.info('setting PBE as functional')
elif val == 'PBEsol':
print ('PS')
self.logger.info('setting PBEsol as functional')
func_dict = {'GGA':'PS'}
self.incar.update(func_dict)
elif val == 'vdW-OPTB88':
print ('vdW')
self.logger.info('setting vdW-OPTB88 as functional')
func_dict = {\
'AGGAC': 0.0,
'GGA': 'BO',
'LUSE_VDW': True,
'PARAM1': 0.1833333333,
'PARAM2': 0.22}
self.incar.update(func_dict)
elif val == 'SCAN':
print ('SCAN')
self.logger.info('setting vdW-OPTB88 as functional')
func_dict = {\
'METAGGA': 'SCAN'}
self.incar.update(func_dict)
print (self.incar)
    def set_poscar(self, scale=None, poscar=None):
        """
        perturbs given structure by volume scaling factor
        or takes user defined variants of Poscar

        Args:
            scale : Volume Scaling parameter; either a plain number
                (volume multiplier) or a two-element list
                ['N11'|'N22', strain] handled by get_defo_structure
                -- TODO confirm the expected list shape against callers
            poscar : Poscar object of user defined structure

        set the poscar: volume scaled by the scale factor
        """
        if scale is not None:
            try:
                structure = self.poscar.structure
            except:
                # fall back to the snapshot taken in __init__ if
                # self.poscar has no usable structure
                print (print_exception())
                structure = Poscar.from_dict(self.poscar_orig).structure
            # case 1: plain number -> scale the lattice to scale * volume
            if type(scale)!=list:
                volume = structure.volume
                structure.scale_lattice(scale * volume)
                self.poscar=Poscar(structure,comment='Volume_{}'.format(scale))
            # case 2: ['N11'|'N22', strain] -> apply a deformation
            elif scale[0]=='N11' or scale[0]=='N22':
                self.poscar=\
                get_defo_structure(structure,strain=scale[1],strain_direction=scale[0])
        elif poscar is not None:
            self.poscar = poscar
def set_potcar(self, mapping=None, pseudopotential='PBE'):
"""
set the potcar: symbol to potcar type mapping
"""
symbols = self.poscar.site_symbols
mapped_symbols = []
if mapping:
for sym in symbols:
mapped_symbols.append(mapping[sym])
elif self.mappings_override:
for sym in symbols:
if sym in list(self.mappings_override.keys()):
mapped_symbols.append(self.mappings_override[sym])
else:
mapped_symbols.append(sym)
else:
mapped_symbols = symbols
if pseudopotential in ['PBE','LDA']:
func = pseudopotential
else:
func = self.pseudopotential
print ('setting potcar from', mapped_symbols)
self.potcar = Potcar(symbols=mapped_symbols,
functional=func)
    def set_kpoints(self, kpoint=None, poscar=None, ibzkpth=None):
        """
        Set self.kpoints according to self.Grid_type (and, for database
        runs, self.database).

        Args:
            kpoint: grid specification; meaning depends on Grid_type
                (mesh list, density number, line-mode divisions, ...)
            poscar: Poscar whose structure is used for density-based
                grids; defaults to self.poscar
            ibzkpth: path to an IBZKPT file, used only for the
                'hse_bands_2D' grid type
        """
        # useful to check if a poscar is supplied from setup_poscar_jobs (most often the case)
        # or this is a single poscar use case
        if not poscar:
            poscar = self.poscar
        # splitting into two if elif branches means fewer if statements to check on
        # a run
        # Most general method of setting the k-points for
        # different grid types
        # NOTE: requires that at least one k-points value be passed
        # as a turn - knobs list value
        # this is not true for values that may be caculated out of
        # a database
        # use this part only if this is a non-database run for example
        # for k-points calibration
        if not self.database:
            if self.Grid_type == 'M':
                self.kpoints = Kpoints.monkhorst_automatic(kpts=kpoint)
            elif self.Grid_type == 'A':
                self.kpoints = Kpoints.automatic(subdivisions=kpoint)
            elif self.Grid_type == 'G':
                self.kpoints = Kpoints.gamma_automatic(kpts=kpoint)
            elif self.Grid_type == '3D_vol':
                self.kpoints = Kpoints.automatic_density_by_vol(structure=poscar.structure,
                                                                kppvol=kpoint)
            elif self.Grid_type == 'bulk_bands_pbe':
                self.kpoints = Kpoints.automatic_linemode(divisions=kpoint,
                                                          ibz=HighSymmKpath(
                                                              poscar.structure))
            elif self.Grid_type == 'D':
                self.kpoints = Kpoints.automatic_density(structure=poscar.structure,kppa=kpoint)
            elif self.Grid_type == 'Finer_G_Mesh':
                # kpoint is the scaling factor and self.kpoints is the old kpoint mesh
                self.logger.info('Setting Finer G Mesh for {0} by scale {1}'.
                                 format(kpoint, self.finer_kpoint))
                self.kpoints = Kpoints.gamma_automatic(kpts = \
                   [i * self.finer_kpoint for i in kpoint])
                self.logger.info('Finished scaling operation of k-mesh')
            elif self.Grid_type == 'TwoD':
                self.kpoints = Kpoints.automatic_density(structure=poscar.structure,kppa=kpoint)
                kpoint_dict = self.kpoints.as_dict()
                kpoint_dict['kpoints'][0][2] = 1  # remove z kpoints
                self.kpoints = Kpoints.from_dict(kpoint_dict)
            elif self.Grid_type == 'DG':
                self.kpoints = Kpoints.automatic_gamma_density(structure=poscar.structure,kppa=kpoint)
        # applicable for database runs
        # future constructs or settinsg can be activated via a yaml file
        # database yaml file or better still the input deck from its speification
        # decides what combination of input calibrate constructor settings to use
        # one of them being the grid_type tag
        elif self.database == 'twod':
            # set of kpoints settings according to the 2D database profile
            # the actual settings of k-points density
            # will in future come from any database input file set
            if self.Grid_type == 'hse_bands_2D_prep':
                kpoint_dict = Kpoints.automatic_gamma_density(poscar.structure,
                                                              200).as_dict()
                kpoint_dict['kpoints'][0][2] = 1  # remove z kpoints
                self.kpoints = Kpoints.from_dict(kpoint_dict)
            elif self.Grid_type == 'hse_bands_2D':
                # can at most return the path to the correct kpoints file
                # needs kpoints to be written out in instrument in a different way
                # not using the Kpoints object
                self.kpoints = get_2D_hse_kpoints(poscar.structure, ibzkpth)
            elif self.Grid_type == 'bands_2D':
                kpoint_dict = Kpoints.automatic_linemode(divisions=20,
                                                         ibz=HighSymmKpath(poscar.structure)).as_dict()
                self.kpoints = Kpoints.from_dict(kpoint_dict)
            elif self.Grid_type == 'relax_2D':
                # general relaxation settings for 2D
                kpoint_dict = Kpoints.automatic_gamma_density(poscar.structure,
                                                              1000).as_dict()
                kpoint_dict['kpoints'][0][2] = 1
                self.kpoints = Kpoints.from_dict(kpoint_dict)
            elif self.Grid_type == 'relax_3D':
                # general relaxation settings for 3D
                # NOTE(review): automatic_gamma_density returns a Kpoints
                # object here (no .as_dict()), so from_dict receives a
                # Kpoints instance -- likely a latent bug; confirm.
                kpoint_dict = Kpoints.automatic_gamma_density(
                    poscar.structure, 1000)
                self.kpoints = Kpoints.from_dict(kpoint_dict)
def setup_incar_jobs(self, param, val_list):
"""
set up incar jobs,, calls set_incar to set the value to param
Args:
param: Name of INCAR parameter
val_list: List of values to vary for the param
"""
if val_list != ['2D_default'] and param!='FUNCTIONAL':
for val in val_list:
self.logger.info('setting INCAR parameter ' + param + ' = '
+ str(val))
self.set_incar(param, val)
if not self.is_matrix:
job_dir = self.job_dir + os.sep + \
param + os.sep + self.val_to_name(val)
self.add_job(name=job_dir, job_dir=job_dir)
# print ('add job called')
elif param == 'FUNCTIONAL':
for val in val_list:
self.set_functional(val)
else:
self.logger.warn('incar list empty')
def setup_kpoints_jobs(self, kpoints_list=None):
"""
setup the kpoint jobs
"""
if kpoints_list:
for kpoint in kpoints_list:
self.set_kpoints(kpoint)
if not self.is_matrix:
job_dir = self.job_dir + os.sep + self.key_to_name(
'KPOINTS') \
+ os.sep + self.kpoint_to_name(kpoint,
self.Grid_type)
self.add_job(name=job_dir, job_dir=job_dir)
#print ('add job called')
else:
self.logger.warn('kpoints_list empty')
def setup_poscar_jobs(self, scale_list=None, poscar_list=None):
"""
for scaling the latice vectors of the original structure,
scale_list is volume scaling factor list
"""
incar_init = self.incar
incar_remove = self.incar_remove
if scale_list and not poscar_list:
for scale in scale_list:
self.set_poscar(scale=scale)
self.set_potcar()
if not self.is_matrix:
job_dir = self.job_dir + os.sep + 'POS' + \
os.sep + 'VOLUME_' + str(scale)
self.add_job(name=job_dir, job_dir=job_dir)
#print ('add job called')
elif poscar_list:
if not scale_list:
scale_list = [[1.0]]
for pos in poscar_list:
for n,scale in enumerate(scale_list):
print ('setting volume scaling to cell as ', scale)
# if it is a twod_database run or any general standard database run,
# the incar, kpoints and potcar follow a standard input set
# which will be activated by the twod_database tag set to true
# NOTE: this implementation means that the first turn_knobs tag
# needs to be the poscar objects list
# the database tag will be set to the name of the yaml file with the
# standard input deck definition for that database
# this incar dict provided as the init can be general format
# based on the chosen functional, cutoff
# so self.incar is a vdW incar for re-relaxation in vdW, gga for every
# other calculation or LDA+U for LSDA+U calculations
incar_dict = incar_init
#print (incar_dict)
if self.reuse:
# if this is a true list minimally, ['CONTCAR']
# it is to be ensured that the poscar list is a
# list of paths as opposed to list of poscar objects by the turn knobs
# values
# Here pos is the path and r in each self.reuse is the name of the file(s)
# to be reused
# in a reuse calculation the following are possible:
# update incar (Solvation calculations) or reset incar (HSE calculations)
# reset kpoints file with IBZKPT
# copy a CHGCAR or WAVECAR or both perhaps
try:
# first setup of POSCAR initial, INCAR, KPOINTS
self.poscar = Poscar.from_file(pos + os.sep + 'CONTCAR')
self.set_poscar(scale=scale)
#print ('Transform',scale)
if scale_list[0] == 1.0 and len(scale_list)==1:
self.job_dir = pos.split('/')[-1]
else:
# think that transform is unnecessary
#self.job_dir = pos.split('/')[-1]+'_TRANSFORM_'+\
# str(scale).replace('.','_').replace("[",'').\
# replace("]",'')
self.job_dir = pos.split('/')[-1]+'_'+\
self.kpoint_to_name(scale,grid_type='G')
potcar = Potcar.from_file(pos + os.sep + 'POTCAR').as_dict()
poscar = self.poscar
#kpoints = Kpoints.from_file(pos+os.sep+'KPOINTS')
self.logger.info('Read previous relaxed CONTCAR file from {}'.
format(pos))
# check if it is KPOINTS altering job like HSE
if self.Grid_type == 'hse_bands_2D_prep':
# HSE prep calcualtions
# reset the INCAR file with a magmom only if exists
try:
incar_dict = {
'MAGMOM': get_magmom_string(poscar)}
except:
incar_dict = {}
incar_dict = get_2D_incar_hse_prep(incar_dict)
self.set_kpoints(poscar=poscar)
self.logger.info(
'updated input set for HSE 2D prep calcaultion')
elif self.Grid_type == 'hse_bands_2D':
# HSE calculation
# reset the incar and kpoints file builds
# on the preceding calculations (prep calculation)
# IBZKPT
try:
incar_dict = {
'MAGMOM': get_magmom_string(poscar)}
except:
incar_dict = {}
incar_dict = get_2D_incar_hse(incar_dict)
self.set_kpoints(poscar=poscar,
ibzkpth=pos + os.sep + 'IBZKPT')
self.logger.info('updated input set for HSE calcaultion\
using IBZKPT from {0}'.format(pos + os.sep + 'IBZKPT'))
elif self.Grid_type == 'hse_bands':
# general HSE bands
pass
elif self.Grid_type == 'Finer_G_Mesh':
self.logger.info('updating to Finer G Mesh')
kpoint = Kpoints.from_file(pos+os.sep+'KPOINTS')
self.set_kpoints(kpoint=kpoint.kpts[0])
else:
# use the same kpoints file and build from the old
# incar
self.kpoints = Kpoints.from_file(
pos + os.sep + 'KPOINTS')
# decide on how to use incar, use same one or
# update or afresh
if self.reuse_incar == 'old':
# reuse same incar with no updates done to it
incar_dict = Incar.from_file(
pos + os.sep + 'INCAR').as_dict()
elif self.reuse_incar == 'update':
# reuse same incar but with updates done to it
self.logger.info('updating incar at {}'.format(pos))
incar_dict_init = Incar.from_file(pos + os.sep + 'INCAR')
#print ('Reading INCAR from directory ', incar_dict_init)
#print ('What should be updating', incar_dict)
incar_dict_init.update(incar_dict)
incar_dict = incar_dict_init
#print ('Final update')
#print (incar_dict)
elif self.reuse_incar == 'update_remove':
self.logger.info('updating incar at {}'.format(pos))
incar_dict_init = Incar.from_file(pos + os.sep + 'INCAR')
print (incar_dict_init)
incar_dict_init.update(incar_dict)
for i in self.incar_remove:
print (i)
del incar_dict_init[i]
incar_dict = incar_dict_init
else:
# use a fresh incar as specified by the init
# way to go for example for LDAU or other
# major removals done to INCAR
# but always retain the MAGMOM if present
old_incar_dict = Incar.from_file(
pos + os.sep + 'INCAR').as_dict()
if 'MAGMOM' in old_incar_dict.keys():
incar_dict['MAGMOM'] = old_incar_dict[
'MAGMOM']
else:
incar_dict = incar_dict
if isinstance(self.reuse, list):
# for files to be reused: example CHGCAR, WAVECAR, etc.
reuse_paths = [
pos + os.sep + r for r in self.reuse]
self.reuse_paths = reuse_paths
# Magnetism use cases, updates to be made to the INCAR (MAE)
# and poscar (AFM)
# MAE and AFM
#print ('Here')
if self.magnetism == 'MAE':
# remove vdW tags for MAE calculations
vdW_tags = ('GGA', 'AGGAC', 'LUSE_VDW',
'PARAM1', 'PARAM2')
for key in vdW_tags:
if key in incar_dict:
del incar_dict[key]
self.logger.info(
'updating input set for MAE calculation')
self.mag_init = Outcar(
pos + os.sep + 'OUTCAR').total_mag
nbands = 2 * \
Vasprun(pos + os.sep +
'vasprun.xml').parameters['NBANDS']
# u_value = Vasprun(pos+os.sep+'vasprun.xml').incar['LDAUU']
# u_value = 4.0
self.logger.info(
"updating mag mom with value {0}".format(self.mag_init))
self.logger.info(
"updating NBANDS with {0}".format(nbands))
incar_dict.update({'NBANDS': nbands,
'LSORBIT': True,
'EDIFF': 1e-08,
'ICHARG': 11,
'LMAXMIX': 4,
'LCHARG': False,
'ISYM': 0,
'NSW': 0,
'ISPIN': 2,
'IBRION': -1,
'LORBIT': 11,
'MAGMOM': get_magmom_mae(poscar, self.mag_init)
})
# incar_dict.update({'LDAUU': u_value})
elif self.magnetism == 'AFM':
self.logger.info(
'updating INCAR and POSCAR for AFM calculation')
afm, poscar = get_magmom_afm(poscar, self.database)
self.set_poscar(poscar=poscar)
incar_dict.update({'MAGMOM': afm})
elif self.magnetism == 'Relaxed':
self.logger.info(
'updating INCAR with the total magnetization obtained in the relaxed state')
try:
out_mag = Outcar(
pos + os.sep + 'OUTCAR')
if out_mag.magnetization:
#print ('reading tot')
self.mag_init = [i['tot'] for i in out_mag.magnetization]
else:
#print ('reading total mag')
mag_tot = out_mag.total_mag
self.mag_init = mag_tot/len(poscar.structure.sites)
incar_dict.update({'MAGMOM':get_magmom_init(poscar, self.mag_init)})
except:
self.logger.info('no mag relaxed')
elif self.magnetism == 'Remove':
try:
del incar_dict['MAGMOM']
incar_dict.update({'ISPIN':1})
self.logger.info('Removed magnetism settings')
except:
self.logger.info('No previous magnetism settings')
except:
# check what to do if the previous calculation being reused is not
# actuall done .. system exit or adopt a user override
# with POSCAR
print (print_exception())
self.logger.warn(
'Empty relaxed CONTCAR file .. Probably job not done')
if not self.reuse_override:
self.logger.warn(
'You can set reuse_override to continue with POSCAR file, exiting now ..')
sys.exit(0)
else:
self.logger.info('Using old Poscar for rerun')
poscar = Poscar.from_file(pos + os.sep + 'POSCAR')
self.incar = Incar.from_dict(incar_dict)
# case for non - reuse
else:
poscar = pos
# temporary: magnetism only set if twod flag is activated
if self.database == 'twod':
incar_dict.update(
{'MAGMOM': get_magmom_string(poscar)})
self.set_kpoints(poscar=poscar)
#self.incar = Incar.from_dict(incar_dict)
# Long term solution for magmom initialization
if self.magnetism == 'Init_by_file':
self.logger.info('Updating magmom from input mag_inits.yaml')
if 'LSORBIT' in list(incar_dict.keys()):
magmom = get_magmom_init(poscar,is_spin_orbit=True)
else:
magmom = get_magmom_init(poscar)
incar_dict.update({'MAGMOM': magmom})
elif self.magnetism == 'General_Init':
self.logger.info('Updating magmom with transition metal as 6.0 \
everything else as 0.5')
incar_dict.update(\
{'MAGMOM': get_magmom_string(poscar.structure),\
'ISPIN': 2})
self.incar = Incar.from_dict(incar_dict)
self.poscar = poscar
#self.set_poscar(poscar=poscar)
if not self.reuse:
self.set_potcar()
else:
self.potcar = Potcar.from_dict(potcar)
if not self.is_matrix:
job_dir = self.job_dir + os.sep + 'POS' + \
os.sep + self.val_to_name(poscar)
self.add_job(name=job_dir, job_dir=job_dir)
#elif self.is_matrix and scale_list[0]!=[1.0]:
#print ('adding poscar and volume job')
# self.add_job(name=self.job_dir, job_dir=self.job_dir)
#print ('set job dir', self.job_dir)
def setup_potcar_jobs(self, mappings, functional_list):
"""
take a list of symbol mappings and setup the potcar jobs
"""
if functional_list:
for func in functional_list:
self.set_potcar(pseudopotential=func)
if not self.is_matrix:
job_dir = self.job_dir + os.sep \
+ self.key_to_name('POTCAR') \
+ os.sep + self.potcar_to_name(func)
self.add_job(name=job_dir, job_dir=job_dir)
elif mappings:
for mapping in mappings:
self.set_potcar(mapping)
if not self.is_matrix:
job_dir = self.job_dir + os.sep \
+ self.key_to_name('POTCAR') \
+ os.sep + self.potcar_to_name(mapping)
self.add_job(name=job_dir, job_dir=job_dir)
def add_job(self, name='noname', job_dir='.'):
"""
add a single job using the current incar, poscar, potcar and
kpoints
"""
#print ('call add job')
vis = MPINTVaspInputSet(name, self.incar, self.poscar,
self.kpoints, self.potcar,
self.qadapter, vis_logger=self.logger,
reuse_path=self.reuse_paths, test=self.test)
# the job command can be overrridden in the run method
job = MPINTVaspJob(self.job_cmd, name=name, final=True,
parent_job_dir=self.parent_job_dir,
job_dir=job_dir, vis=vis, wait=self.wait,
vjob_logger=self.logger)
self.job_dir_list.append(os.path.abspath(job_dir))
self.jobs.append(job)
def run(self, job_cmd=None):
"""
run the vasp jobs through custodian
if the job list is empty,
run a single job with the initial input set
"""
for j in self.jobs:
if job_cmd is not None:
j.job_cmd = job_cmd
else:
j.job_cmd = self.job_cmd
c_params = {'jobs': [j.as_dict() for j in self.jobs],
'handlers': [h.as_dict() for h in self.handlers],
'max_errors': 5}
c = Custodian(self.handlers, self.jobs, max_errors=5)
c.run()
for j in self.jobs:
self.cal_log.append({"job": j.as_dict(),
'job_id': j.job_id,
"corrections": [],
'final_energy': None})
self.job_ids.append(j.job_id)
if self.checkpoint_file:
dumpfn(self.cal_log, self.checkpoint_file,
cls=MontyEncoder, indent=4)
else:
dumpfn(self.cal_log, Calibrate.LOG_FILE, cls=MontyEncoder,
indent=4)
def as_dict(self):
qadapter = None
system = None
if self.qadapter:
qadapter = self.qadapter.to_dict()
if self.system is not None:
system = self.system
d = dict(incar=self.incar.as_dict(),
poscar=self.poscar.as_dict(),
potcar=self.potcar.as_dict(),
kpoints=self.kpoints.as_dict(),
system=system, is_matrix=self.is_matrix,
Grid_type=self.Grid_type,
parent_job_dir=self.parent_job_dir,
job_dir=self.job_dir,
qadapter=qadapter, job_cmd=self.job_cmd,
wait=self.wait,
turn_knobs=self.turn_knobs,
job_dir_list=self.job_dir_list,
job_ids=self.job_ids)
d["@module"] = self.__class__.__module__
d["@class"] = self.__class__.__name__
# d['calibrate'] = self.__class__.__name__
return d
    @classmethod
    def from_dict(cls, d):
        """Recreate a calibration object from its as_dict() representation.

        NOTE(review): this always constructs the Calibrate base class, not
        ``cls`` -- subclasses deserialize as plain Calibrate. Confirm
        whether that is intentional before changing it (subclass
        constructors take different arguments).
        NOTE(review): as_dict() stores ``qadapter`` via ``to_dict()`` but
        here the raw dict is passed straight to the constructor -- verify
        the constructor accepts that form.
        """
        incar = Incar.from_dict(d["incar"])
        poscar = Poscar.from_dict(d["poscar"])
        potcar = Potcar.from_dict(d["potcar"])
        kpoints = Kpoints.from_dict(d["kpoints"])
        cal = Calibrate(incar, poscar, potcar, kpoints,
                        system=d["system"], is_matrix=d["is_matrix"],
                        Grid_type=d["Grid_type"],
                        parent_job_dir=d["parent_job_dir"],
                        job_dir=d["job_dir"], qadapter=d.get("qadapter"),
                        job_cmd=d["job_cmd"], wait=d["wait"],
                        turn_knobs=d["turn_knobs"])
        cal.job_dir_list = d["job_dir_list"]
        cal.job_ids = d["job_ids"]
        return cal
class CalibrateMolecule(Calibrate):
    """
    Calibrate parameters for Molecule calculations
    """

    def __init__(self, incar, poscar, potcar, kpoints, system=None,
                 is_matrix=False, Grid_type='A',
                 parent_job_dir='.',
                 job_dir='./Molecule', qadapter=None,
                 job_cmd='qsub', wait=True,
                 mappings_override=None, pseudopotential="PBE",
                 turn_knobs=None,
                 checkpoint_file=None, cal_logger=None):
        # BUGFIX: the default used to be a mutable dict literal shared by
        # every instance; use None and build a fresh dict per call instead.
        if turn_knobs is None:
            turn_knobs = {'ENCUT': [], 'KPOINTS': []}
        Calibrate.__init__(self, incar, poscar, potcar, kpoints,
                           system=system, is_matrix=is_matrix,
                           Grid_type=Grid_type,
                           parent_job_dir=parent_job_dir,
                           job_dir=job_dir, qadapter=qadapter,
                           job_cmd=job_cmd, wait=wait,
                           mappings_override=mappings_override,
                           pseudopotential=pseudopotential,
                           turn_knobs=turn_knobs,
                           checkpoint_file=checkpoint_file,
                           cal_logger=cal_logger)

    def setup_kpoints_jobs(self, Grid_type='M',
                           kpoints_list=None, conv_step=1):
        """
        K-point convergence is meaningless for an isolated molecule, so
        a single 1x1x1 mesh is always used regardless of the arguments.
        """
        self.logger.warn("Its a molecule ! no need for kpoint convergence")
        self.kpoints = Kpoints.monkhorst_automatic(kpts=[1, 1, 1])
        return
class CalibrateBulk(Calibrate):
    """
    Calibrate parameters for Bulk calculations
    """

    def __init__(self, incar, poscar, potcar, kpoints, system=None,
                 is_matrix=False, Grid_type='A',
                 parent_job_dir='.',
                 job_dir='./Bulk', qadapter=None,
                 job_cmd='qsub', wait=True,
                 mappings_override=None, pseudopotential="PBE",
                 turn_knobs=None,
                 checkpoint_file=None, cal_logger=None, test=False):
        # BUGFIX: avoid a mutable default dict shared across instances;
        # a fresh dict is created per call when the caller omits it.
        if turn_knobs is None:
            turn_knobs = {'ENCUT': [], 'KPOINTS': []}
        Calibrate.__init__(self, incar, poscar, potcar, kpoints,
                           system=system, is_matrix=is_matrix,
                           Grid_type=Grid_type,
                           parent_job_dir=parent_job_dir,
                           job_dir=job_dir, qadapter=qadapter,
                           job_cmd=job_cmd, wait=wait,
                           mappings_override=mappings_override,
                           pseudopotential=pseudopotential,
                           # preserve knob insertion order downstream
                           turn_knobs=OrderedDict(turn_knobs),
                           checkpoint_file=checkpoint_file,
                           cal_logger=cal_logger, test=test)
class CalibrateSlab(Calibrate):
    """
    Calibrate parameters for Slab calculations
    """

    def __init__(self, incar, poscar, potcar, kpoints, system=None,
                 is_matrix=False, Grid_type='A',
                 parent_job_dir='.', job_dir='./Slab',
                 qadapter=None, job_cmd='qsub', wait=True,
                 mappings_override=None, pseudopotential="PBE",
                 turn_knobs=None,
                 from_ase=False, checkpoint_file=None,
                 cal_logger=None, test=False):
        # BUGFIX: the previous default was the mutable dict
        # {'VACUUM': [], 'THICKNESS': []} shared by all instances.
        # slab_setup() mutates turn_knobs in place (deletes those keys and
        # adds 'POSCAR'), so the shared default leaked state between
        # instances. A fresh dict is now created per call.
        if turn_knobs is None:
            turn_knobs = {'VACUUM': [], 'THICKNESS': []}
        self.from_ase = from_ase
        self.is_matrix = is_matrix
        self.system = system
        # pristine copy of the input structure; slabs are cut from it
        self.input_structure = poscar.structure.copy()
        # converts the VACUUM/THICKNESS knobs into a list of slab POSCARs
        self.slab_setup(turn_knobs=turn_knobs)
        Calibrate.__init__(self, incar, poscar, potcar, kpoints,
                           system=system, is_matrix=is_matrix,
                           Grid_type=Grid_type,
                           parent_job_dir=parent_job_dir,
                           job_dir=job_dir, qadapter=qadapter,
                           job_cmd=job_cmd, wait=wait,
                           mappings_override=mappings_override,
                           pseudopotential=pseudopotential,
                           turn_knobs=turn_knobs,
                           checkpoint_file=checkpoint_file,
                           cal_logger=cal_logger, test=test)

    def slab_setup(self, turn_knobs=None):
        """
        invoke the set up methods corresponding to the dict keys:
        VACUUM and THICKNESS
        sets the POSCAR key in the turn_knobs
        """
        if turn_knobs is None:
            turn_knobs = self.turn_knobs
        if any(turn_knobs.values()):
            keys = ['VACUUM', 'THICKNESS']
            poscar_list = []
            if self.is_matrix:
                # every combination of vacuum and thickness values
                prod_list = [turn_knobs[k] for k in keys]
                for params in product(*tuple(prod_list)):
                    poscar_list.append(self.create_slab(*params))
            else:
                for k, v in turn_knobs.items():
                    if k == 'VACUUM' and v:
                        poscar_list += self.setup_vacuum_jobs(v)
                    elif k == 'THICKNESS' and v:
                        poscar_list += self.setup_thickness_jobs(v)
            # replace the geometric knobs by the generated POSCARs
            for k in keys:
                if turn_knobs.get(k):
                    del turn_knobs[k]
            turn_knobs['POSCAR'] = poscar_list

    def setup_vacuum_jobs(self, vacuum_list):
        """
        create slabs with the provided vacuum settings

        returns list of poscars
        """
        return [self.create_slab(vacuum=val) for val in vacuum_list]

    def setup_thickness_jobs(self, thickness_list):
        """
        create slabs with the provided thickness settings

        returns list of poscars
        """
        return [self.create_slab(thickness=val) for val in thickness_list]

    def create_slab(self, vacuum=12, thickness=10):
        """
        set the vacuum spacing, slab thickness and call sd_flags
        for top 2 layers

        returns the poscar corresponding to the modified structure
        """
        strt_structure = self.input_structure.copy()
        if self.from_ase:
            slab_struct = get_ase_slab(strt_structure, hkl=self.system['hkl'],
                                       min_thick=thickness, min_vac=vacuum)
        else:
            slab_struct = SlabGenerator(initial_structure=strt_structure,
                                        miller_index=self.system['hkl'],
                                        min_slab_size=thickness,
                                        min_vacuum_size=vacuum,
                                        lll_reduce=False, center_slab=True,
                                        primitive=False).get_slab()
        slab_struct.sort()
        sd = self.set_sd_flags(slab_struct)
        comment = 'VAC' + str(vacuum) + 'THICK' + str(thickness)
        return Poscar(slab_struct, comment=comment,
                      selective_dynamics=sd)

    @staticmethod
    def set_sd_flags(interface=None, n_layers=2, top=True, bottom=True):
        """
        set the relaxation flags for top and bottom layers of interface.

        The upper and lower bounds of the z coordinate are determined
        based on the slab. All layers above and below the bounds will
        be relaxed. This means if there is a ligand on top of the slab,
        all of its atoms will also be relaxed.
        """
        sd_flags = np.zeros_like(interface.frac_coords)
        if isinstance(interface, Interface):
            slab = interface.slab
        else:
            slab = interface
        z_coords = interface.frac_coords[:, 2]
        z_coords_slab = slab.frac_coords[:, 2]
        z_lower_bound = None
        z_upper_bound = None
        if bottom:
            # n-th distinct z value from the bottom bounds the relaxed region
            z_lower_bound = np.unique(z_coords_slab)[n_layers - 1]
            sd_flags[np.where(z_coords <= z_lower_bound)] = np.ones((1, 3))
        if top:
            # n-th distinct z value from the top bounds the relaxed region
            z_upper_bound = np.unique(z_coords_slab)[-n_layers]
            sd_flags[np.where(z_coords >= z_upper_bound)] = np.ones((1, 3))
        return sd_flags.tolist()

    def set_reconstructed_surface(self, sites_to_add):
        """
        Append sites as needed for reconstruction TODO
        """
        pass
class CalibrateInterface(CalibrateSlab):
    """
    Calibrate parameters for interface calculations
    """

    def __init__(self, incar, poscar, potcar, kpoints, system=None,
                 is_matrix=False, Grid_type='A',
                 parent_job_dir='.', job_dir='./Interface',
                 qadapter=None, job_cmd='qsub', wait=True,
                 mappings_override=None, pseudopotential="PBE",
                 turn_knobs=None,
                 from_ase=False, checkpoint_file=None,
                 cal_logger=None):
        # BUGFIX: avoid the shared mutable default dict; it is mutated by
        # slab_setup()/interface_setup(), leaking state across instances.
        if turn_knobs is None:
            turn_knobs = {'VACUUM': [], 'THICKNESS': []}
        CalibrateSlab.__init__(self, incar, poscar, potcar, kpoints,
                               system=system, is_matrix=is_matrix,
                               Grid_type=Grid_type,
                               parent_job_dir=parent_job_dir,
                               job_dir=job_dir, qadapter=qadapter,
                               job_cmd=job_cmd, wait=wait,
                               mappings_override=mappings_override,
                               pseudopotential=pseudopotential,
                               turn_knobs=turn_knobs,
                               from_ase=from_ase,
                               checkpoint_file=checkpoint_file,
                               cal_logger=cal_logger)
        self.interface_setup(turn_knobs=turn_knobs)

    def interface_setup(self, turn_knobs=None):
        """Generate the ligand-interface POSCAR when a ligand is set."""
        if not self.system.get('ligand'):
            return
        else:
            if turn_knobs is None:
                turn_knobs = self.turn_knobs
            if any(turn_knobs.values()):
                poscar_list = []
                poscar_list.append(self.create_interface())
                turn_knobs['POSCAR'] = poscar_list

    def create_interface(self):
        """
        add params that you want to vary
        """
        structure = self.input_structure.copy()
        # NOTE(review): sorted() on a structure-like object returns a plain
        # list of sites, not an Interface -- this looks like it should be
        # Interface(...) followed by .sort(); confirm before relying on it.
        iface = sorted(Interface(structure,
                                 hkl=self.system['hkl'],
                                 ligand=Ligand.from_dict(
                                     self.system['ligand']),
                                 from_ase=self.from_ase))
        sd = self.set_sd_flags(iface, n_layers=2)
        # if there are other parameters that are being varied
        # change the comment accordingly
        # NOTE(review): elsewhere in this file system['hkl'] is a list;
        # list + str would raise TypeError here -- confirm expected type.
        comment = self.system['hkl'] + self.system['ligand']['name']
        return Poscar(iface, comment=comment,
                      selective_dynamics=sd)
if __name__ == '__main__':
    # Demo/smoke-test run: build a Pt fcc primitive cell and set up a
    # slab calibration matrix with a harmless job command.
    # STRUCTURE
    a0 = 3.965  # presumably the Pt lattice parameter in Angstrom -- TODO confirm
    lvec = [[0.5, 0.0, 0.5], [0.5, 0.5, 0.0], [0.0, 0.5, 0.5]]
    lvec = np.array(lvec) * a0
    lattice = Lattice(lvec)
    structure = Structure(lattice, ['Pt'], [[0.0, 0.0, 0.0]],
                          coords_are_cartesian=False)
    # INITIAL VASP INPUT SET
    incarparams = {'System': 'test',
                   'ENCUT': 400,
                   'ISMEAR': 1,
                   'SIGMA': 0.1,
                   'EDIFF': 1E-6}
    incar = Incar(params=incarparams)
    poscar = Poscar(structure, comment='test')
    potcar = Potcar(symbols=poscar.site_symbols, functional='PBE',
                    sym_potcar_map=None)
    kpoints = Kpoints.monkhorst_automatic(kpts=(16, 16, 16),
                                          shift=(0, 0, 0))
    # CALIBRATION INPUT
    system = {'hkl': [1, 1, 1], 'ligand': None}
    # knobs to vary; VACUUM/THICKNESS are consumed by CalibrateSlab
    turn_knobs = OrderedDict([
        ('SIGMA', [0.025, 0.50]),
        ('POTCAR', [{'Pt': 'Pt'}, {'Pt': 'Pt_pv'}, {'Pt': 'Pt_GW'}]),
        ('IBRION', [1, 2, 3]),
        ('KPOINTS', [k for k in range(20, 40, 10)]),
        ('ENCUT', list(range(400, 700, 100))),
        ('VACUUM', [10, 12, 15]),
        ('THICKNESS', [11])
    ])
    is_matrix = True
    job_dir = 'Slab'
    # 'ls -lt' stands in for a real VASP launch command in this demo
    job_cmd = ['ls', '-lt']
    # SETUP AND RUN
    cal = CalibrateSlab(incar, poscar, potcar, kpoints,
                        system=system,
                        is_matrix=is_matrix,
                        job_dir=job_dir,
                        turn_knobs=turn_knobs, test=True)
    cal.setup()
    cal.run(job_cmd)
| mit |
tedder/ansible | lib/ansible/modules/web_infrastructure/sophos_utm/utm_proxy_exception.py | 31 | 7499 | #!/usr/bin/python
# Copyright: (c) 2018, Sebastian Schenzel <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
# Standard Ansible metadata: module is in 'preview' maturity and is
# maintained by the community (not the Ansible core team).
ANSIBLE_METADATA = {
    'metadata_version': '1.1',
    'status': ['preview'],
    'supported_by': 'community'
}
DOCUMENTATION = """
---
module: utm_proxy_exception
author:
- Sebastian Schenzel (@RickS-C137)
short_description: Create, update or destroy reverse_proxy exception entry in Sophos UTM
description:
- Create, update or destroy a reverse_proxy exception entry in SOPHOS UTM.
- This module needs to have the REST Ability of the UTM to be activated.
version_added: "2.8"
options:
name:
description:
- The name of the object. Will be used to identify the entry
required: True
type: str
op:
description:
- The operand to be used with the entries of the path parameter
default: 'AND'
choices:
- 'AND'
- 'OR'
required: False
type: str
path:
description:
- The paths the exception in the reverse proxy is defined for
type: list
default: []
required: False
skip_custom_threats_filters:
description:
- A list of threats to be skipped
type: list
default: []
required: False
skip_threats_filter_categories:
description:
- Define which categories of threats are skipped
type: list
default: []
required: False
skipav:
description:
- Skip the Antivirus Scanning
default: False
type: bool
required: False
skipbadclients:
description:
- Block clients with bad reputation
default: False
type: bool
required: False
skipcookie:
description:
- Skip the Cookie Signing check
default: False
type: bool
required: False
skipform:
description:
- Enable form hardening
default: False
type: bool
required: False
skipform_missingtoken:
description:
- Enable form hardening with missing tokens
default: False
type: bool
required: False
skiphtmlrewrite:
description:
- Protection against SQL
default: False
type: bool
required: False
skiptft:
description:
- Enable true file type control
default: False
type: bool
required: False
skipurl:
description:
- Enable static URL hardening
default: False
type: bool
required: False
source:
description:
- Define which categories of threats are skipped
type: list
default: []
required: False
status:
description:
- Status of the exception rule set
default: True
type: bool
required: False
extends_documentation_fragment:
- utm
"""
EXAMPLES = """
- name: Create UTM proxy_exception
utm_proxy_exception:
utm_host: sophos.host.name
utm_token: abcdefghijklmno1234
name: TestExceptionEntry
backend: REF_OBJECT_STRING
state: present
- name: Remove UTM proxy_exception
utm_proxy_exception:
utm_host: sophos.host.name
utm_token: abcdefghijklmno1234
name: TestExceptionEntry
state: absent
"""
RETURN = """
result:
description: The utm object that was created
returned: success
type: complex
contains:
_ref:
description: The reference name of the object
type: string
_locked:
description: Whether or not the object is currently locked
type: boolean
_type:
description: The type of the object
type: string
name:
description: The name of the object
type: string
comment:
description: The optional comment string
op:
description: The operand to be used with the entries of the path parameter
type: string
path:
description: The paths the exception in the reverse proxy is defined for
type: array
skip_custom_threats_filters:
description: A list of threats to be skipped
type: array
skip_threats_filter_categories:
description: Define which categories of threats are skipped
type: array
skipav:
description: Skip the Antivirus Scanning
type: bool
skipbadclients:
description: Block clients with bad reputation
type: bool
skipcookie:
description: Skip the Cookie Signing check
type: bool
skipform:
description: Enable form hardening
type: bool
skipform_missingtoken:
description: Enable form hardening with missing tokens
type: bool
skiphtmlrewrite:
description: Protection against SQL
type: bool
skiptft:
description: Enable true file type control
type: bool
skipurl:
description: Enable static URL hardening
type: bool
source:
description: Define which categories of threats are skipped
type: array
"""
from ansible.module_utils.utm_utils import UTM, UTMModule
from ansible.module_utils._text import to_native
def main():
    """Declare the module's argument spec and apply the requested state
    to the reverse_proxy/exception endpoint of the UTM REST API."""
    endpoint = "reverse_proxy/exception"
    # attributes compared against the remote object to decide 'changed'
    key_to_check_for_changes = ["op", "path", "skip_custom_threats_filters", "skip_threats_filter_categories", "skipav",
                                "comment", "skipbadclients", "skipcookie", "skipform", "status", "skipform_missingtoken",
                                "skiphtmlrewrite", "skiptft", "skipurl", "source"]
    module = UTMModule(
        argument_spec=dict(
            name=dict(type='str', required=True),
            op=dict(type='str', required=False, default='AND', choices=['AND', 'OR']),
            # BUGFIX: 'str' (not 'string') is the valid element type name
            # in Ansible argument specs
            path=dict(type='list', elements='str', required=False, default=[]),
            skip_custom_threats_filters=dict(type='list', elements='str', required=False, default=[]),
            skip_threats_filter_categories=dict(type='list', elements='str', required=False, default=[]),
            skipav=dict(type='bool', required=False, default=False),
            skipbadclients=dict(type='bool', required=False, default=False),
            skipcookie=dict(type='bool', required=False, default=False),
            skipform=dict(type='bool', required=False, default=False),
            skipform_missingtoken=dict(type='bool', required=False, default=False),
            skiphtmlrewrite=dict(type='bool', required=False, default=False),
            skiptft=dict(type='bool', required=False, default=False),
            skipurl=dict(type='bool', required=False, default=False),
            source=dict(type='list', elements='str', required=False, default=[]),
            status=dict(type='bool', required=False, default=True),
        )
    )
    try:
        UTM(module, endpoint, key_to_check_for_changes).execute()
    except Exception as e:
        module.fail_json(msg=to_native(e))


if __name__ == '__main__':
    main()
| gpl-3.0 |
Pexego/odoo | openerp/tools/amount_to_text_en.py | 441 | 5103 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import logging
from translate import _
_logger = logging.getLogger(__name__)
#-------------------------------------------------------------
#ENGLISH
#-------------------------------------------------------------
to_19 = ( 'Zero', 'One', 'Two', 'Three', 'Four', 'Five', 'Six',
'Seven', 'Eight', 'Nine', 'Ten', 'Eleven', 'Twelve', 'Thirteen',
'Fourteen', 'Fifteen', 'Sixteen', 'Seventeen', 'Eighteen', 'Nineteen' )
tens = ( 'Twenty', 'Thirty', 'Forty', 'Fifty', 'Sixty', 'Seventy', 'Eighty', 'Ninety')
denom = ( '',
'Thousand', 'Million', 'Billion', 'Trillion', 'Quadrillion',
'Quintillion', 'Sextillion', 'Septillion', 'Octillion', 'Nonillion',
'Decillion', 'Undecillion', 'Duodecillion', 'Tredecillion', 'Quattuordecillion',
'Sexdecillion', 'Septendecillion', 'Octodecillion', 'Novemdecillion', 'Vigintillion' )
def _convert_nn(val):
"""convert a value < 100 to English.
"""
if val < 20:
return to_19[val]
for (dcap, dval) in ((k, 20 + (10 * v)) for (v, k) in enumerate(tens)):
if dval + 10 > val:
if val % 10:
return dcap + '-' + to_19[val % 10]
return dcap
def _convert_nnn(val):
    """Spell out 0 <= val < 1000 in English words.

    Split into hundreds and the remainder below one hundred; join the
    non-empty pieces with a single space ('' for val == 0).
    """
    hundreds, below_hundred = divmod(val, 100)
    parts = []
    if hundreds > 0:
        parts.append(to_19[hundreds] + ' Hundred')
    if below_hundred > 0:
        parts.append(_convert_nn(below_hundred))
    return ' '.join(parts)
def english_number(val):
    """Spell out a non-negative integer in English words."""
    if val < 100:
        return _convert_nn(val)
    if val < 1000:
        return _convert_nnn(val)
    # find the largest power of 1000 not exceeding val
    for power in range(len(denom)):
        if 1000 ** (power + 1) > val:
            unit = 1000 ** power
            leading = val // unit
            remainder = val - leading * unit
            words = _convert_nnn(leading) + ' ' + denom[power]
            if remainder > 0:
                words += ', ' + english_number(remainder)
            return words
def amount_to_text(number, currency):
    """Build the English wording of *number* followed by *currency*.

    The integer part and the cents (two decimals) are each spelled out
    with english_number(); empty pieces are dropped from the result.
    """
    units_name = currency
    integer_part, cents_part = ('%.2f' % number).split('.')
    start_word = english_number(int(integer_part))
    end_word = english_number(int(cents_part))
    cents_number = int(cents_part)
    cents_name = 'Cents' if cents_number > 1 else 'Cent'
    pieces = [start_word, units_name,
              (start_word or units_name) and (end_word or cents_name) and 'and',
              end_word, cents_name]
    return ' '.join(filter(None, pieces))
#-------------------------------------------------------------
# Generic functions
#-------------------------------------------------------------
# Registry mapping a language code to its conversion function. Only
# English is implemented; the generic amount_to_text() below falls
# back to 'en' for unknown codes.
_translate_funcs = {'en' : amount_to_text}
#TODO: we should use the country AND language (ex: septante VS soixante dix)
#TODO: we should use en by default, but the translation func is yet to be implemented
def amount_to_text(nbr, lang='en', currency='euro'):
    """Convert a number to its textual representation in the given language.

    Falls back to English when no translation function exists for *lang*.
    Note: this definition shadows the English-specific ``amount_to_text``
    above; that one remains reachable only through ``_translate_funcs``.

    :param nbr: amount to convert (its absolute value is used)
    :param lang: language code selecting the translation function
    :param currency: currency name appended to the amount
    :return: textual representation of ``abs(nbr)``

    Example::

        1654: One Thousand, Six Hundred Fifty-Four euro ...
    """
    # (removed: unused 'import openerp.loglevels' and dead commented-out
    # size-limit code)
    if lang not in _translate_funcs:  # 'in' instead of py2-only has_key()
        _logger.warning(_("no translation function found for lang: '%s'"), lang)
        #TODO: (default should be en) same as above
        lang = 'en'
    return _translate_funcs[lang](abs(nbr), currency)
if __name__=='__main__':
from sys import argv
lang = 'nl'
if len(argv) < 2:
for i in range(1,200):
print i, ">>", int_to_text(i, lang)
for i in range(200,999999,139):
print i, ">>", int_to_text(i, lang)
else:
print int_to_text(int(argv[1]), lang)
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
cloudbase/cinder | cinder/tests/unit/volume/drivers/emc/scaleio/test_consistencygroups.py | 6 | 10098 | # Copyright (c) 2013 - 2016 EMC Corporation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import json
import mock
from cinder import context
from cinder.tests.unit.consistencygroup import fake_consistencygroup
from cinder.tests.unit import fake_constants as fake
from cinder.tests.unit import fake_snapshot
from cinder.tests.unit import fake_volume
from cinder.tests.unit.volume.drivers.emc import scaleio
from cinder.tests.unit.volume.drivers.emc.scaleio import mocks
class TestConsistencyGroups(scaleio.TestScaleIODriver):
    """Test cases for ``ScaleIODriver consistency groups support``"""

    def setUp(self):
        """Setup a test case environment.

        Creates a fake volume object and sets up the required API responses.
        """
        super(TestConsistencyGroups, self).setUp()
        self.ctx = context.RequestContext('fake', 'fake', auth_token=True)
        self.consistency_group = (
            fake_consistencygroup.fake_consistencyobject_obj(
                self.ctx, **{'id': fake.CONSISTENCY_GROUP_ID}))
        fake_volume1 = fake_volume.fake_volume_obj(
            self.ctx,
            **{'id': fake.VOLUME_ID, 'provider_id': fake.PROVIDER_ID})
        fake_volume2 = fake_volume.fake_volume_obj(
            self.ctx,
            **{'id': fake.VOLUME2_ID, 'provider_id': fake.PROVIDER2_ID})
        fake_volume3 = fake_volume.fake_volume_obj(
            self.ctx,
            **{'id': fake.VOLUME3_ID, 'provider_id': fake.PROVIDER3_ID})
        fake_volume4 = fake_volume.fake_volume_obj(
            self.ctx,
            **{'id': fake.VOLUME4_ID, 'provider_id': fake.PROVIDER4_ID})
        self.volumes = [fake_volume1, fake_volume2]
        self.volumes2 = [fake_volume3, fake_volume4]
        fake_snapshot1 = fake_snapshot.fake_snapshot_obj(
            self.ctx,
            **{'id': fake.SNAPSHOT_ID, 'volume_id': fake.VOLUME_ID,
               'volume': fake_volume1})
        fake_snapshot2 = fake_snapshot.fake_snapshot_obj(
            self.ctx,
            **{'id': fake.SNAPSHOT2_ID, 'volume_id': fake.VOLUME2_ID, 'volume':
                fake_volume2})
        self.snapshots = [fake_snapshot1, fake_snapshot2]
        self.snapshot_reply = json.dumps({
            'volumeIdList': ['sid1', 'sid2'],
            'snapshotGroupId': 'sgid1'})
        self.HTTPS_MOCK_RESPONSES = {
            self.RESPONSE_MODE.Valid: {
                'instances/Volume::{}/action/removeVolume'.format(
                    fake_volume1['provider_id']
                ): fake_volume1['provider_id'],
                'instances/Volume::{}/action/removeVolume'.format(
                    fake_volume2['provider_id']
                ): fake_volume2['provider_id'],
                'instances/Volume::{}/action/removeMappedSdc'.format(
                    fake_volume1['provider_id']
                ): fake_volume1['provider_id'],
                'instances/Volume::{}/action/removeMappedSdc'.format(
                    fake_volume2['provider_id']
                ): fake_volume2['provider_id'],
                'instances/System/action/snapshotVolumes':
                    self.snapshot_reply,
            },
            self.RESPONSE_MODE.BadStatus: {
                'instances/Volume::{}/action/removeVolume'.format(
                    fake_volume1['provider_id']
                ): mocks.MockHTTPSResponse(
                    {
                        'errorCode': 401,
                        'message': 'BadStatus Volume Test',
                    }, 401
                ),
                'instances/Volume::{}/action/removeVolume'.format(
                    fake_volume2['provider_id']
                ): mocks.MockHTTPSResponse(
                    {
                        'errorCode': 401,
                        'message': 'BadStatus Volume Test',
                    }, 401
                ),
                'instances/System/action/snapshotVolumes':
                    self.BAD_STATUS_RESPONSE
            },
        }

    @staticmethod
    def _provider_ids(model_updates):
        """Return the provider_id of every model update, in order.

        Replaces the repeated ``get_pid = lambda ...`` assignments
        (PEP 8 E731) previously duplicated in several tests below.
        """
        return [update['provider_id'] for update in model_updates]

    def _fake_cgsnapshot(self):
        cgsnap = {'id': 'cgsid', 'name': 'testsnap',
                  'consistencygroup_id': fake.CONSISTENCY_GROUP_ID,
                  'status': 'available'}
        return cgsnap

    def test_create_consistencygroup(self):
        result = self.driver.create_consistencygroup(self.ctx,
                                                     self.consistency_group)
        self.assertEqual('available', result['status'])

    def test_delete_consistencygroup_valid(self):
        self.set_https_response_mode(self.RESPONSE_MODE.Valid)
        self.driver.configuration.set_override(
            'sio_unmap_volume_before_deletion',
            override=True)
        result_model_update, result_volumes_update = (
            self.driver.delete_consistencygroup(self.ctx,
                                                self.consistency_group,
                                                self.volumes))
        self.assertTrue(all(volume['status'] == 'deleted' for volume in
                            result_volumes_update))
        self.assertEqual('deleted', result_model_update['status'])

    def test_delete_consistency_group_fail(self):
        self.set_https_response_mode(self.RESPONSE_MODE.BadStatus)
        result_model_update, result_volumes_update = (
            self.driver.delete_consistencygroup(self.ctx,
                                                self.consistency_group,
                                                self.volumes))
        self.assertTrue(any(volume['status'] == 'error_deleting' for volume in
                            result_volumes_update))
        self.assertIn(result_model_update['status'],
                      ['error_deleting', 'error'])

    def test_create_consistencygroup_from_cg(self):
        self.set_https_response_mode(self.RESPONSE_MODE.Valid)
        result_model_update, result_volumes_model_update = (
            self.driver.create_consistencygroup_from_src(
                self.ctx, self.consistency_group, self.volumes2,
                source_cg=self.consistency_group, source_vols=self.volumes))
        self.assertEqual('available', result_model_update['status'])
        volume_provider_list = self._provider_ids(result_volumes_model_update)
        self.assertListEqual(['sid1', 'sid2'], volume_provider_list)

    def test_create_consistencygroup_from_cgs(self):
        self.snapshots[0]['provider_id'] = fake.PROVIDER_ID
        self.snapshots[1]['provider_id'] = fake.PROVIDER2_ID
        self.set_https_response_mode(self.RESPONSE_MODE.Valid)
        result_model_update, result_volumes_model_update = (
            self.driver.create_consistencygroup_from_src(
                self.ctx, self.consistency_group, self.volumes2,
                cgsnapshot=self._fake_cgsnapshot(),
                snapshots=self.snapshots))
        self.assertEqual('available', result_model_update['status'])
        volume_provider_list = self._provider_ids(result_volumes_model_update)
        self.assertListEqual(['sid1', 'sid2'], volume_provider_list)

    @mock.patch('cinder.objects.snapshot')
    @mock.patch('cinder.objects.snapshot')
    def test_create_cgsnapshots(self, snapshot1, snapshot2):
        type(snapshot1).volume = mock.PropertyMock(
            return_value=self.volumes[0])
        type(snapshot2).volume = mock.PropertyMock(
            return_value=self.volumes[1])
        snapshots = [snapshot1, snapshot2]
        self.set_https_response_mode(self.RESPONSE_MODE.Valid)
        result_model_update, result_snapshot_model_update = (
            self.driver.create_cgsnapshot(
                self.ctx,
                self._fake_cgsnapshot(),
                snapshots
            ))
        self.assertEqual('available', result_model_update['status'])
        self.assertTrue(all(snapshot['status'] == 'available' for snapshot in
                            result_snapshot_model_update))
        snapshot_provider_list = self._provider_ids(
            result_snapshot_model_update)
        self.assertListEqual(['sid1', 'sid2'], snapshot_provider_list)

    @mock.patch('cinder.objects.snapshot')
    @mock.patch('cinder.objects.snapshot')
    def test_delete_cgsnapshots(self, snapshot1, snapshot2):
        type(snapshot1).volume = mock.PropertyMock(
            return_value=self.volumes[0])
        type(snapshot2).volume = mock.PropertyMock(
            return_value=self.volumes[1])
        type(snapshot1).provider_id = mock.PropertyMock(
            return_value=fake.PROVIDER_ID)
        type(snapshot2).provider_id = mock.PropertyMock(
            return_value=fake.PROVIDER2_ID)
        snapshots = [snapshot1, snapshot2]
        self.set_https_response_mode(self.RESPONSE_MODE.Valid)
        result_model_update, result_snapshot_model_update = (
            self.driver.delete_cgsnapshot(
                self.ctx,
                self._fake_cgsnapshot(),
                snapshots
            ))
        self.assertEqual('deleted', result_model_update['status'])
        self.assertTrue(all(snapshot['status'] == 'deleted' for snapshot in
                            result_snapshot_model_update))
| apache-2.0 |
zsiki/ulyxes | pyapi/tcpiface.py | 1 | 4981 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
.. module:: tcpiface.py
:platform: Unix, Windows
:synopsis: Ulyxes - an open source project to drive total stations and
publish observation results. GPL v2.0 license Copyright (C)
2010- Zoltan Siki <[email protected]>.
.. moduleauthor:: Bence Turak <[email protected]>
"""
import sys
import socket
import logging
import re
from iface import Iface
class TCPIface(Iface):
    """Interface to communicate over TCP/IP. This class requires socket.

    All payloads are handled as ``bytes`` internally; :meth:`Send` decodes
    the final answer to ASCII text.

    :param name: name of tcp interface (str)
    :param address: address of server (tuple) (ip(str), port(int))
    :param bufSize: size of buffer in case of file (int)
    :param timeout: communication timeout seconds (int), default 15
    """

    def __init__(self, name, address, bufSize=1024, timeout=15):
        """ Constructor for TCP socket interface """
        super(TCPIface, self).__init__(name)
        self.sock = None
        self.bufSize = None
        # open socket
        self.Open(address, bufSize, timeout)

    def __del__(self):
        """ Destructor for TCP socket interface """
        self.Close()

    def Open(self, address, bufSize=1024, timeout=15):
        """ Open TCP socket

        :param address: (host, port) tuple of the server
        :param bufSize: receive buffer size for bulk (file) reads (int)
        :param timeout: socket timeout in seconds (int)
        """
        try:
            self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            self.sock.connect(address)
            self.sock.settimeout(timeout)
            self.bufSize = bufSize
            self.opened = True
            self.state = self.IF_OK
        except Exception:
            self.opened = False
            self.state = self.IF_ERROR
            logging.error(" cannot open TCP socket")

    def Close(self):
        """ Close TCP socket """
        try:
            self.sock.close()
            self.opened = False
            self.state = self.IF_OK
        except Exception:
            self.state = self.IF_ERROR
            logging.error(" cannot close TCP socket")

    def GetLine(self, fileSize=None):
        """ read from TCP interface until end of line or expected size

        :param fileSize: the size of the expected file (int), or None to
            read a single newline-terminated message
        :returns: bytes read (trailing newlines stripped), b'' on timeout
            or read error, None if the interface is not open
        """
        if self.sock is None or not self.opened or self.state != self.IF_OK:
            logging.error(" TCP socket not opened")
            return None
        # read answer till end of line (or till expected size arrived)
        ans = b''
        try:
            if fileSize is not None:
                # bulk mode: read bufSize chunks until enough arrived;
                # the +17 presumably compensates for the Python object
                # overhead included in sys.getsizeof() -- TODO confirm
                ans += self.sock.recv(self.bufSize)
                while sys.getsizeof(ans) < fileSize + 17:
                    ans += self.sock.recv(self.bufSize)
            else:
                # line mode: read byte by byte until newline
                a = self.sock.recv(1)
                ans += a
                while a != b'\n':
                    a = self.sock.recv(1)
                    ans += a
        except Exception:
            #self.state = self.IF_READ
            logging.error(" cannot read TCP socket")
        if ans == b'':
            # timeout exit loop
            #self.state = self.IF_TIMEOUT
            logging.error(" timeout on TCP socket")
        # remove end of line
        logging.debug(" message got: %s", ans)
        ans = ans.strip(b'\n')
        return ans

    def PutLine(self, msg):
        """ send message through the TCP socket

        :param msg: message to send (str)
        :returns: 0 - on OK, -1 on error or interface is in error state
        """
        # do nothing if interface is in error state
        if self.sock is None or not self.opened or self.state != self.IF_OK:
            logging.error(" TCP socket not opened or in error state")
            return -1
        # add LF to message end if it is missing
        if msg[-1:] != '\n':
            msg += '\n'
        # remove special characters
        msg = msg.encode('ascii', 'ignore')
        # send message to socket
        logging.debug(" message sent: %s", msg)
        try:
            # sendall retries until the whole buffer is transmitted,
            # unlike send() which may send only a prefix
            self.sock.sendall(msg)
        except Exception:
            self.state = self.IF_WRITE
            logging.error(" cannot write tcp")
            return -1
        return 0

    def Send(self, msg):
        """ send message to TCP socket and read answer

        :param msg: message to send, it can be multipart message separated by '|' (str)
        :returns: answer from server (str)
        """
        # '|' is a literal separator, so a plain split is sufficient
        msglist = msg.split("|")
        res = b''
        # sending
        for m in msglist:
            if self.PutLine(m) == 0:
                res += self.GetLine() + b"|"
        if res.endswith(b"|"):
            res = res[:-1]
        res = res.decode('ascii')
        return res
if __name__ == "__main__":
    # Demo/self-test: talk to a local web server and print name, state
    # and the raw HTTP response of a GET request.
    a = TCPIface('test', ('127.0.0.1', 80), 1024, 15)
    print (a.GetName())
    print (a.GetState())
    print (a.Send('GET /index.html HTTP/1.1'))
| gpl-2.0 |
akosel/servo | components/script/dom/bindings/codegen/parser/tests/test_incomplete_types.py | 276 | 1731 | import WebIDL
def WebIDLTest(parser, harness):
    """Check that a forward reference to a later-defined interface resolves.

    TestIncompleteTypes uses FooInterface before it is declared; after
    parsing, the attribute and method types must resolve to FooInterface.
    """
    parser.parse("""
      interface TestIncompleteTypes {
        attribute FooInterface attr1;
        FooInterface method1(FooInterface arg);
      };
      interface FooInterface {
      };
    """)
    results = parser.finish()
    harness.ok(True, "TestIncompleteTypes interface parsed without error.")
    harness.check(len(results), 2, "Should be two productions.")
    # First production: the interface that used the forward reference.
    iface = results[0]
    harness.ok(isinstance(iface, WebIDL.IDLInterface),
               "Should be an IDLInterface")
    harness.check(iface.identifier.QName(), "::TestIncompleteTypes", "Interface has the right QName")
    harness.check(iface.identifier.name, "TestIncompleteTypes", "Interface has the right name")
    harness.check(len(iface.members), 2, "Expect 2 members")
    attr = iface.members[0]
    harness.ok(isinstance(attr, WebIDL.IDLAttribute),
               "Should be an IDLAttribute")
    method = iface.members[1]
    harness.ok(isinstance(method, WebIDL.IDLMethod),
               "Should be an IDLMethod")
    # The forward-referenced type must now resolve to FooInterface.
    harness.check(attr.identifier.QName(), "::TestIncompleteTypes::attr1",
                  "Attribute has the right QName")
    harness.check(attr.type.name, "FooInterface",
                  "Previously unresolved type has the right name")
    harness.check(method.identifier.QName(), "::TestIncompleteTypes::method1",
                  "Attribute has the right QName")
    (returnType, args) = method.signatures()[0]
    harness.check(returnType.name, "FooInterface",
                  "Previously unresolved type has the right name")
    harness.check(args[0].type.name, "FooInterface",
                  "Previously unresolved type has the right name")
| mpl-2.0 |
nave91/dbt | dbt/compilation.py | 1 | 7958 | import itertools
import os
import json
from collections import OrderedDict, defaultdict
import sqlparse
import dbt.project
import dbt.utils
import dbt.include
import dbt.tracking
from dbt.utils import get_materialization, NodeType, is_type
from dbt.linker import Linker
import dbt.compat
import dbt.context.runtime
import dbt.contracts.project
import dbt.exceptions
import dbt.flags
import dbt.loader
from dbt.contracts.graph.compiled import CompiledNode, CompiledGraph
from dbt.clients.system import write_json
from dbt.logger import GLOBAL_LOGGER as logger
graph_file_name = 'graph.gpickle'
manifest_file_name = 'manifest.json'
def print_compile_stats(stats):
    """Log a one-line summary of how many nodes of each type were found."""
    names = {
        NodeType.Model: 'models',
        NodeType.Test: 'tests',
        NodeType.Archive: 'archives',
        NodeType.Analysis: 'analyses',
        NodeType.Macro: 'macros',
        NodeType.Operation: 'operations',
        NodeType.Seed: 'seed files',
    }

    # Start every known type at zero so absent types still show up.
    counts = dict.fromkeys(names, 0)
    counts.update(stats)

    parts = []
    for node_type, count in counts.items():
        parts.append("{} {}".format(count, names.get(node_type)))
    logger.info("Found {}".format(", ".join(parts)))
def _add_prepended_cte(prepended_ctes, new_cte):
for dct in prepended_ctes:
if dct['id'] == new_cte['id']:
dct['sql'] = new_cte['sql']
return
prepended_ctes.append(new_cte)
def _extend_prepended_ctes(prepended_ctes, new_prepended_ctes):
    """Merge every cte from *new_prepended_ctes* into *prepended_ctes*,
    updating duplicates by id."""
    for cte in new_prepended_ctes:
        _add_prepended_cte(prepended_ctes, cte)
def prepend_ctes(model, manifest):
    """Inject all required ephemeral CTEs into *model*'s compiled SQL and
    return the (model, manifest) pair."""
    injected_model, _, updated_manifest = recursively_prepend_ctes(model,
                                                                   manifest)
    return (injected_model, updated_manifest)
def recursively_prepend_ctes(model, manifest):
    """Resolve and inject *model*'s extra CTEs depth-first.

    Returns (model, prepended_ctes, manifest) where prepended_ctes is the
    ordered, de-duplicated list of CTEs the model's SQL depends on.
    Mutates both the model and the manifest entry for it.
    """
    # Already processed: reuse the cached result on the node.
    if model.extra_ctes_injected:
        return (model, model.extra_ctes, manifest)

    if dbt.flags.STRICT_MODE:
        # ensure that all the nodes in this manifest are compiled
        CompiledGraph(**manifest.to_flat_graph())

    prepended_ctes = []

    for cte in model.extra_ctes:
        cte_id = cte['id']
        cte_to_add = manifest.nodes.get(cte_id)
        # Recurse first: a CTE may itself depend on further ephemeral CTEs,
        # which must appear before it.
        cte_to_add, new_prepended_ctes, manifest = recursively_prepend_ctes(
            cte_to_add, manifest)

        _extend_prepended_ctes(prepended_ctes, new_prepended_ctes)
        new_cte_name = '__dbt__CTE__{}'.format(cte_to_add.get('name'))
        sql = ' {} as (\n{}\n)'.format(new_cte_name, cte_to_add.compiled_sql)
        _add_prepended_cte(prepended_ctes, {'id': cte_id, 'sql': sql})

    # Splices the CTE text into the model's SQL and marks it injected.
    model.prepend_ctes(prepended_ctes)

    manifest.nodes[model.unique_id] = model

    return (model, prepended_ctes, manifest)
class Compiler(object):
    """Compiles a dbt project: renders each node's SQL via Jinja, links the
    dependency graph, and writes the manifest and graph artifacts into the
    project's target path."""

    def __init__(self, project):
        self.project = project

    def initialize(self):
        """Ensure the target and modules directories exist on disk."""
        dbt.clients.system.make_directory(self.project['target-path'])
        dbt.clients.system.make_directory(self.project['modules-path'])

    def compile_node(self, node, manifest):
        """Render *node*'s raw SQL and return a CompiledNode.

        Sets compiled_sql (Jinja-rendered), injects ephemeral CTEs, and
        for tests/analyses/operations prepares wrapped_sql for execution.
        """
        logger.debug("Compiling {}".format(node.get('unique_id')))

        data = node.to_dict()
        # Reset any previous compilation state before re-rendering.
        data.update({
            'compiled': False,
            'compiled_sql': None,
            'extra_ctes_injected': False,
            'extra_ctes': [],
            'injected_sql': None,
        })
        compiled_node = CompiledNode(**data)

        context = dbt.context.runtime.generate(
            compiled_node, self.project, manifest)

        compiled_node.compiled_sql = dbt.clients.jinja.get_rendered(
            node.get('raw_sql'),
            context,
            node)

        compiled_node.compiled = True

        # Splice in the CTEs for any ephemeral models this node refs.
        injected_node, _ = prepend_ctes(compiled_node, manifest)

        should_wrap = {NodeType.Test, NodeType.Analysis, NodeType.Operation}
        if injected_node.resource_type in should_wrap:
            # data tests get wrapped in count(*)
            # TODO : move this somewhere more reasonable
            if 'data' in injected_node.tags and \
               is_type(injected_node, NodeType.Test):
                injected_node.wrapped_sql = (
                    "select count(*) from (\n{test_sql}\n) sbq").format(
                        test_sql=injected_node.injected_sql)
            else:
                # don't wrap schema tests or analyses.
                injected_node.wrapped_sql = injected_node.injected_sql

        elif is_type(injected_node, NodeType.Archive):
            # unfortunately we do everything automagically for
            # archives. in the future it'd be nice to generate
            # the SQL at the parser level.
            pass

        elif(is_type(injected_node, NodeType.Model) and
             get_materialization(injected_node) == 'ephemeral'):
            # ephemeral models are only ever inlined as CTEs elsewhere
            pass

        else:
            injected_node.wrapped_sql = None

        return injected_node

    def write_manifest_file(self, manifest):
        """Write the manifest file to disk.

        manifest should be a Manifest.
        """
        filename = manifest_file_name
        manifest_path = os.path.join(self.project['target-path'], filename)
        write_json(manifest_path, manifest.serialize())

    def write_graph_file(self, linker):
        """Serialize the linker's dependency graph into the target path."""
        filename = graph_file_name
        graph_path = os.path.join(self.project['target-path'], filename)
        linker.write_graph(graph_path)

    def link_node(self, linker, node, manifest):
        """Add *node* and its dependency edges to the linker graph."""
        linker.add_node(node.unique_id)

        linker.update_node_data(
            node.unique_id,
            node.to_dict())

        for dependency in node.depends_on_nodes:
            if manifest.nodes.get(dependency):
                linker.dependency(
                    node.unique_id,
                    (manifest.nodes.get(dependency).unique_id))
            else:
                dbt.exceptions.dependency_not_found(node, dependency)

    def link_graph(self, linker, manifest):
        """Link every manifest node; raise RuntimeError on a cyclic graph."""
        for node in manifest.nodes.values():
            self.link_node(linker, node, manifest)

        cycle = linker.find_cycles()

        if cycle:
            raise RuntimeError("Found a cycle: {}".format(cycle))

    def get_all_projects(self):
        """Return {project name: project config} for the root project and
        all of its dependency projects."""
        root_project = self.project.cfg
        all_projects = {root_project.get('name'): root_project}
        dependency_projects = dbt.utils.dependency_projects(self.project)

        for project in dependency_projects:
            name = project.cfg.get('name', 'unknown')
            all_projects[name] = project.cfg

        if dbt.flags.STRICT_MODE:
            # validates the combined project configs against the contract
            dbt.contracts.project.ProjectList(**all_projects)

        return all_projects

    def _check_resource_uniqueness(cls, manifest):
        # NOTE(review): the first parameter is named `cls` but this is a
        # plain instance method (called as self._check_resource_uniqueness);
        # it works, but the name is misleading.
        # Raises via dbt.exceptions if two refable nodes share a name or a
        # schema-qualified alias.
        names_resources = {}
        alias_resources = {}

        for resource, node in manifest.nodes.items():
            if node.resource_type not in NodeType.refable():
                continue

            name = node.name
            alias = "{}.{}".format(node.schema, node.alias)

            existing_node = names_resources.get(name)
            if existing_node is not None:
                dbt.exceptions.raise_duplicate_resource_name(
                    existing_node, node)

            existing_alias = alias_resources.get(alias)
            if existing_alias is not None:
                dbt.exceptions.raise_ambiguous_alias(
                    existing_alias, node)

            names_resources[name] = node
            alias_resources[alias] = node

    def compile(self):
        """Load, validate, link and write out the whole project.

        Returns (manifest, linker).
        """
        linker = Linker()

        all_projects = self.get_all_projects()

        manifest = dbt.loader.GraphLoader.load_all(self.project, all_projects)

        self.write_manifest_file(manifest)

        self._check_resource_uniqueness(manifest)

        self.link_graph(linker, manifest)

        # Tally nodes and macros per resource type for the summary line.
        stats = defaultdict(int)

        for node_name, node in itertools.chain(
                manifest.nodes.items(),
                manifest.macros.items()):
            stats[node.resource_type] += 1

        self.write_graph_file(linker)
        print_compile_stats(stats)

        return manifest, linker
| apache-2.0 |
matehall/Python-koan | python 3/koans/about_dice_project.py | 14 | 1958 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
from runner.koan import *
import random
class DiceSet:
    """A set of six-sided dice; the outcome of the last roll is kept in
    ``values`` (None before the first roll)."""

    def __init__(self):
        self._values = None

    @property
    def values(self):
        """Return the list produced by the most recent roll."""
        return self._values

    def roll(self, n):
        """Roll ``n`` dice, storing a fresh list of results in ``values``.

        random.randint is inclusive on both ends, so each die is 1..6.
        A new list is bound on every roll, so previously returned lists
        are never mutated.
        """
        self._values = [random.randint(1, 6) for _ in range(n)]
class AboutDiceProject(Koan):
    """Koan exercises that drive the DiceSet implementation above; they
    pass once DiceSet.roll is implemented."""

    def test_can_create_a_dice_set(self):
        dice = DiceSet()
        self.assertTrue(dice)

    def test_rolling_the_dice_returns_a_set_of_integers_between_1_and_6(self):
        dice = DiceSet()
        dice.roll(5)
        self.assertTrue(isinstance(dice.values, list), "should be a list")
        self.assertEqual(5, len(dice.values))
        for value in dice.values:
            self.assertTrue(value >= 1 and value <= 6, "value " + str(value) + " must be between 1 and 6")

    def test_dice_values_do_not_change_unless_explicitly_rolled(self):
        # Reading `values` twice without rolling must give the same object.
        dice = DiceSet()
        dice.roll(5)
        first_time = dice.values
        second_time = dice.values
        self.assertEqual(first_time, second_time)

    def test_dice_values_should_change_between_rolls(self):
        dice = DiceSet()
        dice.roll(5)
        first_time = dice.values
        dice.roll(5)
        second_time = dice.values
        self.assertNotEqual(first_time, second_time, \
            "Two rolls should not be equal")

        # THINK ABOUT IT:
        #
        # If the rolls are random, then it is possible (although not
        # likely) that two consecutive rolls are equal. What would be a
        # better way to test this?

    def test_you_can_roll_different_numbers_of_dice(self):
        dice = DiceSet()
        dice.roll(3)
        self.assertEqual(3, len(dice.values))
        dice.roll(1)
        self.assertEqual(1, len(dice.values))
| mit |
rudhir-upretee/Sumo_With_Netsim | docs/tutorial/city_mobil/statistics.py | 6 | 4395 | #!/usr/bin/env python
"""
@file statistics.py
@author Michael Behrisch
@author Daniel Krajzewicz
@date 2008-10-17
@version $Id: statistics.py 12898 2012-10-26 08:58:14Z behrisch $
Collecting statistics for the CityMobil parking lot
SUMO, Simulation of Urban MObility; see http://sumo.sourceforge.net/
Copyright (C) 2008-2012 DLR (http://www.dlr.de/) and contributors
All rights reserved
"""
# Registry of every Person seen so far, keyed by person id.
persons = {}
# Number of persons currently waiting for or riding a vehicle.
personsRunning = 0
class Person:
    """Bookkeeping record for a single traveler in the simulation."""

    def __init__(self, id, source, target, step):
        self.id = id
        self.source = source
        self.target = target
        # simulation step at which the person started waiting
        self.waitStart = step
        # filled in later by personLoaded / personUnloaded
        self.depart = None
        self.arrive = None
def personArrived(personID, edge, target, step):
    """Register a new person waiting at *edge* for a ride to *target*."""
    global personsRunning
    persons[personID] = Person(personID, edge, target, step)
    personsRunning += 1
def personLoaded(personID, step):
    """Record the simulation step at which the person boarded a vehicle."""
    persons[personID].depart = step
def personUnloaded(personID, step):
    """Record arrival time and mark the person's trip as finished."""
    global personsRunning
    persons[personID].arrive = step
    personsRunning -= 1
def evaluate(forTest=False):
    """Print waiting/travel-time statistics and total CO2 for the run.

    When *forTest* is True, print thresholded booleans instead of raw
    numbers so test output stays deterministic.  (Python 2 code.)
    """
    try:
        import numpy, math
    except ImportError:
        print "No numpy available, skipping statistics"
        return
    waitTimes = []
    routeTimes = {}
    # Aggregate per-person waiting times and per-route travel times.
    for person in persons.itervalues():
        waitTimes.append(person.depart - person.waitStart)
        route = (person.source, person.target)
        if not route in routeTimes:
            routeTimes[route] = []
        routeTimes[route].append(person.arrive - person.depart)
    waitArray = numpy.array(waitTimes)
    if forTest:
        print "waiting time (max, mean, dev):", waitArray.max() < 1000, waitArray.mean() < 1000, math.sqrt(waitArray.var()) < 100
    else:
        print "waiting time (max, mean, dev):", waitArray.max(), waitArray.mean(), math.sqrt(waitArray.var())
    for route, times in sorted(routeTimes.iteritems()):
        timeArray = numpy.array(times)
        if forTest:
            print route, timeArray.max() < 1000, timeArray.mean() < 1000, math.sqrt(timeArray.var()) < 100
        else:
            print route, timeArray.max(), timeArray.mean(), math.sqrt(timeArray.var())
    co2 = 0.
    # Sum absolute CO2 of the cyber cars from the aggregated emissions dump
    # by scraping the CO2_abs="..." attribute out of each matching line.
    for line in open("aggregated.xml"):
        if "cyber" in line:
            pos = line.find('CO2_abs="') + 9
            if pos >= 9:
                endpos = line.find('"', pos)
                co2 += float(line[pos:endpos])
    if forTest:
        print "CO2:", co2 < 10000000
    else:
        print "CO2:", co2
if __name__ == "__main__":
    # Offline evaluation: parse a stats dump produced by runs of the
    # "simple" (standard bus) and "agent" (cyber car) scenarios and plot
    # waiting and travel times against the repeater interval.
    from pylab import *
    # NOTE(review): `sys` is not imported explicitly in this file; this
    # relies on pylab's star import providing it -- confirm.
    stats = open(sys.argv[1])
    demand = []
    simpleWaitMean = []
    agentWaitMean = []
    simpleWaitDev = []
    agentWaitDev = []
    simpleRouteMean = []
    agentRouteMean = []
    simpleRouteDev = []
    agentRouteDev = []
    # Scenario marker lines switch which series the following values go to.
    for line in stats:
        if "simple" in line:
            mean = simpleWaitMean
            dev = simpleWaitDev
            rmean = simpleRouteMean
            rdev = simpleRouteDev
            demand.append(int(line.split()[-1]))
        if "agent" in line:
            mean = agentWaitMean
            dev = agentWaitDev
            rmean = agentRouteMean
            rdev = agentRouteDev
        if "waiting" in line:
            mean.append(float(line.split()[-2]))
            dev.append(float(line.split()[-1]))
        if line.startswith("('footmain0to1'"):
            rmean.append(float(line.split()[-2]))
            rdev.append(float(line.split()[-1]))
    stats.close()
    # Waiting-time plot.
    figure()
    errorbar(demand, simpleWaitMean, simpleWaitDev, lw=2, ms=10, fmt='o', label='standard bus scenario')
    errorbar(demand, agentWaitMean, agentWaitDev, lw=2, ms=10, color="red", fmt='o', label='agent controlled cyber cars')
    xlim(0, 50)
    ylim(0, 3300)
    xlabel('Repeater interval (s)')
    ylabel('Waiting time (s)')
    title('Mean and standard deviation of waiting time')
    legend(numpoints=1)
    savefig("waitingtime.png")
    # Travel-time plot for the longest route.
    figure()
    errorbar(demand, simpleRouteMean, simpleRouteDev, lw=2, ms=10, fmt='o', label='standard bus scenario')
    errorbar(demand, agentRouteMean, agentRouteDev, lw=2, ms=10, color="red", fmt='o', label='agent controlled cyber cars')
    xlim(0, 50)
    ylim(0, 300)
    xlabel('Repeater interval (s)')
    ylabel('Travel time (s)')
    title('Mean and standard deviation of travel time on the longest route')
    legend(numpoints=1)
    savefig("traveltime.png")
    show()
| gpl-3.0 |
edx/lettuce | tests/integration/lib/Django-1.3/django/core/management/sql.py | 229 | 8259 | import os
import re
from django.conf import settings
from django.core.management.base import CommandError
from django.db import models
from django.db.models import get_models
def sql_create(app, style, connection):
    "Returns a list of the CREATE TABLE SQL statements for the given app."

    if connection.settings_dict['ENGINE'] == 'django.db.backends.dummy':
        # This must be the "dummy" database backend, which means the user
        # hasn't set ENGINE for the database.
        # (fixed typos in the user-facing message: DATBASES -> DATABASES)
        raise CommandError("Django doesn't know which syntax to use for your SQL statements,\n" +
            "because you haven't specified the ENGINE setting for the database.\n" +
            "Edit your settings file and change DATABASES['default']['ENGINE'] to something like\n" +
            "'django.db.backends.postgresql' or 'django.db.backends.mysql'.")

    # Get installed models, so we generate REFERENCES right.
    # We trim models from the current app so that the sqlreset command does not
    # generate invalid SQL (leaving models out of known_models is harmless, so
    # we can be conservative).
    app_models = models.get_models(app, include_auto_created=True)
    final_output = []
    tables = connection.introspection.table_names()
    known_models = set([model for model in connection.introspection.installed_models(tables) if model not in app_models])
    # Maps referenced model -> list of (model, field) pairs still waiting
    # for that model's table to be created.
    pending_references = {}

    for model in app_models:
        output, references = connection.creation.sql_create_model(model, style, known_models)
        final_output.extend(output)
        for refto, refs in references.items():
            pending_references.setdefault(refto, []).extend(refs)
            if refto in known_models:
                final_output.extend(connection.creation.sql_for_pending_references(refto, style, pending_references))
        final_output.extend(connection.creation.sql_for_pending_references(model, style, pending_references))
        # Keep track of the fact that we've created the table for this model.
        known_models.add(model)

    # Handle references to tables that are from other apps
    # but don't exist physically.
    not_installed_models = set(pending_references.keys())
    if not_installed_models:
        alter_sql = []
        for model in not_installed_models:
            alter_sql.extend(['-- ' + sql for sql in
                connection.creation.sql_for_pending_references(model, style, pending_references)])
        if alter_sql:
            final_output.append('-- The following references should be added but depend on non-existent tables:')
            final_output.extend(alter_sql)

    return final_output
def sql_delete(app, style, connection):
    "Returns a list of the DROP TABLE SQL statements for the given app."

    # This should work even if a connection isn't available
    try:
        cursor = connection.cursor()
    except Exception:
        # Best effort: continue without a cursor; we just won't know which
        # tables actually exist.  (Was a bare ``except:``, which also
        # swallowed SystemExit/KeyboardInterrupt.)
        cursor = None

    # Figure out which tables already exist
    if cursor:
        table_names = connection.introspection.get_table_list(cursor)
    else:
        table_names = []

    output = []

    # Output DROP TABLE statements for standard application tables.
    to_delete = set()

    # Maps referenced model -> list of (model, field) pairs so backends can
    # drop foreign-key constraints before dropping the tables.
    references_to_delete = {}
    app_models = models.get_models(app, include_auto_created=True)
    for model in app_models:
        if cursor and connection.introspection.table_name_converter(model._meta.db_table) in table_names:
            # The table exists, so it needs to be dropped
            opts = model._meta
            for f in opts.local_fields:
                if f.rel and f.rel.to not in to_delete:
                    references_to_delete.setdefault(f.rel.to, []).append( (model, f) )

            to_delete.add(model)

    for model in app_models:
        if connection.introspection.table_name_converter(model._meta.db_table) in table_names:
            output.extend(connection.creation.sql_destroy_model(model, references_to_delete, style))

    # Close database connection explicitly, in case this output is being piped
    # directly into a database client, to avoid locking issues.
    if cursor:
        cursor.close()
        connection.close()

    return output[::-1] # Reverse it, to deal with table dependencies.
def sql_reset(app, style, connection):
    "Returns a list of the DROP TABLE SQL, then the CREATE TABLE SQL, for the given module."
    # This command breaks a lot and should be deprecated
    import warnings
    message = 'This command has been deprecated. The command ``sqlflush`` can be used to delete everything. You can also use ALTER TABLE or DROP TABLE statements manually.'
    warnings.warn(message, PendingDeprecationWarning)
    drop_statements = sql_delete(app, style, connection)
    create_statements = sql_all(app, style, connection)
    return drop_statements + create_statements
def sql_flush(style, connection, only_django=False):
    """
    Returns a list of the SQL statements used to flush the database.

    If only_django is True, then only table names that have associated Django
    models and are in INSTALLED_APPS will be included.
    """
    introspection = connection.introspection
    if only_django:
        tables = introspection.django_table_names(only_existing=True)
    else:
        tables = introspection.table_names()
    sequences = introspection.sequence_list()
    return connection.ops.sql_flush(style, tables, sequences)
def sql_custom(app, style, connection):
    "Returns a list of the custom table modifying SQL statements for the given app."
    # (removed an unused local `app_dir`; custom_sql_for_model computes the
    # sql directory itself)
    output = []

    app_models = get_models(app)

    for model in app_models:
        output.extend(custom_sql_for_model(model, style, connection))

    return output
def sql_indexes(app, style, connection):
    "Returns a list of the CREATE INDEX SQL statements for all models in the given app."
    statements = []
    for model in models.get_models(app):
        statements.extend(connection.creation.sql_indexes_for_model(model, style))
    return statements
def sql_all(app, style, connection):
    "Returns a list of CREATE TABLE SQL, initial-data inserts, and CREATE INDEX SQL for the given module."
    statements = sql_create(app, style, connection)
    statements += sql_custom(app, style, connection)
    statements += sql_indexes(app, style, connection)
    return statements
def custom_sql_for_model(model, style, connection):
    """Return the custom SQL statements shipped with *model*.

    Collects post-creation SQL from the model's fields, then statements
    from ``sql/<model>.<backend>.sql`` and ``sql/<model>.sql`` in the
    app directory.  (Python 2 code: uses ``ur"..."`` literals.)
    """
    opts = model._meta
    app_dir = os.path.normpath(os.path.join(os.path.dirname(models.get_app(model._meta.app_label).__file__), 'sql'))
    output = []

    # Post-creation SQL should come before any initial SQL data is loaded.
    # However, this should not be done for models that are unmanaged or
    # for fields that are part of a parent model (via model inheritance).
    if opts.managed:
        post_sql_fields = [f for f in opts.local_fields if hasattr(f, 'post_create_sql')]
        for f in post_sql_fields:
            output.extend(f.post_create_sql(style, model._meta.db_table))

    # Some backends can't execute more than one SQL statement at a time,
    # so split into separate statements.
    statements = re.compile(r";[ \t]*$", re.M)

    # Find custom SQL, if it's available.
    # Backend-specific file is tried first, then the generic one.
    backend_name = connection.settings_dict['ENGINE'].split('.')[-1]
    sql_files = [os.path.join(app_dir, "%s.%s.sql" % (opts.object_name.lower(), backend_name)),
                 os.path.join(app_dir, "%s.sql" % opts.object_name.lower())]
    for sql_file in sql_files:
        if os.path.exists(sql_file):
            fp = open(sql_file, 'U')
            for statement in statements.split(fp.read().decode(settings.FILE_CHARSET)):
                # Remove any comments from the file
                statement = re.sub(ur"--.*([\n\Z]|$)", "", statement)
                if statement.strip():
                    output.append(statement + u";")
            fp.close()

    return output
def emit_post_sync_signal(created_models, verbosity, interactive, db):
    """Send the post_syncdb signal for every installed application."""
    # Emit the post_sync signal for every application.
    for app in models.get_apps():
        # app.__name__ ends in '.models'; the app label is the segment before.
        app_name = app.__name__.split('.')[-2]
        if verbosity >= 2:
            print "Running post-sync handlers for application", app_name
        models.signals.post_syncdb.send(sender=app, app=app,
            created_models=created_models, verbosity=verbosity,
            interactive=interactive, db=db)
| gpl-3.0 |
pgmillon/ansible | lib/ansible/plugins/terminal/enos.py | 101 | 2824 | # (C) 2017 Red Hat Inc.
# Copyright (C) 2017 Lenovo.
#
# GNU General Public License v3.0+
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
#
# Contains terminal Plugin methods for ENOS Config Module
# Lenovo Networking
#
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
import re
from ansible.errors import AnsibleConnectionFailure
from ansible.module_utils._text import to_text, to_bytes
from ansible.plugins.terminal import TerminalBase
class TerminalModule(TerminalBase):
    """Terminal plugin for Lenovo ENOS devices.

    Supplies the prompt / error regexes used to delimit CLI output and the
    hooks for entering and leaving privileged (enable) mode.
    """

    # Patterns that match the device prompt at the end of output.
    terminal_stdout_re = [
        re.compile(br"[\r\n]?[\w+\-\.:\/\[\]]+(?:\([^\)]+\)){,3}(?:>|#) ?$"),
        re.compile(br"\[\w+\@[\w\-\.]+(?: [^\]])\] ?[>#\$] ?$"),
        re.compile(br">[\r\n]?")
    ]

    # Patterns that indicate the device reported an error.
    terminal_stderr_re = [
        re.compile(br"% ?Error"),
        re.compile(br"% ?Bad secret"),
        re.compile(br"invalid input", re.I),
        re.compile(br"(?:incomplete|ambiguous) command", re.I),
        re.compile(br"connection timed out", re.I),
        re.compile(br"[^\r\n]+ not found"),
        re.compile(br"'[^']' +returned error code: ?\d+"),
    ]

    def on_open_shell(self):
        """Disable output paging right after the shell opens."""
        try:
            for cmd in (b'\n', b'terminal-length 0\n'):
                self._exec_cli_command(cmd)
        except AnsibleConnectionFailure:
            raise AnsibleConnectionFailure('unable to set terminal parameters')

    def on_become(self, passwd=None):
        """Enter privileged (enable) mode, optionally with a password."""
        # A prompt ending in '#' means we are already privileged.
        if self._get_prompt().endswith(b'#'):
            return

        cmd = {u'command': u'enable'}
        if passwd:
            # Note: python-3.5 cannot combine u"" and r"" together.  Thus make
            # an r string and use to_text to ensure it's text
            # on both py2 and py3.
            cmd[u'prompt'] = to_text(r"[\r\n]?password: $",
                                     errors='surrogate_or_strict')
            cmd[u'answer'] = passwd

        try:
            self._exec_cli_command(to_bytes(json.dumps(cmd),
                                            errors='surrogate_or_strict'))
        except AnsibleConnectionFailure:
            msg = 'unable to elevate privilege to enable mode'
            raise AnsibleConnectionFailure(msg)

    def on_unbecome(self):
        """Leave privileged / config mode, returning to user EXEC."""
        prompt = self._get_prompt()
        if prompt is None:
            # if prompt is None most likely the terminal is hung up at a prompt
            return

        if b'(config' in prompt:
            # Exit config mode first, then drop privileges.
            self._exec_cli_command(b'end')
            self._exec_cli_command(b'disable')

        elif prompt.endswith(b'#'):
            self._exec_cli_command(b'disable')
| gpl-3.0 |
lyapun/django-lean-42cc | django_lean/lean_analytics/tests.py | 4 | 15078 | from __future__ import with_statement
from contextlib import contextmanager
from django.conf import settings
from django.contrib.auth.models import AnonymousUser, User
from django.http import HttpRequest
from django_lean.experiments.models import (AnonymousVisitor, Experiment,
GoalRecord, GoalType, Participant)
from django_lean.experiments.tests.utils import get_session, patch, TestCase
from django_lean.experiments.utils import StaticUser, WebUser
from django_lean.lean_analytics import (get_all_analytics,
get_all_analytics_names,
reset_caches,
IdentificationError)
from django_lean.lean_analytics.base import BaseAnalytics
import mox
class TestAnalytics(TestCase):
    """Tests for analytics backend discovery via settings.LEAN_ANALYTICS."""

    def test_get_all_analytics_names(self):
        # Unset setting yields the empty-tuple default; empty list passes
        # through unchanged.
        with patch(settings, 'LEAN_ANALYTICS', NotImplemented):
            reset_caches()
            self.assertEqual(get_all_analytics_names(), ())
        with patch(settings, 'LEAN_ANALYTICS', []):
            reset_caches()
            self.assertEqual(get_all_analytics_names(), [])
        base_name = '%s.%s' % (BaseAnalytics.__module__, BaseAnalytics.__name__)
        with patch(settings, 'LEAN_ANALYTICS', [base_name]):
            reset_caches()
            self.assertEqual(get_all_analytics_names(), [base_name])

    def test_get_all_analytics(self):
        # Same scenarios as above, but checking the instantiated backends.
        with patch(settings, 'LEAN_ANALYTICS', NotImplemented):
            reset_caches()
            self.assertEqual(get_all_analytics(), [])
        with patch(settings, 'LEAN_ANALYTICS', []):
            reset_caches()
            self.assertEqual(get_all_analytics(), [])
        base_name = '%s.%s' % (BaseAnalytics.__module__, BaseAnalytics.__name__)
        with patch(settings, 'LEAN_ANALYTICS', [base_name]):
            reset_caches()
            # Dotted names are resolved to instances of the named classes.
            self.assertEqual([a.__class__.__name__ for a in get_all_analytics()],
                             [BaseAnalytics.__name__])
#############
# KISSMETRICS
#############
try:
    import django_kissmetrics
except ImportError:
    # Only complain when the KissMetrics backend is actually configured;
    # otherwise the optional dependency may legitimately be absent.
    if 'django_lean.lean_analytics.kissmetrics.KissMetrics' in \
       get_all_analytics_names():
        # fix: traceback was used here without being imported anywhere
        # in this module, which raised NameError instead of reporting
        # the original ImportError.
        import traceback
        traceback.print_exc()
else:
    from django_lean.lean_analytics.kissmetrics import KissMetrics

    class TestKissMetrics(TestCase):
        """Tests for the KissMetrics analytics backend."""

        def setUp(self):
            self.mox = mox.Mox()
            self.analytics = KissMetrics()

        def test_id_from_user(self):
            """_id_from_user maps a User to 'User <pk>' and rejects None."""
            user = User.objects.create_user('user', '[email protected]', 'user')
            self.assertEqual(self.analytics._id_from_user(user),
                             'User %d' % user.pk)
            self.assertRaises(IdentificationError,
                              self.analytics._id_from_user, None)

        def test_id_from_session(self):
            """_id_from_session maps a real session to 'Session <key>'."""
            # With real session
            with self.web_user(AnonymousUser()) as experiment_user:
                self.mox.ReplayAll()
                session = experiment_user.session
                self.assertEqual(
                    self.analytics._id_from_session(experiment_user.session),
                    'Session %s' % session.session_key
                )
                self.mox.VerifyAll()
            # With dict as session
            experiment_user = StaticUser()
            self.assertRaises(IdentificationError,
                              self.analytics._id_from_session,
                              experiment_user.session)

        def test_compute_id(self):
            """_compute_id prefers the user id, falls back to the session."""
            # With anonymous WebUser
            with self.web_user(AnonymousUser()) as experiment_user:
                session = experiment_user.session
                self.mox.ReplayAll()
                self.assertEqual(self.analytics._compute_id(experiment_user),
                                 'Session %s' % session.session_key)
                self.mox.VerifyAll()
            # With authenticated WebUser
            user = User.objects.create_user('user', '[email protected]', 'user')
            with self.web_user(user) as experiment_user:
                self.mox.ReplayAll()
                self.assertEqual(self.analytics._compute_id(experiment_user),
                                 'User %d' % user.id)
                self.mox.VerifyAll()
            # With StaticUser
            experiment_user = StaticUser()
            self.assertRaises(IdentificationError,
                              self.analytics._compute_id, experiment_user)

        def test_identify(self):
            """_identify succeeds for web users, fails for StaticUser."""
            # With anonymous WebUser
            with self.web_user(AnonymousUser()) as experiment_user:
                self.mox.ReplayAll()
                self.assertTrue(self.analytics._identify(experiment_user))
                self.mox.VerifyAll()
            # With authenticated WebUser
            user = User.objects.create_user('user', '[email protected]', 'user')
            with self.web_user(user) as experiment_user:
                self.mox.ReplayAll()
                self.assertTrue(self.analytics._identify(experiment_user))
                self.mox.VerifyAll()
            # With StaticUser
            experiment_user = StaticUser()
            self.assertFalse(self.analytics._identify(experiment_user))

        def test_enroll(self):
            """enroll() identifies the user and records the enrollment."""
            experiment = Experiment.objects.create(name='Experiment')
            user = User.objects.create_user('user', '[email protected]', 'user')
            KM = self.mox.CreateMockAnything()
            analytics = KissMetrics(KM=KM)
            with self.web_user(user) as experiment_user:
                KM.identify(analytics._compute_id(experiment_user))
                KM.record(action='Enrolled In Experiment',
                          props={'Experiment': experiment.name,
                                 'Group': 'Test'})
                self.mox.ReplayAll()
                analytics.enroll(experiment=experiment,
                                 experiment_user=experiment_user,
                                 group_id=Participant.TEST_GROUP)
                self.mox.VerifyAll()

        def test_record(self):
            """record() forwards a recorded goal to KissMetrics."""
            KM = self.mox.CreateMockAnything()
            analytics = KissMetrics(KM=KM)
            with self.web_user(AnonymousUser()) as experiment_user:
                KM.identify(analytics._id_from_session(experiment_user.session))
                KM.record(action='Goal Recorded',
                          props={'Goal Type': 'Goal Type'})
                self.mox.ReplayAll()
                goal_type = GoalType.objects.create(name='Goal Type')
                goal_record = GoalRecord.record(goal_name=goal_type.name,
                                                experiment_user=experiment_user)
                analytics.record(goal_record=goal_record,
                                 experiment_user=experiment_user)
                self.mox.VerifyAll()

        def test_event(self):
            """event() forwards an arbitrary named event with properties."""
            KM = self.mox.CreateMockAnything()
            analytics = KissMetrics(KM=KM)
            with self.web_user(AnonymousUser()) as experiment_user:
                KM.identify(analytics._id_from_session(experiment_user.session))
                KM.record(action='Event', props={'Foo': 'Bar'})
                self.mox.ReplayAll()
                analytics.event(name='Event',
                                properties={'Foo': 'Bar'},
                                request=experiment_user.request)
                self.mox.VerifyAll()

        @contextmanager
        def web_user(self, user):
            """Yield a WebUser wrapping a mocked request for *user*."""
            session = get_session(None)
            request = self.mox.CreateMock(HttpRequest)
            request.user = user
            request.session = session
            experiment_user = WebUser(request)
            experiment_user.get_or_create_anonymous_visitor()
            yield experiment_user
##########
# MIXPANEL
##########
try:
import mixpanel
except ImportError:
if 'django_lean.lean_analytics.mixpanel.Mixpanel' in \
get_all_analytics_names():
traceback.print_exc()
else:
from django_lean.lean_analytics.mixpanel import Mixpanel
class TestMixpanel(TestCase):
def setUp(self):
self.mox = mox.Mox()
self.analytics = Mixpanel()
def tearDown(self):
self.mox.UnsetStubs()
def test_id_from_user(self):
user = User.objects.create_user('user', '[email protected]', 'user')
self.assertEqual(self.analytics._id_from_user(user),
'User %d' % user.pk)
self.assertRaises(IdentificationError,
self.analytics._id_from_user, None)
def test_id_from_session(self):
# With real session
with self.web_user(AnonymousUser()) as experiment_user:
self.mox.ReplayAll()
session = experiment_user.session
self.assertEqual(
self.analytics._id_from_session(experiment_user.session),
'Session %s' % session.session_key
)
self.mox.VerifyAll()
# With dict as session
experiment_user = StaticUser()
self.assertRaises(IdentificationError,
self.analytics._id_from_session,
experiment_user.session)
def test_compute_id(self):
# With anonymous WebUser
with self.web_user(AnonymousUser()) as experiment_user:
session = experiment_user.session
self.mox.ReplayAll()
self.assertEqual(self.analytics._compute_id(experiment_user),
'Session %s' % session.session_key)
self.mox.VerifyAll()
# With authenticated WebUser
user = User.objects.create_user('user', '[email protected]', 'user')
with self.web_user(user) as experiment_user:
self.mox.ReplayAll()
self.assertEqual(self.analytics._compute_id(experiment_user),
'User %d' % user.id)
self.mox.VerifyAll()
# With StaticUser
experiment_user = StaticUser()
self.assertRaises(IdentificationError,
self.analytics._compute_id, experiment_user)
def test_identify(self):
# With anonymous WebUser
with self.web_user(AnonymousUser()) as experiment_user:
self.mox.ReplayAll()
self.assertTrue(self.analytics._identify(experiment_user))
self.assertEqual(
self.analytics.identity,
'Session %s' % experiment_user.session.session_key
)
self.mox.VerifyAll()
# With authenticated WebUser
user = User.objects.create_user('user', '[email protected]', 'user')
with self.web_user(user) as experiment_user:
self.mox.ReplayAll()
self.assertTrue(self.analytics._identify(experiment_user))
self.assertEqual(self.analytics.identity,
'User %s' % experiment_user.user.pk)
self.mox.VerifyAll()
# With StaticUser
experiment_user = StaticUser()
self.assertFalse(self.analytics._identify(experiment_user))
self.assertEqual(self.analytics.identity, None)
def test_enroll(self):
import time
experiment = Experiment.objects.create(name='Experiment')
user = User.objects.create_user('user', '[email protected]', 'user')
tracker = self.mox.CreateMockAnything()
analytics = Mixpanel(tracker=tracker)
now = time.gmtime()
self.mox.StubOutWithMock(time, 'gmtime')
time.gmtime().AndReturn(now)
with self.web_user(user) as experiment_user:
properties = {'time': '%d' % time.mktime(now),
'distinct_id': 'User %d' % user.pk,
'Experiment': experiment.name,
'Group': 'Test'}
tracker.run(event_name='Enrolled In Experiment',
properties=properties)
self.mox.ReplayAll()
analytics.enroll(experiment=experiment,
experiment_user=experiment_user,
group_id=Participant.TEST_GROUP)
self.mox.VerifyAll()
def test_record(self):
import time
tracker = self.mox.CreateMockAnything()
analytics = Mixpanel(tracker=tracker)
now = time.gmtime()
self.mox.StubOutWithMock(time, 'gmtime')
time.gmtime().AndReturn(now)
with self.web_user(AnonymousUser()) as experiment_user:
properties = {
'time': '%d' % time.mktime(now),
'distinct_id': ('Session %s' %
experiment_user.session.session_key),
'Goal Type': 'Goal Type'
}
tracker.run(event_name='Goal Recorded',
properties=properties)
self.mox.ReplayAll()
goal_type = GoalType.objects.create(name='Goal Type')
goal_record = GoalRecord.record(goal_name=goal_type.name,
experiment_user=experiment_user)
analytics.record(goal_record=goal_record,
experiment_user=experiment_user)
self.mox.VerifyAll()
def test_event(self):
import time
tracker = self.mox.CreateMockAnything()
analytics = Mixpanel(tracker=tracker)
now = time.gmtime()
self.mox.StubOutWithMock(time, 'gmtime')
time.gmtime().AndReturn(now)
with self.web_user(AnonymousUser()) as experiment_user:
properties = {
'time': '%d' % time.mktime(now),
'distinct_id': ('Session %s' %
experiment_user.session.session_key),
'Foo': 'Bar'
}
tracker.run(event_name='Event',
properties=properties)
self.mox.ReplayAll()
analytics.event(name='Event',
properties={'Foo': 'Bar'},
request=experiment_user.request)
self.mox.VerifyAll()
@contextmanager
def web_user(self, user):
session = get_session(None)
request = self.mox.CreateMock(HttpRequest)
request.user = user
request.session = session
experiment_user = WebUser(request)
experiment_user.get_or_create_anonymous_visitor()
yield experiment_user
| bsd-3-clause |
frozstone/concept | utilities/HeurDep.py | 1 | 5042 | from os import listdir, path
from lxml import etree, objectify
from pickle import load
from sys import argv
from StringIO import StringIO
from collections import OrderedDict
import time
from utilities.norm_arxiv import norm_arxiv
from utilities.norm_attribute import norm_attribute
from utilities.norm_mrow import norm_mrow
from utilities.norm_outer_fence import norm_outer_fence
from utilities.norm_splitter import norm_splitter
from utilities.norm_tag import norm_tag
from utilities.utils import Link_Types, Matching_Methods, utils
from utilities.depgraph_heur import depgraph_heur
# DOCTYPE prologue prepended to every MathML fragment so the DTD entities resolve.
__dtd = '<!DOCTYPE math SYSTEM "resources/xhtml-math11-f.dtd">'
# Default MathML namespace attribute; stripped from fragments before parsing.
__xmlns = ' xmlns="http://www.w3.org/1998/Math/MathML"'
# Pickled table of relation symbols used by norm_splitter (see __get_dep_graph).
__relation_fl = 'resources/math_symbols_unicode.dump'
# Shared lxml parser: resolves DTD entities and drops insignificant whitespace.
__xml_parser = etree.XMLParser(remove_blank_text = True, load_dtd = True, resolve_entities = True)
def __get_clean_mathml(mt_string):
    """Parse a MathML fragment string into an lxml element.

    The module DTD is prepended so entities resolve, and lxml.objectify
    annotations/namespace declarations are stripped from the result.
    """
    document = StringIO(__dtd + mt_string)
    root = etree.parse(document, __xml_parser).getroot()
    objectify.deannotate(root, cleanup_namespaces=True)
    return root
def __extract_math_line_arxiv(line):
    """Split one tab-separated arXiv math line into (gmid, element).

    Layout: latexml_id, para_id, kmcs_id, then the MathML markup (which may
    itself contain tabs, hence the re-join of the remaining cells).  The
    global math id is 'para#kmcs#latexml'.
    """
    fields = line.strip().split('\t')
    latexml_id = fields[0]
    para_id = fields[1]
    kmcs_id = fields[2]
    gmid = '%s#%s#%s' % (para_id, kmcs_id, latexml_id)
    # Drop the default-namespace attribute before parsing.
    markup = '\t'.join(fields[3:]).replace(__xmlns, "")
    return gmid, __get_clean_mathml(markup)
def __extract_math_line_acl(line):
    """Split one tab-separated ACL math line into (gmid, element).

    Layout: gmid, then the MathML markup (re-joined in case it contains tabs).
    """
    fields = line.strip().split('\t')
    markup = '\t'.join(fields[1:]).replace(__xmlns, "")
    return fields[0], __get_clean_mathml(markup)
def __write_edges(edges, toflname):
    """Serialize the dependency edges to *toflname*.

    One line per source math id: 'gmid<TAB>id1 id2 ...', where each target
    id is the first element of an edge tuple.
    """
    lns = []
    # .items() instead of .iteritems(): works on both Python 2 and Python 3.
    for gmid, nodes in edges.items():
        lns.append('\t'.join([gmid, ' '.join([node[0] for node in nodes])]) + '\n')
    # Context manager guarantees the handle is closed even if writelines fails
    # (the original leaked the file object on error).
    with open(toflname, 'w') as f:
        f.writelines(lns)
def __get_dep_graph(math_dir, dep_dir, fl, matching_method):
    '''
    Build a heuristic dependency graph for all math expressions in one file.

    input: file from math_new
    output:
        1. edges: {gumid1:[(gumid2, linktype)]} --> component list
        2. gumidmappings: {gmid:gumid}

    The edges are written to dep_dir under the same file name.
    NOTE(review): the various [6:-7] slices strip the literal '<math>' (6
    chars) and '</math>' (7 chars) wrappers from serialized fragments.
    '''
    #useful utilities classes
    n_arxiv = norm_arxiv()
    n_attribute = norm_attribute()
    n_mrow = norm_mrow(__dtd)
    n_outer_fence = norm_outer_fence()
    n_tag = norm_tag(__dtd)
    n_splitter = norm_splitter(__dtd, __relation_fl)
    u = utils()
    depgraph = depgraph_heur(matching_method)

    lns = open(path.join(math_dir, fl)).readlines()

    #enumerate if there is no id in the <math> tag
    # gmid -> list of normalized string variants of that expression
    mts = OrderedDict()
    #for xhtml, enumerate mathtag; for xml, enumerate expressiontag; for math_new, enumerate the lines
    for ln in lns:
        if ln.strip() == '': continue
        gmid, mt = __extract_math_line_arxiv(ln)

        #replace <m:math> with <math>
        mt_string_initial = n_arxiv.remove_math_prefix(etree.tostring(mt))

        #remove annotation, attributes, and finally get rid the <math> tag
        mt_string_formatted = n_arxiv.remove_annotation(etree.parse(StringIO(__dtd + mt_string_initial)).getroot())
        mt_string_formatted = n_attribute.normalize(mt_string_formatted)

        #normalize mrow
        mt_string_formatted = n_mrow.normalize(mt_string_formatted)

        #remove fences
        mt_string_formatted = etree.tostring(n_outer_fence.remove_outer_fence(etree.parse(StringIO(__dtd + mt_string_formatted)).getroot()))[6:-7]

        #expand maths (normalize tags and/or case)
        expanded = n_tag.normalize_tags('<math>%s</math>' % mt_string_formatted)
        if len(expanded) > 0:
            # Re-normalize the last variant, then also add fence-stripped
            # copies of every variant produced so far.
            expanded[-1] = n_mrow.normalize('<math>%s</math>' % expanded[-1])[6:-7]
            expanded.extend([etree.tostring(n_outer_fence.remove_outer_fence(etree.parse(StringIO(__dtd + '<math>%s</math>' % exp)).getroot()))[6:-7] for exp in expanded])
        else:
            expanded = [mt_string_formatted]
        mts[gmid] = expanded

        #split around the equality and get the left side subexpressions
        left_subexp = n_splitter.split('<math>%s</math>' % expanded[-1])
        if left_subexp is None: continue

        left_subexp = n_mrow.normalize(left_subexp)[6:-7]
        if not u.is_empty_tag(left_subexp):
            expanded_left = n_tag.normalize_tags(left_subexp)
            expanded_left = [n_mrow.normalize('<math>%s</math>' % exp)[6:-7] for exp in expanded_left]
            mts[gmid].append(left_subexp)
            mts[gmid].extend(expanded_left)
        # De-duplicate the variants (set() loses order, which is acceptable here).
        mts[gmid] = list(set(mts[gmid]))

    edges = depgraph.create_edges(mts)
    __write_edges(edges, path.join(dep_dir, fl))
if __name__ == '__main__':
    #Preparation
    # NOTE(review): the command-line arguments are currently hard-coded for a
    # single test file; the argv[1]/argv[2] alternatives are commented out.
    math_path = "../mathmlandextra/math_new/5/0704.0005.txt"#argv[1]
    dep_dir = "./"#argv[2]

    math_dir = path.dirname(math_path) #path to math_new directory
    math_fl = path.basename(math_path) #./1/0704.0097.txt

    # try:
    __get_dep_graph(math_dir, dep_dir, math_fl, Matching_Methods.heur)
    # except:
    #     print math_path
vadimtk/chrome4sdp | tools/telemetry/third_party/gsutilz/third_party/boto/boto/support/exceptions.py | 151 | 1715 | # Copyright (c) 2013 Amazon.com, Inc. or its affiliates. All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from boto.exception import JSONResponseError
class CaseIdNotFound(JSONResponseError):
    """AWS Support service error: the requested support case ID was not found."""
    pass
class CaseCreationLimitExceeded(JSONResponseError):
    """AWS Support service error: the case creation limit was exceeded."""
    pass
class InternalServerError(JSONResponseError):
    """AWS Support service error: an internal server error occurred."""
    pass
class AttachmentLimitExceeded(JSONResponseError):
    """AWS Support service error: the attachment limit was exceeded."""
    pass
class DescribeAttachmentLimitExceeded(JSONResponseError):
    """AWS Support service error: the DescribeAttachment call limit was exceeded."""
    pass
class AttachmentSetIdNotFound(JSONResponseError):
    """AWS Support service error: the requested attachment set ID was not found."""
    pass
class AttachmentSetExpired(JSONResponseError):
    """AWS Support service error: the referenced attachment set has expired."""
    pass
class AttachmentIdNotFound(JSONResponseError):
    """AWS Support service error: the requested attachment ID was not found."""
    pass
class AttachmentSetSizeLimitExceeded(JSONResponseError):
    """AWS Support service error: the attachment set size limit was exceeded."""
    pass
| bsd-3-clause |
4022321818/2015cd_midterm2 | static/Brython3.1.1-20150328-091302/Lib/xml/sax/saxutils.py | 730 | 11688 | """\
A library of useful helper classes to the SAX classes, for the
convenience of application and driver writers.
"""
import os, urllib.parse, urllib.request
import io
from . import handler
from . import xmlreader
def __dict_replace(s, d):
    """Replace substrings of a string using a dictionary."""
    for old in d:
        s = s.replace(old, d[old])
    return s
def escape(data, entities={}):
    """Escape &, <, and > in a string of data.

    You can escape other strings of data by passing a dictionary as
    the optional entities parameter.  The keys and values must all be
    strings; each key will be replaced with its corresponding value.
    """
    # '&' has to be rewritten first, otherwise the ampersands produced by
    # the other replacements would themselves get escaped.
    for raw, ref in (("&", "&amp;"), (">", "&gt;"), ("<", "&lt;")):
        data = data.replace(raw, ref)
    # Caller-supplied entities are applied after the built-in three.
    for key, value in entities.items():
        data = data.replace(key, value)
    return data
def unescape(data, entities={}):
    """Unescape &amp;, &lt;, and &gt; in a string of data.

    You can unescape other strings of data by passing a dictionary as
    the optional entities parameter.  The keys and values must all be
    strings; each key will be replaced with its corresponding value.
    """
    data = data.replace("&lt;", "<").replace("&gt;", ">")
    for key, value in entities.items():
        data = data.replace(key, value)
    # '&amp;' must be unescaped last so that e.g. '&amp;lt;' becomes '&lt;'
    # rather than '<'.
    return data.replace("&amp;", "&")
def quoteattr(data, entities={}):
    """Escape and quote an attribute value.

    Escape &, <, and > in a string of data, then quote it for use as
    an attribute value.  The \" character will be escaped as well, if
    necessary.

    You can escape other strings of data by passing a dictionary as
    the optional entities parameter.  The keys and values must all be
    strings; each key will be replaced with its corresponding value.
    """
    # Merge the caller's entities with the whitespace characters that are
    # unsafe inside attribute values (the built-ins take precedence).
    merged = dict(entities)
    merged.update({'\n': '&#10;', '\r': '&#13;', '\t': '&#9;'})
    # Inlined escape(): ampersand first, then the remaining replacements.
    data = data.replace("&", "&amp;").replace(">", "&gt;").replace("<", "&lt;")
    for key, value in merged.items():
        data = data.replace(key, value)
    # Prefer double quotes; fall back to single quotes, and escape the
    # double quote itself only when both kinds are present.
    if '"' not in data:
        return '"%s"' % data
    if "'" not in data:
        return "'%s'" % data
    return '"%s"' % data.replace('"', "&quot;")
def _gettextwriter(out, encoding):
    """Return a text stream that writes to *out* using *encoding*.

    Accepts None (falls back to sys.stdout), an existing text stream
    (returned unchanged), a raw binary stream, or any object with a
    write() method; binary targets are wrapped in a TextIOWrapper that
    encodes with xmlcharrefreplace error handling.
    """
    if out is None:
        import sys
        return sys.stdout

    if isinstance(out, io.TextIOBase):
        # use a text writer as is
        return out

    # wrap a binary writer with TextIOWrapper
    if isinstance(out, io.RawIOBase):
        # Keep the original file open when the TextIOWrapper is
        # destroyed
        class _wrapper:
            # Spoof the class so TextIOWrapper treats the proxy like the
            # underlying stream; attribute access is forwarded to `out`.
            __class__ = out.__class__
            def __getattr__(self, name):
                return getattr(out, name)
        buffer = _wrapper()
        # no-op close keeps `out` usable after the wrapper is garbage-collected
        buffer.close = lambda: None
    else:
        # This is to handle passed objects that aren't in the
        # IOBase hierarchy, but just have a write method
        buffer = io.BufferedIOBase()
        buffer.writable = lambda: True
        buffer.write = out.write
        try:
            # TextIOWrapper uses this methods to determine
            # if BOM (for UTF-16, etc) should be added
            buffer.seekable = out.seekable
            buffer.tell = out.tell
        except AttributeError:
            pass
    return io.TextIOWrapper(buffer, encoding=encoding,
                            errors='xmlcharrefreplace',
                            newline='\n',
                            write_through=True)
class XMLGenerator(handler.ContentHandler):
    """SAX ContentHandler that serializes the events it receives back to XML.

    With short_empty_elements=True, elements with no content are emitted as
    '<tag/>'; this is implemented by deferring the closing '>' of a start tag
    (the _pending_start_element flag) until the next event shows whether the
    element is empty.
    """

    def __init__(self, out=None, encoding="iso-8859-1", short_empty_elements=False):
        handler.ContentHandler.__init__(self)
        out = _gettextwriter(out, encoding)
        self._write = out.write
        self._flush = out.flush
        self._ns_contexts = [{}] # contains uri -> prefix dicts
        self._current_context = self._ns_contexts[-1]
        self._undeclared_ns_maps = []
        self._encoding = encoding
        self._short_empty_elements = short_empty_elements
        self._pending_start_element = False

    def _qname(self, name):
        """Builds a qualified name from a (ns_url, localname) pair"""
        if name[0]:
            # Per http://www.w3.org/XML/1998/namespace, The 'xml' prefix is
            # bound by definition to http://www.w3.org/XML/1998/namespace. It
            # does not need to be declared and will not usually be found in
            # self._current_context.
            if 'http://www.w3.org/XML/1998/namespace' == name[0]:
                return 'xml:' + name[1]
            # The name is in a non-empty namespace
            prefix = self._current_context[name[0]]
            if prefix:
                # If it is not the default namespace, prepend the prefix
                return prefix + ":" + name[1]
        # Return the unqualified name
        return name[1]

    def _finish_pending_start_element(self,endElement=False):
        # Close a deferred start tag with '>' (the element turned out to
        # have content, so it cannot be collapsed to '<tag/>').
        if self._pending_start_element:
            self._write('>')
            self._pending_start_element = False

    # ContentHandler methods

    def startDocument(self):
        self._write('<?xml version="1.0" encoding="%s"?>\n' %
                        self._encoding)

    def endDocument(self):
        self._flush()

    def startPrefixMapping(self, prefix, uri):
        self._ns_contexts.append(self._current_context.copy())
        self._current_context[uri] = prefix
        self._undeclared_ns_maps.append((prefix, uri))

    def endPrefixMapping(self, prefix):
        self._current_context = self._ns_contexts[-1]
        del self._ns_contexts[-1]

    def startElement(self, name, attrs):
        self._finish_pending_start_element()
        self._write('<' + name)
        for (name, value) in attrs.items():
            self._write(' %s=%s' % (name, quoteattr(value)))
        if self._short_empty_elements:
            self._pending_start_element = True
        else:
            self._write(">")

    def endElement(self, name):
        if self._pending_start_element:
            # No content since the start tag: emit the short '<tag/>' form.
            self._write('/>')
            self._pending_start_element = False
        else:
            self._write('</%s>' % name)

    def startElementNS(self, name, qname, attrs):
        self._finish_pending_start_element()
        self._write('<' + self._qname(name))

        # Emit xmlns declarations collected since the last start tag.
        for prefix, uri in self._undeclared_ns_maps:
            if prefix:
                self._write(' xmlns:%s="%s"' % (prefix, uri))
            else:
                self._write(' xmlns="%s"' % uri)
        self._undeclared_ns_maps = []

        for (name, value) in attrs.items():
            self._write(' %s=%s' % (self._qname(name), quoteattr(value)))
        if self._short_empty_elements:
            self._pending_start_element = True
        else:
            self._write(">")

    def endElementNS(self, name, qname):
        if self._pending_start_element:
            self._write('/>')
            self._pending_start_element = False
        else:
            self._write('</%s>' % self._qname(name))

    def characters(self, content):
        if content:
            self._finish_pending_start_element()
            self._write(escape(content))

    def ignorableWhitespace(self, content):
        if content:
            self._finish_pending_start_element()
            self._write(content)

    def processingInstruction(self, target, data):
        self._finish_pending_start_element()
        self._write('<?%s %s?>' % (target, data))
class XMLFilterBase(xmlreader.XMLReader):
    """This class is designed to sit between an XMLReader and the
    client application's event handlers. By default, it does nothing
    but pass requests up to the reader and events on to the handlers
    unmodified, but subclasses can override specific methods to modify
    the event stream or the configuration requests as they pass
    through."""

    def __init__(self, parent = None):
        # `parent` is the upstream XMLReader this filter delegates to;
        # it may also be set later via setParent().
        xmlreader.XMLReader.__init__(self)
        self._parent = parent

    # ErrorHandler methods

    def error(self, exception):
        self._err_handler.error(exception)

    def fatalError(self, exception):
        self._err_handler.fatalError(exception)

    def warning(self, exception):
        self._err_handler.warning(exception)

    # ContentHandler methods

    def setDocumentLocator(self, locator):
        self._cont_handler.setDocumentLocator(locator)

    def startDocument(self):
        self._cont_handler.startDocument()

    def endDocument(self):
        self._cont_handler.endDocument()

    def startPrefixMapping(self, prefix, uri):
        self._cont_handler.startPrefixMapping(prefix, uri)

    def endPrefixMapping(self, prefix):
        self._cont_handler.endPrefixMapping(prefix)

    def startElement(self, name, attrs):
        self._cont_handler.startElement(name, attrs)

    def endElement(self, name):
        self._cont_handler.endElement(name)

    def startElementNS(self, name, qname, attrs):
        self._cont_handler.startElementNS(name, qname, attrs)

    def endElementNS(self, name, qname):
        self._cont_handler.endElementNS(name, qname)

    def characters(self, content):
        self._cont_handler.characters(content)

    def ignorableWhitespace(self, chars):
        self._cont_handler.ignorableWhitespace(chars)

    def processingInstruction(self, target, data):
        self._cont_handler.processingInstruction(target, data)

    def skippedEntity(self, name):
        self._cont_handler.skippedEntity(name)

    # DTDHandler methods

    def notationDecl(self, name, publicId, systemId):
        self._dtd_handler.notationDecl(name, publicId, systemId)

    def unparsedEntityDecl(self, name, publicId, systemId, ndata):
        self._dtd_handler.unparsedEntityDecl(name, publicId, systemId, ndata)

    # EntityResolver methods

    def resolveEntity(self, publicId, systemId):
        return self._ent_handler.resolveEntity(publicId, systemId)

    # XMLReader methods

    def parse(self, source):
        # Install this filter as every handler of the parent reader, so all
        # events flow through the delegation methods above, then parse.
        self._parent.setContentHandler(self)
        self._parent.setErrorHandler(self)
        self._parent.setEntityResolver(self)
        self._parent.setDTDHandler(self)
        self._parent.parse(source)

    def setLocale(self, locale):
        self._parent.setLocale(locale)

    def getFeature(self, name):
        return self._parent.getFeature(name)

    def setFeature(self, name, state):
        self._parent.setFeature(name, state)

    def getProperty(self, name):
        return self._parent.getProperty(name)

    def setProperty(self, name, value):
        self._parent.setProperty(name, value)

    # XMLFilter methods

    def getParent(self):
        return self._parent

    def setParent(self, parent):
        self._parent = parent
# --- Utility functions
def prepare_input_source(source, base=""):
    """Take an InputSource — or a system-id string, or a file-like object —
    plus an optional base URL, and return a fully resolved InputSource
    object ready for reading."""

    if isinstance(source, str):
        # A plain string is interpreted as a system identifier.
        source = xmlreader.InputSource(source)
    elif hasattr(source, "read"):
        # A file-like object becomes the byte stream of a fresh InputSource.
        stream = source
        source = xmlreader.InputSource()
        source.setByteStream(stream)
        if hasattr(stream, "name"):
            source.setSystemId(stream.name)

    if source.getByteStream() is not None:
        return source

    # No byte stream yet: resolve the system id against `base`, preferring
    # a local file when one exists at the combined path.
    sysid = source.getSystemId()
    sysidfilename = os.path.join(os.path.dirname(os.path.normpath(base)), sysid)
    if os.path.isfile(sysidfilename):
        source.setSystemId(sysidfilename)
        stream = open(sysidfilename, "rb")
    else:
        source.setSystemId(urllib.parse.urljoin(base, sysid))
        stream = urllib.request.urlopen(source.getSystemId())
    source.setByteStream(stream)

    return source
| agpl-3.0 |
vaquer/ckanext-dkan | ckanext/dkan/harvesters/dkanharvester.py | 1 | 31034 | import json
import urllib
import urllib2
import httplib
import datetime
import socket
import datetime
# from ckanext.harvest.harvesters.ckanharvester import CKANHarvester
from ckanext.harvest.harvesters.base import HarvesterBase
from ckanext.harvest.model import HarvestObject
from ckan.logic import ValidationError, NotFound, get_action
from ckan import model
import ckan.lib.munge as munge
from ckan.plugins import toolkit
# Module logger (inline __import__ avoids a separate top-level import line).
log = __import__('logging').getLogger(__name__)

# Map of resource MIME types to short format labels.
MIMETYPE_FORMATS = {
    'text/html': 'HTML',
    'text/csv': 'CSV',
    'text/xml': 'XML',
    'application/pdf': 'PDF',
    'application/zip': 'ZIP',
    'application/rdf+xml': 'RDF',
    'application/json': 'JSON',
    'application/vnd.ms-excel': 'XLS',
    'application/vnd.google-earth.kml+xml': 'KML',
    'application/msword': 'DOC',
}
class DKANHarvester(HarvesterBase):
ckan_revision_api_works = False
config = None
api_version = 2
action_api_version = 3
def info(self):
return {
'name': 'dkan',
'title': 'DKAN HARVESTER',
'description': 'Harvests remote DKAN instances',
'form_config_interface': 'Text'
}
def _get_action_api_offset(self):
return '/api/%d/action' % self.action_api_version
def _get_search_api_offset(self):
return '%s/current_package_list_with_resources' % self._get_action_api_offset()
    def _get_content(self, url):
        """GET *url* and return the response body as a string.

        Sends the configured API key (if any) in the Authorization header.
        Raises ContentNotFoundError on HTTP 404 and ContentFetchError on
        any other network/HTTP failure.
        """
        http_request = urllib2.Request(url=url)

        api_key = self.config.get('api_key')
        if api_key:
            http_request.add_header('Authorization', api_key)

        try:
            # 90 s timeout so a stalled remote does not hang the harvest job.
            http_response = urllib2.urlopen(http_request, timeout=90)
        except urllib2.HTTPError, e:
            if e.getcode() == 404:
                raise ContentNotFoundError('HTTP error: %s' % e.code)
            else:
                raise ContentFetchError('HTTP error: %s' % e.code)
        except urllib2.URLError, e:
            raise ContentFetchError('URL error: %s' % e.reason)
        except httplib.HTTPException, e:
            raise ContentFetchError('HTTP Exception: %s' % e)
        except socket.error, e:
            raise ContentFetchError('HTTP socket error: %s' % e)
        except Exception, e:
            raise ContentFetchError('HTTP general exception: %s' % e)
        return http_response.read()
    def _get_group(self, base_url, group):
        """Fetch one group dict from the remote instance via group_show.

        Raises RemoteResourceError if the response cannot be fetched or
        decoded as JSON.
        """
        url = base_url + self._get_action_api_offset() + '/group_show?id=' + \
            group['id']
        try:
            content = self._get_content(url)
            data = json.loads(content)
            if self.action_api_version == 3:
                # Action API v3 wraps the payload in a 'result' key.
                return data.pop('result')
            return data
        except (ContentFetchError, ValueError):
            log.debug('Could not fetch/decode remote group')
            raise RemoteResourceError('Could not fetch/decode remote group')
    def _get_organization(self, base_url, org_name):
        """Fetch one organization dict from the remote via organization_show.

        Raises RemoteResourceError if the response cannot be fetched or
        decoded as JSON.
        """
        url = base_url + self._get_action_api_offset() + \
            '/organization_show?id=' + org_name
        try:
            content = self._get_content(url)
            content_dict = json.loads(content)
            return content_dict['result']
        except (ContentFetchError, ValueError, KeyError):
            log.debug('Could not fetch/decode remote group')
            raise RemoteResourceError(
                'Could not fetch/decode remote organization')
def _set_config(self, config_str):
if config_str:
self.config = json.loads(config_str)
if 'api_version' in self.config:
self.api_version = int(self.config['api_version'])
log.debug('Using config: %r', self.config)
else:
self.config = {}
    def validate_config(self, config):
        """Validate (and possibly enrich) the harvest-source JSON config.

        Checks types of the known keys, verifies that default groups and
        the configured user exist locally, and stores the full group dicts
        under 'default_group_dicts'.  Returns the (possibly re-serialized)
        config string; raises ValueError on any invalid entry.
        """
        if not config:
            return config

        try:
            config_obj = json.loads(config)

            if 'api_version' in config_obj:
                try:
                    int(config_obj['api_version'])
                except ValueError:
                    raise ValueError('api_version must be an integer')

            if 'default_tags' in config_obj:
                if not isinstance(config_obj['default_tags'], list):
                    raise ValueError('default_tags must be a list')
                if config_obj['default_tags'] and \
                        not isinstance(config_obj['default_tags'][0], dict):
                    raise ValueError('default_tags must be a list of '
                                     'dictionaries')

            if 'default_groups' in config_obj:
                if not isinstance(config_obj['default_groups'], list):
                    raise ValueError('default_groups must be a *list* of group'
                                     ' names/ids')
                if config_obj['default_groups'] and \
                        not isinstance(config_obj['default_groups'][0],
                                       basestring):
                    raise ValueError('default_groups must be a list of group '
                                     'names/ids (i.e. strings)')

                # Check if default groups exist
                context = {'model': model, 'user': toolkit.c.user}
                config_obj['default_group_dicts'] = []
                for group_name_or_id in config_obj['default_groups']:
                    try:
                        group = get_action('group_show')(
                            context, {'id': group_name_or_id})
                        # save the dict to the config object, as we'll need it
                        # in the import_stage of every dataset
                        config_obj['default_group_dicts'].append(group)
                    except NotFound, e:
                        raise ValueError('Default group not found')
                # Re-serialize so the added group dicts are persisted.
                config = json.dumps(config_obj)

            if 'default_extras' in config_obj:
                if not isinstance(config_obj['default_extras'], dict):
                    raise ValueError('default_extras must be a dictionary')

            if 'organizations_filter_include' in config_obj \
                    and 'organizations_filter_exclude' in config_obj:
                raise ValueError('Harvest configuration cannot contain both '
                                 'organizations_filter_include and organizations_filter_exclude')

            if 'user' in config_obj:
                # Check if user exists
                context = {'model': model, 'user': toolkit.c.user}
                try:
                    user = get_action('user_show')(
                        context, {'id': config_obj.get('user')})
                except NotFound:
                    raise ValueError('User not found')

            for key in ('read_only', 'force_all'):
                if key in config_obj:
                    if not isinstance(config_obj[key], bool):
                        raise ValueError('%s must be boolean' % key)

        except ValueError, e:
            raise e

        return config
    def _get_all_packages(self, base_url, harvest_job):
        """Return the list of all dataset names/ids on the remote DKAN.

        Saves a gather error and returns None if the listing cannot be
        fetched.
        """
        # Request all remote packages
        url = base_url + '/api/3/action/package_list'
        log.debug('Getting all DKAN packages: %s', url)
        try:
            content = self._get_content(url)
        except Exception, e:
            self._save_gather_error('Unable to get content for URL: %s - %s'
                                    % (url, e), harvest_job)
            return None

        packages = json.loads(content)['result']

        return packages
    def _get_package(self, base_url, harvest_object):
        """Fetch one dataset dict for the harvest object's guid.

        Returns (url, package_json_string), or (None, None) after saving an
        object error if the fetch fails.
        """
        url = base_url + '/api/3/action/package_show/' + harvest_object.guid
        log.debug('Getting DKAN package: %s', url)

        # Get contents
        try:
            content = self._get_content(url)
        except Exception, e:
            self._save_object_error(
                'Unable to get content for package: %s - %r' % (url, e),
                harvest_object)
            return None, None

        # DKAN wraps the dataset in a one-element list under 'result'.
        package = json.loads(content)['result'][0]

        return url, json.dumps(package)
    def fetch_stage(self, harvest_object):
        """No-op fetch stage: the package dict is already on the object."""
        # Nothing to do here - we got the package dict in the search in the
        # gather stage
        return True
    def gather_stage(self, harvest_job):
        """List remote datasets and create one HarvestObject per dataset.

        Tries an incremental search (datasets modified since the last
        error-free job) unless 'force_all' is configured; falls back to a
        full listing.  Returns the list of created HarvestObject ids, or
        None after saving a gather error.
        """
        log.debug('In DKANHarvester gather_stage (%s)',
                  harvest_job.source.url)
        toolkit.requires_ckan_version(min_version='2.0')
        get_all_packages = True

        self._set_config(harvest_job.source.config)

        # Get source URL
        remote_ckan_base_url = harvest_job.source.url.rstrip('/')

        # Filter in/out datasets from particular organizations
        fq_terms = []
        org_filter_include = self.config.get('organizations_filter_include', [])
        org_filter_exclude = self.config.get('organizations_filter_exclude', [])
        if org_filter_include:
            fq_terms.append(' OR '.join(
                'organization:%s' % org_name for org_name in org_filter_include))
        elif org_filter_exclude:
            fq_terms.extend(
                '-organization:%s' % org_name for org_name in org_filter_exclude)

        # Ideally we can request from the remote CKAN only those datasets
        # modified since the last completely successful harvest.
        last_error_free_job = self.last_error_free_job(harvest_job)
        log.debug('Last error-free job: %r', last_error_free_job)
        if (last_error_free_job and
                not self.config.get('force_all', False)):
            get_all_packages = False

            # Request only the datasets modified since
            last_time = last_error_free_job.gather_started
            # Note: SOLR works in UTC, and gather_started is also UTC, so
            # this should work as long as local and remote clocks are
            # relatively accurate. Going back a little earlier, just in case.
            get_changes_since = \
                (last_time - datetime.timedelta(hours=1)).isoformat()
            log.info('Searching for datasets modified since: %s UTC',
                     get_changes_since)

            fq_since_last_time = 'metadata_modified:[{since}Z TO *]' \
                .format(since=get_changes_since)

            try:
                pkg_dicts = self._search_for_datasets(
                    remote_ckan_base_url,
                    fq_terms + [fq_since_last_time])
            except SearchError, e:
                log.info('Searching for datasets changed since last time '
                         'gave an error: %s', e)
                # Incremental search failed: fall back to the full listing.
                get_all_packages = True

            if not get_all_packages and not pkg_dicts:
                log.info('No datasets have been updated on the remote '
                         'DKAN instance since the last harvest job %s',
                         last_time)
                return None

        # Fall-back option - request all the datasets from the remote CKAN
        if get_all_packages:
            # Request all remote packages
            try:
                pkg_dicts = self._search_for_datasets(remote_ckan_base_url,
                                                      fq_terms)
            except SearchError, e:
                log.info('Searching for all datasets gave an error: %s', e)
                self._save_gather_error(
                    'Unable to search remote DKAN for datasets:%s url:%s'
                    'terms:%s' % (e, remote_ckan_base_url, fq_terms),
                    harvest_job)
                return None
        if not pkg_dicts:
            self._save_gather_error(
                'No datasets found at DKAN: %s' % remote_ckan_base_url,
                harvest_job)
            return None

        # Create harvest objects for each dataset
        try:
            package_ids = set()
            object_ids = []
            for pkg_dict in pkg_dicts:
                if pkg_dict is None:
                    continue
                if pkg_dict['id'] in package_ids:
                    log.info('Discarding duplicate dataset %s - probably due '
                             'to datasets being changed at the same time as '
                             'when the harvester was paging through',
                             pkg_dict['id'])
                    continue
                package_ids.add(pkg_dict['id'])

                log.debug('Package: %s', pkg_dict)
                log.debug('Creating HarvestObject for %s %s',
                          pkg_dict['name'], pkg_dict['id'])
                obj = HarvestObject(guid=pkg_dict['id'],
                                    job=harvest_job,
                                    content=json.dumps(pkg_dict))
                obj.save()
                object_ids.append(obj.id)

            return object_ids
        except Exception, e:
            self._save_gather_error('%r' % e.message, harvest_job)
def _search_for_datasets(self, remote_ckan_base_url, fq_terms=None):
'''Does a dataset search on a remote CKAN and returns the results.
Deals with paging to return all the results, not just the first page.
'''
base_search_url = remote_ckan_base_url + self._get_search_api_offset()
params = {'limit': '100', 'offset': '0'}
# There is the worry that datasets will be changed whilst we are paging
# through them.
# * In SOLR 4.7 there is a cursor, but not using that yet
# because few CKANs are running that version yet.
# * However we sort, then new names added or removed before the current
# page would cause existing names on the next page to be missed or
# double counted.
# * Another approach might be to sort by metadata_modified and always
# ask for changes since (and including) the date of the last item of
# the day before. However if the entire page is of the exact same
# time, then you end up in an infinite loop asking for the same page.
# * We choose a balanced approach of sorting by ID, which means
# datasets are only missed if some are removed, which is far less
# likely than any being added. If some are missed then it is assumed
# they will harvested the next time anyway. When datasets are added,
# we are at risk of seeing datasets twice in the paging, so we detect
# and remove any duplicates.
pkg_dicts = []
pkg_ids = set()
previous_content = None
while True:
url = base_search_url + '?' + urllib.urlencode(params)
log.debug('Searching for DKAN datasets: %s', url)
try:
content = self._get_content(url)
except ContentFetchError, e:
raise SearchError(
'Error sending request to search remote '
'DKAN instance %s using URL %r. Error: %s' %
(remote_ckan_base_url, url, e))
if previous_content and content == previous_content:
raise SearchError('The paging doesn\'t seem to work. URL: %s' %
url)
try:
response_dict = json.loads(content)
except ValueError:
raise SearchError('Response from remote DKAN was not JSON: %r'
% content)
try:
pkg_dicts_page = response_dict.get('result', [])
except ValueError:
raise SearchError('Response JSON did not contain '
'result/results: %r' % response_dict)
if len(pkg_dicts_page) == 0:
break
# Weed out any datasets found on previous pages (should datasets be
# changing while we page)
if type(pkg_dicts_page[0]) == list:
pkg_dicts_page = pkg_dicts_page[0]
pkg_dicts_page = [self._convert_dkan_package_to_ckan(p) for p in pkg_dicts_page]
ids_in_page = set(p['id'] for p in pkg_dicts_page if p is not None)
duplicate_ids = ids_in_page & pkg_ids
if duplicate_ids:
pkg_dicts_page = [p for p in pkg_dicts_page if p['id'] not in duplicate_ids]
pkg_ids |= ids_in_page
pkg_dicts.extend(pkg_dicts_page)
params['offset'] = str(int(params['offset']) + int(params['limit']))
return pkg_dicts
    def import_stage(self, harvest_object):
        """Import one harvested dataset into the local CKAN instance.

        Decodes the package dict stored on ``harvest_object.content``,
        normalises its tags/groups/organization/extras according to the
        harvest-source config, strips per-resource fields that only make
        sense on the remote site, and finally creates or updates the local
        package.

        Returns True to skip harvest-source datasets, False on missing
        input, otherwise whatever ``_create_or_update_package`` returns.
        Errors are recorded with ``_save_object_error``.
        """
        log.debug('In DKANHarvester import_stage')
        base_context = {'model': model, 'session': model.Session,
                        'user': self._get_user_name()}
        if not harvest_object:
            log.error('No harvest object received')
            return False
        if harvest_object.content is None:
            self._save_object_error('Empty content for object %s' %
                                    harvest_object.id,
                                    harvest_object, 'Import')
            return False
        self._set_config(harvest_object.job.source.config)
        try:
            package_dict = json.loads(harvest_object.content)
            if package_dict.get('type') == 'harvest':
                log.warn('Remote dataset is a harvest source, ignoring...')
                return True
            # Set default tags if needed
            default_tags = self.config.get('default_tags', [])
            if default_tags:
                if not 'tags' in package_dict:
                    package_dict['tags'] = []
                package_dict['tags'].extend(
                    [t for t in default_tags if t not in package_dict['tags']])
            # Groups: either dropped entirely, or validated against the local
            # site (and optionally created there when config says 'create').
            remote_groups = self.config.get('remote_groups', None)
            if not remote_groups in ('only_local', 'create'):
                # Ignore remote groups
                package_dict.pop('groups', None)
            else:
                if not 'groups' in package_dict:
                    package_dict['groups'] = []
                # check if remote groups exist locally, otherwise remove
                validated_groups = []
                for group_ in package_dict['groups']:
                    try:
                        data_dict = {'id': group_['id']}
                        group = get_action('group_show')(base_context.copy(), data_dict)
                        validated_groups.append({'id': group['id'], 'name': group['name']})
                    except NotFound, e:
                        log.info('Group %s is not available', group_)
                        if remote_groups == 'create':
                            try:
                                group = self._get_group(harvest_object.source.url, group_)
                            except RemoteResourceError:
                                log.error('Could not get remote group %s', group_)
                                continue
                            # Strip fields group_create would reject or that
                            # only make sense on the remote site.
                            for key in ['packages', 'created', 'users', 'groups', 'tags', 'extras', 'display_name']:
                                group.pop(key, None)
                            get_action('group_create')(base_context.copy(), group)
                            log.info('Group %s has been newly created', group_)
                            validated_groups.append({'id': group['id'], 'name': group['name']})
                package_dict['groups'] = validated_groups
            # Local harvest source organization
            source_dataset = get_action('package_show')(base_context.copy(), {'id': harvest_object.source.id})
            local_org = source_dataset.get('owner_org')
            remote_orgs = self.config.get('remote_orgs', None)
            if not remote_orgs in ('only_local', 'create'):
                # Assign dataset to the source organization
                package_dict['owner_org'] = local_org
            else:
                if not 'owner_org' in package_dict:
                    package_dict['owner_org'] = None
                # check if remote org exist locally, otherwise remove
                validated_org = None
                remote_org = package_dict['owner_org']
                if remote_org:
                    try:
                        data_dict = {'id': remote_org}
                        org = get_action('organization_show')(base_context.copy(), data_dict)
                        validated_org = org['id']
                    except NotFound, e:
                        log.info('Organization %s is not available', remote_org)
                        if remote_orgs == 'create':
                            try:
                                try:
                                    org = self._get_organization(harvest_object.source.url, remote_org)
                                except RemoteResourceError:
                                    # fallback if remote CKAN exposes organizations as groups
                                    # this especially targets older versions of CKAN
                                    org = self._get_group(harvest_object.source.url, remote_org)
                                for key in ['packages', 'created', 'users', 'groups', 'tags', 'extras', 'display_name', 'type']:
                                    org.pop(key, None)
                                get_action('organization_create')(base_context.copy(), org)
                                log.info('Organization %s has been newly created', remote_org)
                                validated_org = org['id']
                            except (RemoteResourceError, ValidationError):
                                log.error('Could not get remote org %s', remote_org)
                # Fall back to the harvest source's own organization.
                package_dict['owner_org'] = validated_org or local_org
            # Set default groups if needed
            default_groups = self.config.get('default_groups', [])
            if default_groups:
                if not 'groups' in package_dict:
                    package_dict['groups'] = []
                existing_group_ids = [g['id'] for g in package_dict['groups']]
                package_dict['groups'].extend(
                    [g for g in self.config['default_group_dicts']
                     if g['id'] not in existing_group_ids])
            # Set default extras if needed
            default_extras = self.config.get('default_extras', {})
            # Helper: find the extra with the given key, or None.
            def get_extra(key, package_dict):
                for extra in package_dict.get('extras', []):
                    if extra['key'] == key:
                        return extra
            if default_extras:
                override_extras = self.config.get('override_extras', False)
                if not 'extras' in package_dict:
                    package_dict['extras'] = []
                for key, value in default_extras.iteritems():
                    existing_extra = get_extra(key, package_dict)
                    if existing_extra and not override_extras:
                        continue  # no need for the default
                    if existing_extra:
                        package_dict['extras'].remove(existing_extra)
                    # Look for replacement strings
                    if isinstance(value, basestring):
                        value = value.format(
                            harvest_source_id=harvest_object.job.source.id,
                            harvest_source_url=
                            harvest_object.job.source.url.strip('/'),
                            harvest_source_title=
                            harvest_object.job.source.title,
                            harvest_job_id=harvest_object.job.id,
                            harvest_object_id=harvest_object.id,
                            dataset_id=package_dict['id'])
                    package_dict['extras'].append({'key': key, 'value': value})
            for resource in package_dict.get('resources', []):
                # Clear remote url_type for resources (eg datastore, upload) as
                # we are only creating normal resources with links to the
                # remote ones
                resource.pop('url_type', None)
                # Clear revision_id as the revision won't exist on this CKAN
                # and saving it will cause an IntegrityError with the foreign
                # key.
                resource.pop('revision_id', None)
            result = self._create_or_update_package(
                package_dict, harvest_object, package_dict_form='package_show')
            log.info(result)
            return result
        except ValidationError, e:
            self._save_object_error('Invalid package with GUID %s: %r' %
                                    (harvest_object.guid, e.error_dict),
                                    harvest_object, 'Import')
        except Exception, e:
            self._save_object_error('%s' % e, harvest_object, 'Import')
def _convert_dkan_package_to_ckan(self, package):
"""
Function: Change the package dict's DKAN-style
to CKAN-style
Return: <dict>
"""
resources = []
try:
if 'extras' not in package:
package['extras'] = []
if 'title' not in package:
raise ValueError("Dataset has not title")
if 'name' not in package:
package['name'] = munge.munge_title_to_name(package['title'])
if 'description' in package:
package['notes'] = package['description']
for license in model.Package.get_license_register().values():
if license.title == package['license_title']:
package['license_id'] = license.id
break
if 'private' not in package:
package['private'] = False
else:
package['private'] = True if package['private'] != 'Publicado' else False
package['state'] = package['state'].lower()
package['type'] = package['type'].lower()
if 'metadata_created' in package:
package['metadata_created'] = self._convert_date_package_handling_error(package, 'metadata_created')
if 'metadata_modified' in package:
package['metadata_modified'] = self._convert_date_package_handling_error(package, 'metadata_modified')
if 'revision_timestamp' in package:
package['revision_timestamp'] = self._convert_date_package_handling_error(package, 'revision_timestamp')
if 'resources' not in package:
raise ValueError('Dataset has no resources')
package = self._fix_tags(package)
for resource in package['resources']:
resource['description'] = resource['name']
if not resource.get('url', ''):
next
if 'size' in resource:
if type(resource['size']) == unicode or type(resource['size']) == str:
clean_size = resource['size'].replace('KB', '').replace('MB', '').strip()
try:
resource['size'] = int(float(clean_size))
except:
log.error(u'Incorrect size file format Package: {0}, Resource: {1}'.format(package['name'], resource['name']))
resource['size'] = 0
return None
self._convert_date_resource_handling_error(resource, 'created', package['name'])
self._convert_date_resource_handling_error(resource, 'last_modified', package['name'], last_modified=True)
if 'revision_id' in resource:
del resource['revision_id']
if 'format' not in resource:
resource['format'] = MIMETYPE_FORMATS.get(resource.get('mimetype'), '')
resources.append(resource)
package['resources'] = resources
if 'private' in package:
# DKAN appears to have datasets with private=True which are
# still public: https://github.com/NuCivic/dkan/issues/950. If
# they were really private then we'd not get be able to access
# them, so assume they are not private.
package['private'] = False
return package
except Exception, e:
log.error('Unable to get convert DKAN to CKAN package: %s' % e)
return None
def _convert_date_package_handling_error(self, package, key, last_modified=False):
"""
Function: Convert package's format dates
Return: <string>
"""
try:
return self._convert_date(package[key], last_modified=last_modified)
except:
log.error(
u'Incorrect date metadata_created format in Package {0}: {1}'.format(package['name'], package[key])
)
return datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f")
def _convert_date_resource_handling_error(self, resource, key, package_name, last_modified=False):
"""
Function: Convert resources's format dates
Return: <string>
"""
try:
return self._convert_date(resource[key], last_modified=last_modified)
except:
log.error(
u'Incorrect date last_modified format in Package: {0}, Source: {1} Date: {2}'.format(package_name, resource['name'], resource[key])
)
return datetime.datetime.now().strftime("%Y-%m-%dT%H:%M:%S.%f")
def _convert_date(self, date, last_modified=False):
"""
Function: Convert generic format to ckan dates format
Return: <string>
"""
try:
date_object = datetime.datetime.strptime(date, "%Y-%m-%dT%H:%M:%S.%f")
return date
except:
pass
try:
date_object = datetime.datetime.strptime(date, "%Y-%m-%dT%H:%M:%S")
return date
except:
pass
date_correct_format = date.replace('Date changed\t', '')[4:].lstrip() if last_modified else date[4:].lstrip()
date_object = datetime.datetime.strptime(date_correct_format, '%m/%d/%Y - %H:%M:%S')
return date_object.strftime("%Y-%m-%dT%H:%M:%S.%f")
def _fix_tags(self, package_dict):
"""
Function: Purge the vocabulary tags
Return: <dict>
"""
tags = []
tag_aux = None
for tag in package_dict.get('tags', []):
tag_aux = tag
if 'vocabulary_id' in tag_aux:
tag_aux['vocabulary_id'] = None
tags.append(tag_aux)
package_dict['tags'] = tags
return package_dict
class ContentFetchError(Exception):
    """Raised when fetching content from a remote URL fails."""
    pass
class ContentNotFoundError(ContentFetchError):
    """A ContentFetchError for remote content that does not exist."""
    pass
class RemoteResourceError(Exception):
    """Raised when a remote group/organization cannot be retrieved."""
    pass
class SearchError(Exception):
    """Raised when a remote dataset search fails or returns unusable data."""
    pass
| agpl-3.0 |
TeamExodus/external_chromium_org | third_party/cython/src/Cython/Debugger/Tests/test_libpython_in_gdb.py | 110 | 3979 | # -*- coding: UTF-8 -*-
"""
Test libpython.py. This is already partly tested by test_libcython_in_gdb and
Lib/test/test_gdb.py in the Python source. These tests are run in gdb and
called from test_libcython_in_gdb.main()
"""
import os
import sys
import gdb
from Cython.Debugger import libcython
from Cython.Debugger import libpython
import test_libcython_in_gdb
from test_libcython_in_gdb import _debug, inferior_python_version
class TestPrettyPrinters(test_libcython_in_gdb.DebugTestCase):
    """
    Test whether types of Python objects are correctly inferred and that
    the right libpython.PySomeTypeObjectPtr classes are instantiated.
    Also test whether values are appropriately formatted (don't be too
    laborious as Lib/test/test_gdb.py already covers this extensively).
    Don't take care of decreffing newly allocated objects as a new
    interpreter is started for every test anyway.
    """
    def setUp(self):
        # Stop the inferior at a known line so its heap is in a usable state
        # before we start allocating objects in it.
        super(TestPrettyPrinters, self).setUp()
        self.break_and_run('b = c = d = 0')
    def get_pyobject(self, code):
        # Evaluate `code` in the inferior; the result must be a non-NULL
        # PyObject pointer.
        value = gdb.parse_and_eval(code)
        assert libpython.pointervalue(value) != 0
        return value
    def pyobject_fromcode(self, code, gdbvar=None):
        # Optionally bind the expression to a gdb convenience variable (so the
        # object stays reachable across commands), then wrap the pointer in
        # the appropriate libpython.Py*ObjectPtr subclass.
        if gdbvar is not None:
            d = {'varname':gdbvar, 'code':code}
            gdb.execute('set $%(varname)s = %(code)s' % d)
            code = '$' + gdbvar
        return libpython.PyObjectPtr.from_pyobject_ptr(self.get_pyobject(code))
    def get_repr(self, pyobject):
        # The length-limited repr exactly as the gdb pretty-printers show it.
        return pyobject.get_truncated_repr(libpython.MAX_OUTPUT_LEN)
    def alloc_bytestring(self, string, gdbvar=None):
        # Allocate a byte string inside the inferior: PyString on Python 2,
        # PyBytes on Python 3.
        if inferior_python_version < (3, 0):
            funcname = 'PyString_FromStringAndSize'
        else:
            funcname = 'PyBytes_FromStringAndSize'
        assert '"' not in string
        # ensure double quotes
        code = '(PyObject *) %s("%s", %d)' % (funcname, string, len(string))
        return self.pyobject_fromcode(code, gdbvar=gdbvar)
    def alloc_unicodestring(self, string, gdbvar=None):
        # Build a unicode object in the inferior by first allocating a UTF-8
        # bytestring there, then decoding it with the runtime's own API.
        self.alloc_bytestring(string.encode('UTF-8'), gdbvar='_temp')
        postfix = libpython.get_inferior_unicode_postfix()
        funcname = 'PyUnicode%s_FromEncodedObject' % (postfix,)
        return self.pyobject_fromcode(
            '(PyObject *) %s($_temp, "UTF-8", "strict")' % funcname,
            gdbvar=gdbvar)
    def test_bytestring(self):
        # Byte strings print as str-repr on 2.x and b'...' on 3.x.
        bytestring = self.alloc_bytestring("spam")
        if inferior_python_version < (3, 0):
            bytestring_class = libpython.PyStringObjectPtr
            expected = repr("spam")
        else:
            bytestring_class = libpython.PyBytesObjectPtr
            expected = "b'spam'"
        self.assertEqual(type(bytestring), bytestring_class)
        self.assertEqual(self.get_repr(bytestring), expected)
    def test_unicode(self):
        # On 2.x the repr carries the u'' prefix; on 3.x it does not.
        unicode_string = self.alloc_unicodestring(u"spam ἄλφα")
        expected = "'spam ἄλφα'"
        if inferior_python_version < (3, 0):
            expected = 'u' + expected
        self.assertEqual(type(unicode_string), libpython.PyUnicodeObjectPtr)
        self.assertEqual(self.get_repr(unicode_string), expected)
    def test_int(self):
        # PyInt only exists on Python 2; nothing to check on 3.x.
        if inferior_python_version < (3, 0):
            intval = self.pyobject_fromcode('PyInt_FromLong(100)')
            self.assertEqual(type(intval), libpython.PyIntObjectPtr)
            self.assertEqual(self.get_repr(intval), '100')
    def test_long(self):
        longval = self.pyobject_fromcode('PyLong_FromLong(200)',
                                         gdbvar='longval')
        # Sanity-check the inferior really produced a PyLong before asserting
        # on the wrapper type.
        assert gdb.parse_and_eval('$longval->ob_type == &PyLong_Type')
        self.assertEqual(type(longval), libpython.PyLongObjectPtr)
        self.assertEqual(self.get_repr(longval), '200')
    def test_frame_type(self):
        # The currently executing frame should be wrapped as a frame pointer.
        frame = self.pyobject_fromcode('PyEval_GetFrame()')
        self.assertEqual(type(frame), libpython.PyFrameObjectPtr)
| bsd-3-clause |
a-y-u-s-h/QuarkWebsite2017 | quark/lib/python2.7/site-packages/pip/_vendor/distlib/markers.py | 1261 | 6282 | # -*- coding: utf-8 -*-
#
# Copyright (C) 2012-2013 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
"""Parser for the environment markers micro-language defined in PEP 345."""
import ast
import os
import sys
import platform
from .compat import python_implementation, string_types
from .util import in_venv
__all__ = ['interpret']
class Evaluator(object):
    """
    A limited evaluator for Python expressions.

    Parses a marker expression with ``ast`` and walks the tree, dispatching
    each node type to a ``do_<nodetype>`` method. Only the operators in
    ``operators`` and the names in ``allowed_values`` (plus the caller's
    ``context`` mapping) can appear in an expression; anything else raises
    SyntaxError.
    """
    # Comparison/boolean operators permitted in marker expressions, keyed by
    # the lowercased AST operator class name (e.g. ast.NotEq -> 'noteq').
    operators = {
        'eq': lambda x, y: x == y,
        'gt': lambda x, y: x > y,
        'gte': lambda x, y: x >= y,
        'in': lambda x, y: x in y,
        'lt': lambda x, y: x < y,
        'lte': lambda x, y: x <= y,
        'not': lambda x: not x,
        'noteq': lambda x, y: x != y,
        'notin': lambda x, y: x not in y,
    }
    # Environment values a marker may reference, captured at import time.
    allowed_values = {
        'sys_platform': sys.platform,
        'python_version': '%s.%s' % sys.version_info[:2],
        # parsing sys.platform is not reliable, but there is no other
        # way to get e.g. 2.7.2+, and the PEP is defined with sys.version
        'python_full_version': sys.version.split(' ', 1)[0],
        'os_name': os.name,
        'platform_in_venv': str(in_venv()),
        'platform_release': platform.release(),
        'platform_version': platform.version(),
        'platform_machine': platform.machine(),
        'platform_python_implementation': python_implementation(),
    }
    def __init__(self, context=None):
        """
        Initialise an instance.

        :param context: If specified, names are looked up in this mapping.
        """
        self.context = context or {}
        self.source = None
    def get_fragment(self, offset):
        """
        Get the part of the source which is causing a problem.
        """
        fragment_len = 10
        s = '%r' % (self.source[offset:offset + fragment_len])
        if offset + fragment_len < len(self.source):
            s += '...'
        return s
    def get_handler(self, node_type):
        """
        Get a handler for the specified AST node type.
        """
        return getattr(self, 'do_%s' % node_type, None)
    def evaluate(self, node, filename=None):
        """
        Evaluate a source string or node, using ``filename`` when
        displaying errors.
        """
        if isinstance(node, string_types):
            # First call: parse the source, then recurse on the AST.
            self.source = node
            kwargs = {'mode': 'eval'}
            if filename:
                kwargs['filename'] = filename
            try:
                node = ast.parse(node, **kwargs)
            except SyntaxError as e:
                s = self.get_fragment(e.offset)
                raise SyntaxError('syntax error %s' % s)
        node_type = node.__class__.__name__.lower()
        handler = self.get_handler(node_type)
        if handler is None:
            if self.source is None:
                s = '(source not available)'
            else:
                s = self.get_fragment(node.col_offset)
            raise SyntaxError("don't know how to evaluate %r %s" % (
                node_type, s))
        return handler(node)
    def get_attr_key(self, node):
        """Return the dotted name ('obj.attr') for an Attribute node."""
        assert isinstance(node, ast.Attribute), 'attribute node expected'
        return '%s.%s' % (node.value.id, node.attr)
    def do_attribute(self, node):
        """Resolve a dotted name from the context or the allowed values."""
        if not isinstance(node.value, ast.Name):
            valid = False
        else:
            key = self.get_attr_key(node)
            valid = key in self.context or key in self.allowed_values
        if not valid:
            raise SyntaxError('invalid expression: %s' % key)
        # The caller-supplied context takes precedence over allowed_values.
        if key in self.context:
            result = self.context[key]
        else:
            result = self.allowed_values[key]
        return result
    def do_boolop(self, node):
        """Evaluate 'and'/'or' chains with short-circuiting."""
        result = self.evaluate(node.values[0])
        is_or = node.op.__class__ is ast.Or
        is_and = node.op.__class__ is ast.And
        assert is_or or is_and
        # Only keep evaluating while the outcome is still undecided.
        if (is_and and result) or (is_or and not result):
            for n in node.values[1:]:
                result = self.evaluate(n)
                if (is_or and result) or (is_and and not result):
                    break
        return result
    def do_compare(self, node):
        """Evaluate (possibly chained) comparisons, e.g. a < b <= c."""
        # Literal-vs-literal comparisons are rejected: a marker comparing two
        # constants cannot depend on the environment.
        def sanity_check(lhsnode, rhsnode):
            valid = True
            if isinstance(lhsnode, ast.Str) and isinstance(rhsnode, ast.Str):
                valid = False
            #elif (isinstance(lhsnode, ast.Attribute)
            #      and isinstance(rhsnode, ast.Attribute)):
            #    klhs = self.get_attr_key(lhsnode)
            #    krhs = self.get_attr_key(rhsnode)
            #    valid = klhs != krhs
            if not valid:
                s = self.get_fragment(node.col_offset)
                raise SyntaxError('Invalid comparison: %s' % s)
        lhsnode = node.left
        lhs = self.evaluate(lhsnode)
        result = True
        for op, rhsnode in zip(node.ops, node.comparators):
            sanity_check(lhsnode, rhsnode)
            op = op.__class__.__name__.lower()
            if op not in self.operators:
                raise SyntaxError('unsupported operation: %r' % op)
            rhs = self.evaluate(rhsnode)
            result = self.operators[op](lhs, rhs)
            if not result:
                # Chained comparison short-circuits on the first failure.
                break
            lhs = rhs
            lhsnode = rhsnode
        return result
    def do_expression(self, node):
        """Evaluate the body of an ast.Expression (mode='eval' root)."""
        return self.evaluate(node.body)
    def do_name(self, node):
        """Resolve a bare name from the context or the allowed values."""
        valid = False
        if node.id in self.context:
            valid = True
            result = self.context[node.id]
        elif node.id in self.allowed_values:
            valid = True
            result = self.allowed_values[node.id]
        if not valid:
            raise SyntaxError('invalid expression: %s' % node.id)
        return result
    def do_str(self, node):
        """String literals evaluate to their value."""
        return node.s
def interpret(marker, execution_context=None):
    """
    Evaluate an environment-marker string against the current environment.

    :param marker: the PEP 345 marker expression to evaluate
    :type marker: str
    :param execution_context: optional mapping consulted for name lookup
    :type execution_context: mapping
    """
    evaluator = Evaluator(execution_context)
    return evaluator.evaluate(marker.strip())
| apache-2.0 |
cs243iitg/vehicle-webapp | webapp/vms/views.py | 1 | 12301 | from django.shortcuts import render, render_to_response
from django.http import HttpResponse, HttpResponseRedirect
from django.views import generic
from django.core.context_processors import csrf
from django.views.decorators.csrf import csrf_protect
from django.contrib import auth
from django.contrib import messages
from django.template import RequestContext
from django.contrib.auth.decorators import login_required
from django.contrib.admin.views.decorators import staff_member_required
from django.shortcuts import get_object_or_404
from .forms import TheftForm, StudentVehicleForm, SuspiciousVehicleForm
from .models import TheftReport, StudentVehicle, BusTiming, EmployeeVehicle, SuspiciousVehicle, Guard, ParkingSlot, StudentCycle, OnDutyGuard, PersonPass
from datetime import datetime
import requests, threading
from vms import pdf
def login(request):
    """
    Display the login page.

    GET renders the login template with a CSRF token. Credentials are
    actually checked by ``auth_view``, so a POST here only re-renders the
    page with a hint message.
    """
    c = {}
    c.update(csrf(request))
    if request.method == 'POST':
        # BUG FIX: this branch previously rendered
        # {'form_errors': form_errors} where `form_errors` was never
        # defined, raising NameError on every POST. Supply an explicit
        # message instead.
        c['form_errors'] = "Please log in using the login form."
        return render_to_response('vms/login.html', c)
    else:
        return render_to_response('vms/login.html', c)
@login_required(login_url="/vms")
def logout(request):
    """End the current session and send the user back to the login page."""
    auth.logout(request)
    return HttpResponseRedirect('/vms/', {
        'form_errors': "You've succesfully logged out."
    })
def auth_view(request):
    """
    Authenticate the credentials submitted by the login form.

    On success the user is logged in and redirected to the dashboard; on
    failure they are bounced back to the login page.
    TODO: the post-login redirect should depend on the kind of user.
    """
    username = request.POST.get('username', '')
    password = request.POST.get('password', '')
    user = auth.authenticate(username=username, password=password)
    if user is None:
        return HttpResponseRedirect('/vms/')
    auth.login(request, user)
    return HttpResponseRedirect('/vms/users/dashboard')
#------------------------------------------------------------
# Theft Reporting for User
#------------------------------------------------------------
# @login_required(login_url="/vms/")
# def home(request):
# """
# Home page for user, with his previous tasks
# """
# today = str.lower(datetime.now().strftime("%A"))
# buses = sorted(j for j in BusTiming.objects.all() if (j.from_time >= datetime.now().time() and filter(lambda x: str(x).lower() == today, j.availability.all()) ))
# return render(request, 'vms/dashboard.html',{
# 'username': request.user.first_name,
# 'is_user': True,
# 'user': request.user,
# 'buses': buses[0:3],
# })
@login_required(login_url="/vms/")
def home(request):
    """
    Dashboard view: renders per-user counters for regular users and
    site-wide counters for superusers, both on 'vms/dashboard.html'.
    """
    # Only consumed by the commented-out bus query below; currently unused.
    today = str.lower(datetime.now().strftime("%A"))
    # buses = sorted(j for j in BusTiming.objects.all() if (j.from_time >= datetime.now().time() and filter(lambda x: str(x).lower() == today, j.availability.all()) ))
    if not request.user.is_superuser == True:
        # Regular user: counts scoped to the requesting user.
        num_suspicious = len(SuspiciousVehicle.objects.filter(reporter=request.user))
        x1 = [j for j in StudentVehicle.objects.all() if (j.user == request.user and j.registered_with_security_section==None)]
        num_pending = len(x1)
        # NOTE(review): despite the name, this is the total of available
        # parking slots, not a guard count -- confirm the template's
        # expectation before renaming.
        x2 = [j.available_slots for j in ParkingSlot.objects.all()]
        num_guards = sum(x2)
        x3 = [j for j in TheftReport.objects.all() if j.reporter==request.user]
        num_thefts = len(x3)
        return render(request, 'vms/dashboard.html',{
            'username': request.user.first_name,
            'user': request.user,
            # 'buses': buses[0:3],
            'num_suspicious': num_suspicious,
            'num_pending': num_pending,
            'num_guards': num_guards,
            'num_thefts': num_thefts,
            'user_thefts': x3,
        })
    else:
        # Superuser: site-wide counts across student and employee vehicles.
        num_suspicious = len(SuspiciousVehicle.objects.all())
        num_pending = len(StudentVehicle.objects.filter(registered_with_security_section=None)) + len(EmployeeVehicle.objects.filter(registered_with_security_section=None))
        num_approved = len(StudentVehicle.objects.filter(registered_with_security_section=True)) + len(EmployeeVehicle.objects.filter(registered_with_security_section=True))
        num_denied = len(StudentVehicle.objects.filter(registered_with_security_section=False)) + len(EmployeeVehicle.objects.filter(registered_with_security_section=False))
        num_guards = len(OnDutyGuard.objects.all())
        num_thefts = len(TheftReport.objects.filter(status="Submitted"))
        passes=PersonPass.objects.all()
        total_blocked = len(passes.filter(is_blocked=True))
        total_issued = len(passes.filter(is_blocked=False))
        x = [j for j in passes if j.expiry_date < datetime.now().date()]
        total_expired = len(x)
        # NOTE(review): 'is_user': True is sent only in the superuser branch,
        # which looks inverted -- confirm how the template uses it.
        return render(request, 'vms/dashboard.html',{
            'username': request.user.first_name,
            'is_user': True,
            'user': request.user,
            # 'buses': buses[0:3],
            'num_suspicious': num_suspicious,
            'num_pending': num_pending,
            'num_guards': num_guards,
            'num_thefts': num_thefts,
            'num_approved': num_approved,
            'num_denied': num_denied,
            'total_issued': total_issued,
            'total_expired': total_expired,
            'total_blocked': total_blocked,
        })
@login_required(login_url="/vms/")
def busdetails(request):
    # Static page: renders the bus-details template with no extra context.
    return render(request, 'vms/busdetails.html')
#Ayush Mananiya
#----------thread function for sending sms---------------------------------------------
def send_sms(message, numbers):
    """
    Send `message` as an SMS to every phone number in `numbers` via the
    site2sms HTTP gateway, routed through the campus proxy.

    Runs synchronously; callers invoke it on a background thread (see
    theft_report_form). Fire-and-forget: responses are not checked.
    Cleanup: removed the unused `status1` local and the unused `response`
    binding.
    """
    proxy = "http://sumeet.ranka:weh,[email protected]:3128" #change the username and password
    for i in numbers:
        requests.get("https://site2sms.p.mashape.com/index.php?msg="+message+"&phone="+str(i)+"&pwd=CS243iitg&uid=8011035945",headers={"X-Mashape-Key": "CW4gX5MRw2mshX6uxzLHMxEVoB0Op1v4cMrjsnZoeRXbk3LD46", "Accept": "application/json"},proxies={"http":proxy,"https":proxy,"ftp":proxy},)
#-------------------------end-----------------------------------------------------
@login_required(login_url="/vms/")
def theft_report_form(request):
    """
    Displays theft report form for user -- NOTE: This form is common to admin and user

    On POST, validates that the pass number on the report belongs to one of
    the requesting user's vehicles (students: motor vehicle, then cycle as a
    fallback; employees: motor vehicle only), links the report to that
    vehicle, notifies all guards by SMS on a background thread, and saves
    the report.
    """
    if request.method == 'POST':
        form = TheftForm(request.POST)
        if form.is_valid():
            task = form.save(commit = False)
            task.reporter = request.user
            if request.user.user.is_student:
                vehicles=StudentVehicle.objects.filter(user=request.user)
                # NOTE(review): `cycles` is computed but never used; the
                # ownership check below only consults `vehicles`.
                cycles=StudentCycle.objects.filter(user=request.user)
                try:
                    vehicle = StudentVehicle.objects.get(vehicle_pass_no=task.vehicle_pass_no)
                    cycle=0
                except:
                    # Not a motor-vehicle pass: fall back to a cycle lookup.
                    message = "Vehicle does not belong to you."
                    vehicle = None
                    try:
                        vehicle = StudentCycle.objects.get(cycle_pass_no=task.vehicle_pass_no)
                        cycle=1
                    except:
                        # Neither lookup matched: reject the report.
                        message = "Vehicle does not belong to you."
                        vehicle = None
                        return render(request, "vms/theft.html",{
                            'message':message,
                            'user':request.user,
                            'form':form,
                        })
            else:
                vehicles=EmployeeVehicle.objects.filter(user=request.user)
                try:
                    vehicle = EmployeeVehicle.objects.get(vehicle_pass_no=task.vehicle_pass_no)
                    cycle=0
                except:
                    vehicle = None
                    message = "Vehicle does not belong to you."
                    return render(request, "vms/theft.html",{
                        'message':message,
                        'user':request.user,
                        'form':form,
                    })
            # NOTE(review): for a StudentCycle, `vehicle in vehicles` compares
            # against the StudentVehicle queryset and is always False, so
            # cycle reports fall through to the plain render below without
            # being saved -- confirm whether that is intended.
            if vehicle != None and vehicle in vehicles:
                if request.user.user.is_student:
                    task.stud_vehicle=vehicle
                else:
                    task.emp_vehicle=vehicle
                #ayush Mananiya
                #my funct started--------------------------------------------------------------------------
                if cycle == 0:
                    message = vehicle.make_and_model +' '+ task.vehicle_pass_no + ' is stolen from ' + task.theft_place +' at '+ str(task.theft_time.strftime('%d-%b-%Y %H:%M')) #extract the form fields and generate message text
                else:
                    message = vehicle.cycle_model+ ' '+vehicle.cycle_color+' '+' '+'is stolen from '+task.theft_place+' at '+ str(task.theft_time)
                numbers = list(Guard.objects.values_list('guard_phone_number', flat=True)) #retrieves the phone numbers of all the guards
                # SMS dispatch is slow (one HTTP call per guard), so run it
                # off the request thread.
                sms_thread = threading.Thread(target=send_sms, args=(message, numbers)) #threading
                sms_thread.start()
                #ended here--------------------------------------------------------------------------------------------------------------------------
                task.save()
                messages.success(request, 'Your theft report is submitted.')
                return render(request, "vms/theft.html",{
                    'message':"Theft Report successfully submitted.",
                    'user':request.user,
                    'form':form,
                    'success':True,
                    'id':task.id,
                })
    else:
        form = TheftForm()
    return render(request, "vms/theft.html", {
        'form':form,
        'user':request.user,
    })
@login_required(login_url="/vms/")
def generate_report(request, report_id):
    """Render the theft report with the given id as a PDF; plain 'done' when absent."""
    matching = TheftReport.objects.filter(id=report_id)
    if matching:
        return pdf.pdf_gen(matching[0])
    return HttpResponse("done")
@login_required(login_url="/vms/")
def vehicles_missing(request):
    """Show the user every theft report that has been filed."""
    all_reports = TheftReport.objects.all()
    return render(request, "vms/theft_reports.html", {'reports': all_reports})
@login_required(login_url="/vms/")
def parking_slot_availability(request):
    """Render the list of parking slots so users can check availability."""
    context = {'pslots': ParkingSlot.objects.all()}
    return render(request, 'users/parking.html', context)
@login_required(login_url="/vms/")
def suspicious_vehicle_report_form(request):
    """
    Show and process the suspicious-vehicle report form.

    A valid POST stores the report with the current user as reporter and
    re-renders the page with a confirmation; an invalid POST re-renders
    with the bound form so errors are shown.
    """
    if request.method != 'POST':
        form = SuspiciousVehicleForm()
    else:
        form = SuspiciousVehicleForm(request.POST, request.FILES)
        if form.is_valid():
            report = form.save(commit = False)
            report.reporter = request.user
            report.save()
            return render(request, 'vms/suspicious.html', {
                'user': request.user,
                'form': form,
                'message': "Vehicle has been reported. Thanks for the caution."
            })
    return render(request, 'vms/suspicious.html', {
        'user': request.user,
        'form': form,
    })
@login_required(login_url="/vms/")
def suspicious_vehicles(request):
    """
    List every reported suspicious vehicle.

    A POST carrying a 'Delete' id removes that report first (raises
    DoesNotExist for an unknown id, as before).
    Cleanup: removed the unused `str1` local and the duplicated
    queryset/render code in the two branches.
    """
    if request.POST:
        SuspiciousVehicle.objects.get(id=request.POST['Delete']).delete()
        messages.success(request, "Report for suspicious activity is deleted")
    return render(request, 'vms/suspicious_vehicles.html', {
        'user': request.user,
        'vehicles': SuspiciousVehicle.objects.all(),
    })
@login_required(login_url="/vms/")
def delete_suspicious_vehicles(request, suspicious_vehicle_id):
    """
    Delete the suspicious-vehicle report with the given id.

    BUG FIX: the view previously ended with `pass`, returning None, which
    makes Django raise "view didn't return an HttpResponse". Return a
    redirect to the suspicious-vehicles listing instead.
    """
    SuspiciousVehicle.objects.get(id=suspicious_vehicle_id).delete()
    messages.success(request, "Report for suspicious activity is deleted")
    # NOTE(review): redirect target inferred from the listing view's name --
    # confirm the exact path against urls.py.
    return HttpResponseRedirect('/vms/suspicious-vehicles/')
| mit |
jambonrose/DjangoUnleashed-1.8 | blog/migrations/0002_post_data.py | 1 | 4440 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from datetime import date
from django.db import migrations, models
# Seed data for the data migration below: each dict maps directly onto a
# blog Post, plus the slugs of the related Startup and Tag objects that
# add_post_data() links via the post's M2M fields.
POSTS = [
    {
        "title": "Django 1.0 Release",
        "slug": "django-10-released",
        "pub_date": date(2008, 9, 3),
        "startups": [],
        "tags": ["django", "python", "web"],
        "text": "THE Web Framework.",
    },
    {
        "title": "Simple Robots for Sale",
        "slug": "simple-robots-for-sale",
        "pub_date": date(2011, 2, 21),
        "startups": ["simple-robots"],
        "tags": ["augmented-reality", "python"],
        "text":
            "If only they would make "
            "spider bots.",
    },
    {
        "title": "Django Training",
        "slug": "django-training",
        "pub_date": date(2013, 1, 18),
        "startups": ["jambon-software"],
        "tags": ["django"],
        "text":
            "Want to learn Django in a class "
            "setting? JamBon Software offers "
            "hands-on courses in the web "
            "framework. Just looking for help? "
            "They'll consult on your web and "
            "mobile products and can also be "
            "hired for end-to-end development.",
    },
    {
        "title": "Django 1.8 Release",
        "slug": "django-18-released",
        "pub_date": date(2015, 4, 1),
        "startups": [],
        "tags": ["django", "python", "web"],
        "text": "Django 1.8 is Django's newest "
        "version, and the next version "
        "slated for Long-Term Support "
        "(LTS). LTS means that Django 1.8 "
        "will be supported for longer than "
        "regular versions: Django core "
        "developers will specify a single "
        "release as LTS, and then continue "
        "to update that version regardless "
        "of the usual release cycle. This "
        "will last until they pick a new "
        "LTS version, which typically "
        "happens every 3 to 4 years. The "
        "last LTS version was 1.4, "
        "released in March 2012, which "
        "will stop being supported in "
        "October 2015.\n\n"
        "For more information: \n"
        "http://andrewsforge.com/article/"
        "upgrading-django-to-17/part-1-"
        "introduction-and-django-releases/",
    },
    {
        "title": "More Django Info",
        "slug": "more-django-info",
        "pub_date": date(2015, 4, 8),
        "startups": ["jambon-software"],
        "tags": ["django", "web"],
        "text":
            "Remember that the official websites "
            "for Django and this book contain a "
            "number of extra resources.\n\n"
            "https://djangoproject.com\n"
            "https://django-unleashed.com\n\n"
            "Want more Django info? "
            "There's always my personal blog!\n\n"
            "https://AndrewsForge.com",
    },
    {
        "title": "New Django Version",
        "slug": "new-django-version",
        "pub_date": date(2020, 5, 15),
        "startups": [],
        "tags": ["django", "python", "web"],
        "text":
            "Better integration with "
            "HTML Boilerstrap 9.",
    },
]
def add_post_data(apps, schema_editor):
    """Create the seeded Post objects and link their tags and startups."""
    Post = apps.get_model('blog', 'Post')
    Startup = apps.get_model(
        'organizer', 'Startup')
    Tag = apps.get_model('organizer', 'Tag')
    for entry in POSTS:
        new_post = Post.objects.create(
            title=entry['title'],
            slug=entry['slug'],
            text=entry['text'])
        # pub_date is assigned after create and re-saved — presumably to
        # override an auto-set date on the field; confirm against the model.
        new_post.pub_date = entry['pub_date']
        new_post.save()
        for tag_slug in entry['tags']:
            new_post.tags.add(Tag.objects.get(slug=tag_slug))
        for startup_slug in entry['startups']:
            new_post.startups.add(Startup.objects.get(slug=startup_slug))
def remove_post_data(apps, schema_editor):
    """Delete every seeded Post so the migration can be reversed."""
    Post = apps.get_model('blog', 'Post')
    for entry in POSTS:
        Post.objects.get(slug=entry['slug']).delete()
class Migration(migrations.Migration):
    # Data migration: requires the blog schema plus the startup seed data
    # so the slug lookups inside add_post_data can succeed.
    dependencies = [
        ('blog', '0001_initial'),
        ('organizer', '0003_startup_data'),
    ]
    operations = [
        migrations.RunPython(
            add_post_data,
            remove_post_data)  # reverse operation for ./manage.py migrate blog 0001
    ]
| bsd-2-clause |
karstenw/nodebox-pyobjc | examples/Extended Application/matplotlib/examples/event_handling/zoom_window.py | 1 | 2014 | """
===========
Zoom Window
===========
This example shows how to connect events in one window, for example, a mouse
press, to another figure window.
If you click on a point in the first window, the z and y limits of the
second will be adjusted so that the center of the zoom in the second
window will be the x,y coordinates of the clicked point.
Note the diameter of the circles in the scatter are defined in
points**2, so their size is independent of the zoom
"""
import matplotlib.pyplot as plt #import figure, show
import numpy as np
# nodebox section
# When this file runs inside NodeBox, __name__ is '__builtin__'. In that
# case pltshow() is defined to save the figure to a temporary PNG and place
# it on the NodeBox canvas; otherwise pltshow() just calls plt.show().
if __name__ == '__builtin__':
    # were in nodebox
    import os
    import tempfile
    W = 800
    inset = 20
    # NOTE(review): size(), image(), imagesize() and HEIGHT appear to be
    # NodeBox runtime builtins — they are not defined in this file.
    size(W, 600)
    plt.cla()
    plt.clf()
    plt.close('all')
    def tempimage():
        # Create a closed, named temporary .png file and return its path.
        fob = tempfile.NamedTemporaryFile(mode='w+b', suffix='.png', delete=False)
        fname = fob.name
        fob.close()
        return fname
    imgx = 20
    imgy = 0
    def pltshow(plt, dpi=150):
        # Render the current figure to a temp PNG, draw it on the canvas
        # below any previous image, then remove the file and grow the canvas.
        global imgx, imgy
        temppath = tempimage()
        plt.savefig(temppath, dpi=dpi)
        dx,dy = imagesize(temppath)
        w = min(W,dx)
        image(temppath,imgx,imgy,width=w)
        imgy = imgy + dy + 20
        os.remove(temppath)
        size(W, HEIGHT+dy+40)
else:
    def pltshow(mplpyplot):
        # Outside NodeBox, defer to the normal matplotlib display.
        mplpyplot.show()
# nodebox section end
# Two independent figures: figsrc shows all the data, figzoom shows a
# magnified window whose limits are re-centered by clicks in figsrc.
figsrc = plt.figure()
figzoom = plt.figure()
axsrc = figsrc.add_subplot(111, xlim=(0, 1), ylim=(0, 1), autoscale_on=False)
axzoom = figzoom.add_subplot(111, xlim=(0.45, 0.55), ylim=(0.4, .6),
                             autoscale_on=False)
axsrc.set_title('Click to zoom')
axzoom.set_title('zoom window')
# Random scatter data; sizes are in points**2, so the marker size is
# independent of the axes zoom level.
x, y, s, c = np.random.rand(4, 200)
s *= 200
axsrc.scatter(x, y, s, c)
axzoom.scatter(x, y, s, c)
def onpress(event):
    """Re-center the zoom axes on a left-click in the source figure."""
    if event.button != 1:
        return
    cx, cy = event.xdata, event.ydata
    half = 0.1  # half-width of the zoom window in data units
    axzoom.set_xlim(cx - half, cx + half)
    axzoom.set_ylim(cy - half, cy + half)
    figzoom.canvas.draw()
# Route mouse presses on the source figure into onpress, then display.
figsrc.canvas.mpl_connect('button_press_event', onpress)
pltshow(plt)
| mit |
Perferom/android_external_chromium_org | chrome/test/pyautolib/remote_host.py | 80 | 3108 | #!/usr/bin/env python
# Copyright (c) 2011 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import cStringIO
import os
import pickle
import socket
import sys
import pyauto
class RemoteHost(object):
  """Class used as a host for tests that use the PyAuto RemoteProxy.

  This class fires up a listener which waits for a connection from a RemoteProxy
  and receives method call requests. Run python remote_host.py
  remote_host.RemoteHost.RunHost to start up a PyAuto remote instance that you
  can connect to and automate using pyauto.RemoteProxy.
  """
  def __init__(self, host, *args, **kwargs):
    # Blocks serving RPCs until the proxy disconnects.
    self.StartSocketServer(host)

  def StartSocketServer(self, host):
    """Accept a single proxy connection and serve RPCs until it closes."""
    listening_socket = socket.socket()
    # SO_REUSEADDR lets the host restart without waiting out TIME_WAIT.
    listening_socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
    listening_socket.bind(host)
    listening_socket.listen(1)
    print 'Listening for incoming connections on port %d.' % host[1]
    self._socket, address = listening_socket.accept()
    print 'Accepted connection from %s:%d.' % address
    while self.Connected():
      self._HandleRPC()

  def StopSocketServer(self):
    """Shut down the RPC socket, ignoring errors on an already-dead peer."""
    if self._socket:
      try:
        self._socket.shutdown(socket.SHUT_RDWR)
        self._socket.close()
      except socket.error:
        pass
      self._socket = None

  def Connected(self):
    # Truthy while a connection is open; None after StopSocketServer.
    return self._socket

  def CreateTarget(self, target_class):
    """Creates an instance of the specified class to serve as the RPC target.

    RPC calls can be made on the target.
    """
    self.target = target_class()

  def _HandleRPC(self):
    """Receives a method call request over the socket and executes the method.

    This method captures stdout and stderr for the duration of the method call,
    and sends those, the return value, and any thrown exceptions back to the
    RemoteProxy.
    """
    # Receive request.
    request = self._socket.recv(4096)
    if not request:
      # Empty read means the proxy hung up; stop serving.
      self.StopSocketServer()
      return
    # NOTE(review): pickle.loads on network data is unsafe for untrusted
    # peers; tolerable here only because the proxy is a trusted test peer.
    request = pickle.loads(request)

    # Redirect output to strings.
    old_stdout = sys.stdout
    old_stderr = sys.stderr
    sys.stdout = stdout = cStringIO.StringIO()
    sys.stderr = stderr = cStringIO.StringIO()

    # Make requested method call. Methods defined on this host object take
    # precedence over methods on the RPC target.
    result = None
    exception = None
    try:
      if getattr(self, request[0], None):
        result = getattr(self, request[0])(*request[1], **request[2])
      else:
        result = getattr(self.target, request[0])(*request[1], **request[2])
    except BaseException, e:
      # Exceptions are serialized as (class name, message) — they cannot be
      # pickled reliably across the wire as objects.
      exception = (e.__class__.__name__, str(e))

    # Put output back to the way it was before.
    sys.stdout = old_stdout
    sys.stderr = old_stderr

    # Package up and send the result of the method call.
    response = pickle.dumps((result, stdout.getvalue(), stderr.getvalue(),
                             exception))
    if self._socket.send(response) != len(response):
      # Short send: assume the connection is broken and stop serving.
      self.StopSocketServer()
if __name__ == '__main__':
  # Build the PyAuto suite (initializes the automation framework), then
  # block serving RPCs on port 7410 until the proxy disconnects.
  pyauto_suite = pyauto.PyUITestSuite(sys.argv)
  RemoteHost(('', 7410))
  del pyauto_suite
| bsd-3-clause |
HybridF5/jacket | jacket/storage/keymgr/conf_key_mgr.py | 1 | 4888 | # Copyright (c) 2013 The Johns Hopkins University/Applied Physics Laboratory
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
An implementation of a key manager that reads its key from the project's
configuration options.
This key manager implementation provides limited security, assuming that the
key remains secret. Using the volume encryption feature as an example,
encryption provides protection against a lost or stolen disk, assuming that
the configuration file that contains the key is not stored on the disk.
Encryption also protects the confidentiality of data as it is transmitted via
iSCSI from the compute host to the storage host (again assuming that an
attacker who intercepts the data does not know the secret key).
Because this implementation uses a single, fixed key, it proffers no
protection once that key is compromised. In particular, different volumes
encrypted with a key provided by this key manager actually share the same
encryption key so *any* volume can be decrypted once the fixed key is known.
"""
import array
import binascii
from oslo_config import cfg
from oslo_log import log as logging
from jacket.storage import exception
from jacket.storage.i18n import _, _LW
from jacket.storage.keymgr import key
from jacket.storage.keymgr import key_mgr
# Configuration for the single fixed key (hex-encoded) that
# ConfKeyManager below serves; registered under [storage_keymgr].
key_mgr_opts = [
    cfg.StrOpt('fixed_key',
               help='Fixed key returned by key manager, specified in hex'),
]
CONF = cfg.CONF
CONF.register_opts(key_mgr_opts, group='storage_keymgr')
LOG = logging.getLogger(__name__)
class ConfKeyManager(key_mgr.KeyManager):
    """Key Manager that supports one key defined by the fixed_key conf option.

    The manager exposes the full key manager interface, but there is only
    ever a single key: its id is a constant, and its bytes come from the
    ``storage_keymgr.fixed_key`` configuration option. Side effects (e.g.
    which exceptions are raised) follow the key manager interface.
    """

    def __init__(self):
        super(ConfKeyManager, self).__init__()
        # The one and only key id this manager ever hands out.
        self.key_id = '00000000-0000-0000-0000-000000000000'

    def _generate_key(self, **kwargs):
        # Turn the configured hex string into an AES SymmetricKey.
        hex_string = self._generate_hex_key(**kwargs)
        raw_bytes = binascii.unhexlify(hex_string)
        byte_values = array.array('B', raw_bytes).tolist()
        return key.SymmetricKey('AES', byte_values)

    def _generate_hex_key(self, **kwargs):
        # Fail loudly when the operator never configured a key.
        fixed_key = CONF.storage_keymgr.fixed_key
        if fixed_key is None:
            LOG.warning(
                _LW('config option storage_keymgr.fixed_key has not been defined:'
                    ' some operations may fail unexpectedly'))
            raise ValueError(_('storage_keymgr.fixed_key not defined'))
        return fixed_key

    def create_key(self, ctxt, **kwargs):
        """Create a key.

        Returns the fixed key's UUID; raises NotAuthorized when the
        supplied context is None.
        """
        if ctxt is None:
            raise exception.NotAuthorized()
        return self.key_id

    def store_key(self, ctxt, key, **kwargs):
        """Store (i.e., register) a key with the key manager.

        Only the fixed key itself may be stored; anything else is
        rejected with a KeyManagerError.
        """
        if ctxt is None:
            raise exception.NotAuthorized()
        expected = self._generate_key()
        if key != expected:
            raise exception.KeyManagerError(
                reason="cannot store arbitrary keys")
        return self.key_id

    def copy_key(self, ctxt, key_id, **kwargs):
        # Copying the single fixed key simply yields the same id.
        if ctxt is None:
            raise exception.NotAuthorized()
        return self.key_id

    def get_key(self, ctxt, key_id, **kwargs):
        """Retrieve the key identified by the specified id.

        Raises NotAuthorized when the context is None and KeyError when
        the id is not the fixed key's UUID.
        """
        if ctxt is None:
            raise exception.NotAuthorized()
        if key_id != self.key_id:
            raise KeyError(key_id)
        return self._generate_key()

    def delete_key(self, ctxt, key_id, **kwargs):
        # The fixed key can never really be deleted; log and carry on.
        if ctxt is None:
            raise exception.NotAuthorized()
        if key_id != self.key_id:
            raise exception.KeyManagerError(
                reason="cannot delete non-existent key")
        LOG.warning(_LW("Not deleting key %s"), key_id)
| apache-2.0 |
botherder/volatility | volatility/plugins/overlays/windows/win2003_sp2_x64_vtypes.py | 58 | 337862 | ntkrnlmp_types = {
'LIST_ENTRY64' : [ 0x10, {
'Flink' : [ 0x0, ['unsigned long long']],
'Blink' : [ 0x8, ['unsigned long long']],
} ],
'LIST_ENTRY32' : [ 0x8, {
'Flink' : [ 0x0, ['unsigned long']],
'Blink' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_1015' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['unsigned long']],
} ],
'_ULARGE_INTEGER' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['unsigned long']],
'u' : [ 0x0, ['__unnamed_1015']],
'QuadPart' : [ 0x0, ['unsigned long long']],
} ],
'_LIST_ENTRY' : [ 0x10, {
'Flink' : [ 0x0, ['pointer64', ['_LIST_ENTRY']]],
'Blink' : [ 0x8, ['pointer64', ['_LIST_ENTRY']]],
} ],
'_IMAGE_NT_HEADERS64' : [ 0x108, {
'Signature' : [ 0x0, ['unsigned long']],
'FileHeader' : [ 0x4, ['_IMAGE_FILE_HEADER']],
'OptionalHeader' : [ 0x18, ['_IMAGE_OPTIONAL_HEADER64']],
} ],
'__unnamed_1026' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['long']],
} ],
'_LARGE_INTEGER' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['long']],
'u' : [ 0x0, ['__unnamed_1026']],
'QuadPart' : [ 0x0, ['long long']],
} ],
'_RTL_BITMAP' : [ 0x10, {
'SizeOfBitMap' : [ 0x0, ['unsigned long']],
'Buffer' : [ 0x8, ['pointer64', ['unsigned long']]],
} ],
'_LUID' : [ 0x8, {
'LowPart' : [ 0x0, ['unsigned long']],
'HighPart' : [ 0x4, ['long']],
} ],
'_KPRCB' : [ 0x2480, {
'MxCsr' : [ 0x0, ['unsigned long']],
'Number' : [ 0x4, ['unsigned char']],
'NestingLevel' : [ 0x5, ['unsigned char']],
'InterruptRequest' : [ 0x6, ['unsigned char']],
'IdleHalt' : [ 0x7, ['unsigned char']],
'CurrentThread' : [ 0x8, ['pointer64', ['_KTHREAD']]],
'NextThread' : [ 0x10, ['pointer64', ['_KTHREAD']]],
'IdleThread' : [ 0x18, ['pointer64', ['_KTHREAD']]],
'UserRsp' : [ 0x20, ['unsigned long long']],
'RspBase' : [ 0x28, ['unsigned long long']],
'PrcbLock' : [ 0x30, ['unsigned long long']],
'SetMember' : [ 0x38, ['unsigned long long']],
'ProcessorState' : [ 0x40, ['_KPROCESSOR_STATE']],
'CpuType' : [ 0x5f0, ['unsigned char']],
'CpuID' : [ 0x5f1, ['unsigned char']],
'CpuStep' : [ 0x5f2, ['unsigned short']],
'MHz' : [ 0x5f4, ['unsigned long']],
'HalReserved' : [ 0x5f8, ['array', 8, ['unsigned long long']]],
'MinorVersion' : [ 0x638, ['unsigned short']],
'MajorVersion' : [ 0x63a, ['unsigned short']],
'BuildType' : [ 0x63c, ['unsigned char']],
'CpuVendor' : [ 0x63d, ['unsigned char']],
'InitialApicId' : [ 0x63e, ['unsigned char']],
'LogicalProcessorsPerPhysicalProcessor' : [ 0x63f, ['unsigned char']],
'ApicMask' : [ 0x640, ['unsigned long']],
'CFlushSize' : [ 0x644, ['unsigned char']],
'PrcbPad0x' : [ 0x645, ['array', 3, ['unsigned char']]],
'AcpiReserved' : [ 0x648, ['pointer64', ['void']]],
'PrcbPad00' : [ 0x650, ['array', 4, ['unsigned long long']]],
'LockQueue' : [ 0x670, ['array', 33, ['_KSPIN_LOCK_QUEUE']]],
'PPLookasideList' : [ 0x880, ['array', 16, ['_PP_LOOKASIDE_LIST']]],
'PPNPagedLookasideList' : [ 0x980, ['array', 32, ['_PP_LOOKASIDE_LIST']]],
'PPPagedLookasideList' : [ 0xb80, ['array', 32, ['_PP_LOOKASIDE_LIST']]],
'PacketBarrier' : [ 0xd80, ['unsigned long long']],
'DeferredReadyListHead' : [ 0xd88, ['_SINGLE_LIST_ENTRY']],
'MmPageFaultCount' : [ 0xd90, ['long']],
'MmCopyOnWriteCount' : [ 0xd94, ['long']],
'MmTransitionCount' : [ 0xd98, ['long']],
'MmCacheTransitionCount' : [ 0xd9c, ['long']],
'MmDemandZeroCount' : [ 0xda0, ['long']],
'MmPageReadCount' : [ 0xda4, ['long']],
'MmPageReadIoCount' : [ 0xda8, ['long']],
'MmCacheReadCount' : [ 0xdac, ['long']],
'MmCacheIoCount' : [ 0xdb0, ['long']],
'MmDirtyPagesWriteCount' : [ 0xdb4, ['long']],
'MmDirtyWriteIoCount' : [ 0xdb8, ['long']],
'MmMappedPagesWriteCount' : [ 0xdbc, ['long']],
'MmMappedWriteIoCount' : [ 0xdc0, ['long']],
'LookasideIrpFloat' : [ 0xdc4, ['long']],
'KeSystemCalls' : [ 0xdc8, ['unsigned long']],
'IoReadOperationCount' : [ 0xdcc, ['long']],
'IoWriteOperationCount' : [ 0xdd0, ['long']],
'IoOtherOperationCount' : [ 0xdd4, ['long']],
'IoReadTransferCount' : [ 0xdd8, ['_LARGE_INTEGER']],
'IoWriteTransferCount' : [ 0xde0, ['_LARGE_INTEGER']],
'IoOtherTransferCount' : [ 0xde8, ['_LARGE_INTEGER']],
'KeContextSwitches' : [ 0xdf0, ['unsigned long']],
'PrcbPad2' : [ 0xdf4, ['array', 12, ['unsigned char']]],
'TargetSet' : [ 0xe00, ['unsigned long long']],
'IpiFrozen' : [ 0xe08, ['unsigned long']],
'PrcbPad3' : [ 0xe0c, ['array', 116, ['unsigned char']]],
'RequestMailbox' : [ 0xe80, ['array', 64, ['_REQUEST_MAILBOX']]],
'SenderSummary' : [ 0x1e80, ['unsigned long long']],
'PrcbPad4' : [ 0x1e88, ['array', 120, ['unsigned char']]],
'DpcData' : [ 0x1f00, ['array', 2, ['_KDPC_DATA']]],
'DpcStack' : [ 0x1f40, ['pointer64', ['void']]],
'SavedRsp' : [ 0x1f48, ['pointer64', ['void']]],
'MaximumDpcQueueDepth' : [ 0x1f50, ['long']],
'DpcRequestRate' : [ 0x1f54, ['unsigned long']],
'MinimumDpcRate' : [ 0x1f58, ['unsigned long']],
'DpcInterruptRequested' : [ 0x1f5c, ['unsigned char']],
'DpcThreadRequested' : [ 0x1f5d, ['unsigned char']],
'DpcRoutineActive' : [ 0x1f5e, ['unsigned char']],
'DpcThreadActive' : [ 0x1f5f, ['unsigned char']],
'TimerHand' : [ 0x1f60, ['unsigned long long']],
'TimerRequest' : [ 0x1f60, ['unsigned long long']],
'TickOffset' : [ 0x1f68, ['long']],
'MasterOffset' : [ 0x1f6c, ['long']],
'DpcLastCount' : [ 0x1f70, ['unsigned long']],
'ThreadDpcEnable' : [ 0x1f74, ['unsigned char']],
'QuantumEnd' : [ 0x1f75, ['unsigned char']],
'PrcbPad50' : [ 0x1f76, ['unsigned char']],
'IdleSchedule' : [ 0x1f77, ['unsigned char']],
'DpcSetEventRequest' : [ 0x1f78, ['long']],
'PrcbPad40' : [ 0x1f7c, ['long']],
'DpcThread' : [ 0x1f80, ['pointer64', ['void']]],
'DpcEvent' : [ 0x1f88, ['_KEVENT']],
'CallDpc' : [ 0x1fa0, ['_KDPC']],
'PrcbPad7' : [ 0x1fe0, ['array', 4, ['unsigned long long']]],
'WaitListHead' : [ 0x2000, ['_LIST_ENTRY']],
'ReadySummary' : [ 0x2010, ['unsigned long']],
'QueueIndex' : [ 0x2014, ['unsigned long']],
'DispatcherReadyListHead' : [ 0x2018, ['array', 32, ['_LIST_ENTRY']]],
'InterruptCount' : [ 0x2218, ['unsigned long']],
'KernelTime' : [ 0x221c, ['unsigned long']],
'UserTime' : [ 0x2220, ['unsigned long']],
'DpcTime' : [ 0x2224, ['unsigned long']],
'InterruptTime' : [ 0x2228, ['unsigned long']],
'AdjustDpcThreshold' : [ 0x222c, ['unsigned long']],
'SkipTick' : [ 0x2230, ['unsigned char']],
'DebuggerSavedIRQL' : [ 0x2231, ['unsigned char']],
'PollSlot' : [ 0x2232, ['unsigned char']],
'PrcbPad8' : [ 0x2233, ['array', 13, ['unsigned char']]],
'ParentNode' : [ 0x2240, ['pointer64', ['_KNODE']]],
'MultiThreadProcessorSet' : [ 0x2248, ['unsigned long long']],
'MultiThreadSetMaster' : [ 0x2250, ['pointer64', ['_KPRCB']]],
'Sleeping' : [ 0x2258, ['long']],
'PrcbPad90' : [ 0x225c, ['array', 1, ['unsigned long']]],
'DebugDpcTime' : [ 0x2260, ['unsigned long']],
'PageColor' : [ 0x2264, ['unsigned long']],
'NodeColor' : [ 0x2268, ['unsigned long']],
'NodeShiftedColor' : [ 0x226c, ['unsigned long']],
'SecondaryColorMask' : [ 0x2270, ['unsigned long']],
'PrcbPad9' : [ 0x2274, ['array', 12, ['unsigned char']]],
'CcFastReadNoWait' : [ 0x2280, ['unsigned long']],
'CcFastReadWait' : [ 0x2284, ['unsigned long']],
'CcFastReadNotPossible' : [ 0x2288, ['unsigned long']],
'CcCopyReadNoWait' : [ 0x228c, ['unsigned long']],
'CcCopyReadWait' : [ 0x2290, ['unsigned long']],
'CcCopyReadNoWaitMiss' : [ 0x2294, ['unsigned long']],
'KeAlignmentFixupCount' : [ 0x2298, ['unsigned long']],
'KeDcacheFlushCount' : [ 0x229c, ['unsigned long']],
'KeExceptionDispatchCount' : [ 0x22a0, ['unsigned long']],
'KeFirstLevelTbFills' : [ 0x22a4, ['unsigned long']],
'KeFloatingEmulationCount' : [ 0x22a8, ['unsigned long']],
'KeIcacheFlushCount' : [ 0x22ac, ['unsigned long']],
'KeSecondLevelTbFills' : [ 0x22b0, ['unsigned long']],
'VendorString' : [ 0x22b4, ['array', 13, ['unsigned char']]],
'PrcbPad10' : [ 0x22c1, ['array', 2, ['unsigned char']]],
'FeatureBits' : [ 0x22c4, ['unsigned long']],
'UpdateSignature' : [ 0x22c8, ['_LARGE_INTEGER']],
'PowerState' : [ 0x22d0, ['_PROCESSOR_POWER_STATE']],
'Cache' : [ 0x2440, ['array', 5, ['_CACHE_DESCRIPTOR']]],
'CacheCount' : [ 0x247c, ['unsigned long']],
} ],
'_SINGLE_LIST_ENTRY' : [ 0x8, {
'Next' : [ 0x0, ['pointer64', ['_SINGLE_LIST_ENTRY']]],
} ],
'_KDPC' : [ 0x40, {
'Type' : [ 0x0, ['unsigned char']],
'Importance' : [ 0x1, ['unsigned char']],
'Number' : [ 0x2, ['unsigned char']],
'Expedite' : [ 0x3, ['unsigned char']],
'DpcListEntry' : [ 0x8, ['_LIST_ENTRY']],
'DeferredRoutine' : [ 0x18, ['pointer64', ['void']]],
'DeferredContext' : [ 0x20, ['pointer64', ['void']]],
'SystemArgument1' : [ 0x28, ['pointer64', ['void']]],
'SystemArgument2' : [ 0x30, ['pointer64', ['void']]],
'DpcData' : [ 0x38, ['pointer64', ['void']]],
} ],
'_KERNEL_STACK_CONTROL' : [ 0x200, {
'XmmSaveArea' : [ 0x0, ['_XMM_SAVE_AREA32']],
'Fill' : [ 0x0, ['array', 432, ['unsigned char']]],
'Current' : [ 0x1b0, ['_KERNEL_STACK_SEGMENT']],
'Previous' : [ 0x1d8, ['_KERNEL_STACK_SEGMENT']],
} ],
'_KTHREAD' : [ 0x308, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'MutantListHead' : [ 0x18, ['_LIST_ENTRY']],
'InitialStack' : [ 0x28, ['pointer64', ['void']]],
'StackLimit' : [ 0x30, ['pointer64', ['void']]],
'KernelStack' : [ 0x38, ['pointer64', ['void']]],
'ThreadLock' : [ 0x40, ['unsigned long long']],
'ApcState' : [ 0x48, ['_KAPC_STATE']],
'ApcStateFill' : [ 0x48, ['array', 43, ['unsigned char']]],
'ApcQueueable' : [ 0x73, ['unsigned char']],
'NextProcessor' : [ 0x74, ['unsigned char']],
'DeferredProcessor' : [ 0x75, ['unsigned char']],
'AdjustReason' : [ 0x76, ['unsigned char']],
'AdjustIncrement' : [ 0x77, ['unsigned char']],
'ApcQueueLock' : [ 0x78, ['unsigned long long']],
'WaitStatus' : [ 0x80, ['long long']],
'WaitBlockList' : [ 0x88, ['pointer64', ['_KWAIT_BLOCK']]],
'GateObject' : [ 0x88, ['pointer64', ['_KGATE']]],
'Alertable' : [ 0x90, ['unsigned char']],
'WaitNext' : [ 0x91, ['unsigned char']],
'WaitReason' : [ 0x92, ['unsigned char']],
'Priority' : [ 0x93, ['unsigned char']],
'EnableStackSwap' : [ 0x94, ['unsigned char']],
'SwapBusy' : [ 0x95, ['unsigned char']],
'Alerted' : [ 0x96, ['array', 2, ['unsigned char']]],
'WaitListEntry' : [ 0x98, ['_LIST_ENTRY']],
'SwapListEntry' : [ 0x98, ['_SINGLE_LIST_ENTRY']],
'Queue' : [ 0xa8, ['pointer64', ['_KQUEUE']]],
'Teb' : [ 0xb0, ['pointer64', ['void']]],
'Timer' : [ 0xb8, ['_KTIMER']],
'TimerFill' : [ 0xb8, ['array', 60, ['unsigned char']]],
'AutoAlignment' : [ 0xf4, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DisableBoost' : [ 0xf4, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'GuiThread' : [ 0xf4, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ReservedFlags' : [ 0xf4, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'ThreadFlags' : [ 0xf4, ['long']],
'WaitBlock' : [ 0xf8, ['array', 4, ['_KWAIT_BLOCK']]],
'WaitBlockFill0' : [ 0xf8, ['array', 43, ['unsigned char']]],
'SystemAffinityActive' : [ 0x123, ['unsigned char']],
'WaitBlockFill1' : [ 0xf8, ['array', 91, ['unsigned char']]],
'PreviousMode' : [ 0x153, ['unsigned char']],
'WaitBlockFill2' : [ 0xf8, ['array', 139, ['unsigned char']]],
'ResourceIndex' : [ 0x183, ['unsigned char']],
'WaitBlockFill3' : [ 0xf8, ['array', 187, ['unsigned char']]],
'LargeStack' : [ 0x1b3, ['unsigned char']],
'WaitBlockFill4' : [ 0xf8, ['array', 44, ['unsigned char']]],
'ContextSwitches' : [ 0x124, ['unsigned long']],
'WaitBlockFill5' : [ 0xf8, ['array', 92, ['unsigned char']]],
'State' : [ 0x154, ['unsigned char']],
'NpxState' : [ 0x155, ['unsigned char']],
'WaitIrql' : [ 0x156, ['unsigned char']],
'WaitMode' : [ 0x157, ['unsigned char']],
'WaitBlockFill6' : [ 0xf8, ['array', 140, ['unsigned char']]],
'WaitTime' : [ 0x184, ['unsigned long']],
'WaitBlockFill7' : [ 0xf8, ['array', 188, ['unsigned char']]],
'KernelApcDisable' : [ 0x1b4, ['short']],
'SpecialApcDisable' : [ 0x1b6, ['short']],
'CombinedApcDisable' : [ 0x1b4, ['unsigned long']],
'QueueListEntry' : [ 0x1b8, ['_LIST_ENTRY']],
'TrapFrame' : [ 0x1c8, ['pointer64', ['_KTRAP_FRAME']]],
'CallbackStack' : [ 0x1d0, ['pointer64', ['void']]],
'ApcStateIndex' : [ 0x1d8, ['unsigned char']],
'IdealProcessor' : [ 0x1d9, ['unsigned char']],
'Preempted' : [ 0x1da, ['unsigned char']],
'ProcessReadyQueue' : [ 0x1db, ['unsigned char']],
'KernelStackResident' : [ 0x1dc, ['unsigned char']],
'BasePriority' : [ 0x1dd, ['unsigned char']],
'PriorityDecrement' : [ 0x1de, ['unsigned char']],
'Saturation' : [ 0x1df, ['unsigned char']],
'UserAffinity' : [ 0x1e0, ['unsigned long long']],
'Process' : [ 0x1e8, ['pointer64', ['_KPROCESS']]],
'Affinity' : [ 0x1f0, ['unsigned long long']],
'ApcStatePointer' : [ 0x1f8, ['array', 2, ['pointer64', ['_KAPC_STATE']]]],
'SavedApcState' : [ 0x208, ['_KAPC_STATE']],
'SavedApcStateFill' : [ 0x208, ['array', 43, ['unsigned char']]],
'FreezeCount' : [ 0x233, ['unsigned char']],
'SuspendCount' : [ 0x234, ['unsigned char']],
'UserIdealProcessor' : [ 0x235, ['unsigned char']],
'CalloutActive' : [ 0x236, ['unsigned char']],
'CodePatchInProgress' : [ 0x237, ['unsigned char']],
'Win32Thread' : [ 0x238, ['pointer64', ['void']]],
'StackBase' : [ 0x240, ['pointer64', ['void']]],
'SuspendApc' : [ 0x248, ['_KAPC']],
'SuspendApcFill0' : [ 0x248, ['array', 1, ['unsigned char']]],
'Quantum' : [ 0x249, ['unsigned char']],
'SuspendApcFill1' : [ 0x248, ['array', 3, ['unsigned char']]],
'QuantumReset' : [ 0x24b, ['unsigned char']],
'SuspendApcFill2' : [ 0x248, ['array', 4, ['unsigned char']]],
'KernelTime' : [ 0x24c, ['unsigned long']],
'SuspendApcFill3' : [ 0x248, ['array', 64, ['unsigned char']]],
'TlsArray' : [ 0x288, ['pointer64', ['void']]],
'SuspendApcFill4' : [ 0x248, ['array', 72, ['unsigned char']]],
'LegoData' : [ 0x290, ['pointer64', ['void']]],
'SuspendApcFill5' : [ 0x248, ['array', 83, ['unsigned char']]],
'PowerState' : [ 0x29b, ['unsigned char']],
'UserTime' : [ 0x29c, ['unsigned long']],
'SuspendSemaphore' : [ 0x2a0, ['_KSEMAPHORE']],
'SuspendSemaphorefill' : [ 0x2a0, ['array', 28, ['unsigned char']]],
'SListFaultCount' : [ 0x2bc, ['unsigned long']],
'ThreadListEntry' : [ 0x2c0, ['_LIST_ENTRY']],
'SListFaultAddress' : [ 0x2d0, ['pointer64', ['void']]],
'ReadOperationCount' : [ 0x2d8, ['long long']],
'WriteOperationCount' : [ 0x2e0, ['long long']],
'OtherOperationCount' : [ 0x2e8, ['long long']],
'ReadTransferCount' : [ 0x2f0, ['long long']],
'WriteTransferCount' : [ 0x2f8, ['long long']],
'OtherTransferCount' : [ 0x300, ['long long']],
} ],
'_KERNEL_STACK_SEGMENT' : [ 0x28, {
'StackBase' : [ 0x0, ['unsigned long long']],
'StackLimit' : [ 0x8, ['unsigned long long']],
'KernelStack' : [ 0x10, ['unsigned long long']],
'InitialStack' : [ 0x18, ['unsigned long long']],
'ActualLimit' : [ 0x20, ['unsigned long long']],
} ],
'_FAST_MUTEX' : [ 0x38, {
'Count' : [ 0x0, ['long']],
'Owner' : [ 0x8, ['pointer64', ['_KTHREAD']]],
'Contention' : [ 0x10, ['unsigned long']],
'Gate' : [ 0x18, ['_KEVENT']],
'OldIrql' : [ 0x30, ['unsigned long']],
} ],
'_SLIST_HEADER' : [ 0x10, {
'Alignment' : [ 0x0, ['unsigned long long']],
'Region' : [ 0x8, ['unsigned long long']],
} ],
'_NPAGED_LOOKASIDE_LIST' : [ 0x80, {
'L' : [ 0x0, ['_GENERAL_LOOKASIDE']],
} ],
'_PAGED_LOOKASIDE_LIST' : [ 0x80, {
'L' : [ 0x0, ['_GENERAL_LOOKASIDE']],
} ],
'_GENERAL_LOOKASIDE' : [ 0x80, {
'ListHead' : [ 0x0, ['_SLIST_HEADER']],
'Depth' : [ 0x10, ['unsigned short']],
'MaximumDepth' : [ 0x12, ['unsigned short']],
'TotalAllocates' : [ 0x14, ['unsigned long']],
'AllocateMisses' : [ 0x18, ['unsigned long']],
'AllocateHits' : [ 0x18, ['unsigned long']],
'TotalFrees' : [ 0x1c, ['unsigned long']],
'FreeMisses' : [ 0x20, ['unsigned long']],
'FreeHits' : [ 0x20, ['unsigned long']],
'Type' : [ 0x24, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'Tag' : [ 0x28, ['unsigned long']],
'Size' : [ 0x2c, ['unsigned long']],
'Allocate' : [ 0x30, ['pointer64', ['void']]],
'Free' : [ 0x38, ['pointer64', ['void']]],
'ListEntry' : [ 0x40, ['_LIST_ENTRY']],
'LastTotalAllocates' : [ 0x50, ['unsigned long']],
'LastAllocateMisses' : [ 0x54, ['unsigned long']],
'LastAllocateHits' : [ 0x54, ['unsigned long']],
'Future' : [ 0x58, ['array', 2, ['unsigned long']]],
} ],
'_QUAD' : [ 0x8, {
'UseThisFieldToCopy' : [ 0x0, ['long long']],
'DoNotUseThisField' : [ 0x0, ['double']],
} ],
'_UNICODE_STRING' : [ 0x10, {
'Length' : [ 0x0, ['unsigned short']],
'MaximumLength' : [ 0x2, ['unsigned short']],
'Buffer' : [ 0x8, ['pointer64', ['unsigned short']]],
} ],
'_IO_STATUS_BLOCK' : [ 0x10, {
'Status' : [ 0x0, ['long']],
'Pointer' : [ 0x0, ['pointer64', ['void']]],
'Information' : [ 0x8, ['unsigned long long']],
} ],
'_EX_RUNDOWN_REF' : [ 0x8, {
'Count' : [ 0x0, ['unsigned long long']],
'Ptr' : [ 0x0, ['pointer64', ['void']]],
} ],
'_EX_FAST_REF' : [ 0x8, {
'Object' : [ 0x0, ['pointer64', ['void']]],
'RefCnt' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned long long')]],
'Value' : [ 0x0, ['unsigned long long']],
} ],
'_EX_PUSH_LOCK' : [ 0x8, {
'Locked' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Waiting' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Waking' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'MultipleShared' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'Shared' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 64, native_type='unsigned long long')]],
'Value' : [ 0x0, ['unsigned long long']],
'Ptr' : [ 0x0, ['pointer64', ['void']]],
} ],
'_EX_PUSH_LOCK_WAIT_BLOCK' : [ 0x40, {
'WakeGate' : [ 0x0, ['_KGATE']],
'WakeEvent' : [ 0x0, ['_KEVENT']],
'Next' : [ 0x18, ['pointer64', ['_EX_PUSH_LOCK_WAIT_BLOCK']]],
'Last' : [ 0x20, ['pointer64', ['_EX_PUSH_LOCK_WAIT_BLOCK']]],
'Previous' : [ 0x28, ['pointer64', ['_EX_PUSH_LOCK_WAIT_BLOCK']]],
'ShareCount' : [ 0x30, ['long']],
'Flags' : [ 0x34, ['long']],
} ],
'_EX_PUSH_LOCK_CACHE_AWARE' : [ 0x100, {
'Locks' : [ 0x0, ['array', 32, ['pointer64', ['_EX_PUSH_LOCK']]]],
} ],
'_ETHREAD' : [ 0x410, {
'Tcb' : [ 0x0, ['_KTHREAD']],
'CreateTime' : [ 0x308, ['_LARGE_INTEGER']],
'ExitTime' : [ 0x310, ['_LARGE_INTEGER']],
'LpcReplyChain' : [ 0x310, ['_LIST_ENTRY']],
'KeyedWaitChain' : [ 0x310, ['_LIST_ENTRY']],
'ExitStatus' : [ 0x320, ['long']],
'OfsChain' : [ 0x320, ['pointer64', ['void']]],
'PostBlockList' : [ 0x328, ['_LIST_ENTRY']],
'TerminationPort' : [ 0x338, ['pointer64', ['_TERMINATION_PORT']]],
'ReaperLink' : [ 0x338, ['pointer64', ['_ETHREAD']]],
'KeyedWaitValue' : [ 0x338, ['pointer64', ['void']]],
'ActiveTimerListLock' : [ 0x340, ['unsigned long long']],
'ActiveTimerListHead' : [ 0x348, ['_LIST_ENTRY']],
'Cid' : [ 0x358, ['_CLIENT_ID']],
'LpcReplySemaphore' : [ 0x368, ['_KSEMAPHORE']],
'KeyedWaitSemaphore' : [ 0x368, ['_KSEMAPHORE']],
'LpcReplyMessage' : [ 0x388, ['pointer64', ['void']]],
'LpcWaitingOnPort' : [ 0x388, ['pointer64', ['void']]],
'ImpersonationInfo' : [ 0x390, ['pointer64', ['_PS_IMPERSONATION_INFORMATION']]],
'IrpList' : [ 0x398, ['_LIST_ENTRY']],
'TopLevelIrp' : [ 0x3a8, ['unsigned long long']],
'DeviceToVerify' : [ 0x3b0, ['pointer64', ['_DEVICE_OBJECT']]],
'ThreadsProcess' : [ 0x3b8, ['pointer64', ['_EPROCESS']]],
'StartAddress' : [ 0x3c0, ['pointer64', ['void']]],
'Win32StartAddress' : [ 0x3c8, ['pointer64', ['void']]],
'LpcReceivedMessageId' : [ 0x3c8, ['unsigned long']],
'ThreadListEntry' : [ 0x3d0, ['_LIST_ENTRY']],
'RundownProtect' : [ 0x3e0, ['_EX_RUNDOWN_REF']],
'ThreadLock' : [ 0x3e8, ['_EX_PUSH_LOCK']],
'LpcReplyMessageId' : [ 0x3f0, ['unsigned long']],
'ReadClusterSize' : [ 0x3f4, ['unsigned long']],
'GrantedAccess' : [ 0x3f8, ['unsigned long']],
'CrossThreadFlags' : [ 0x3fc, ['unsigned long']],
'Terminated' : [ 0x3fc, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DeadThread' : [ 0x3fc, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'HideFromDebugger' : [ 0x3fc, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ActiveImpersonationInfo' : [ 0x3fc, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'SystemThread' : [ 0x3fc, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'HardErrorsAreDisabled' : [ 0x3fc, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'BreakOnTermination' : [ 0x3fc, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'SkipCreationMsg' : [ 0x3fc, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'SkipTerminationMsg' : [ 0x3fc, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'SameThreadPassiveFlags' : [ 0x400, ['unsigned long']],
'ActiveExWorker' : [ 0x400, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ExWorkerCanWaitUser' : [ 0x400, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'MemoryMaker' : [ 0x400, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'KeyedEventInUse' : [ 0x400, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'SameThreadApcFlags' : [ 0x404, ['unsigned long']],
'LpcReceivedMsgIdValid' : [ 0x404, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'LpcExitThreadCalled' : [ 0x404, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'AddressSpaceOwner' : [ 0x404, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'OwnsProcessWorkingSetExclusive' : [ 0x404, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'OwnsProcessWorkingSetShared' : [ 0x404, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'OwnsSystemWorkingSetExclusive' : [ 0x404, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'OwnsSystemWorkingSetShared' : [ 0x404, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'OwnsSessionWorkingSetExclusive' : [ 0x404, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'OwnsSessionWorkingSetShared' : [ 0x405, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'ApcNeeded' : [ 0x405, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'ForwardClusterOnly' : [ 0x408, ['unsigned char']],
'DisablePageFaultClustering' : [ 0x409, ['unsigned char']],
'ActiveFaultCount' : [ 0x40a, ['unsigned char']],
} ],
'_EPROCESS' : [ 0x3e0, {
'Pcb' : [ 0x0, ['_KPROCESS']],
'ProcessLock' : [ 0xb8, ['_EX_PUSH_LOCK']],
'CreateTime' : [ 0xc0, ['_LARGE_INTEGER']],
'ExitTime' : [ 0xc8, ['_LARGE_INTEGER']],
'RundownProtect' : [ 0xd0, ['_EX_RUNDOWN_REF']],
'UniqueProcessId' : [ 0xd8, ['pointer64', ['void']]],
'ActiveProcessLinks' : [ 0xe0, ['_LIST_ENTRY']],
'QuotaUsage' : [ 0xf0, ['array', 3, ['unsigned long long']]],
'QuotaPeak' : [ 0x108, ['array', 3, ['unsigned long long']]],
'CommitCharge' : [ 0x120, ['unsigned long long']],
'PeakVirtualSize' : [ 0x128, ['unsigned long long']],
'VirtualSize' : [ 0x130, ['unsigned long long']],
'SessionProcessLinks' : [ 0x138, ['_LIST_ENTRY']],
'DebugPort' : [ 0x148, ['pointer64', ['void']]],
'ExceptionPort' : [ 0x150, ['pointer64', ['void']]],
'ObjectTable' : [ 0x158, ['pointer64', ['_HANDLE_TABLE']]],
'Token' : [ 0x160, ['_EX_FAST_REF']],
'WorkingSetPage' : [ 0x168, ['unsigned long long']],
'AddressCreationLock' : [ 0x170, ['_KGUARDED_MUTEX']],
'HyperSpaceLock' : [ 0x1a8, ['unsigned long long']],
'ForkInProgress' : [ 0x1b0, ['pointer64', ['_ETHREAD']]],
'HardwareTrigger' : [ 0x1b8, ['unsigned long long']],
'PhysicalVadRoot' : [ 0x1c0, ['pointer64', ['_MM_AVL_TABLE']]],
'CloneRoot' : [ 0x1c8, ['pointer64', ['void']]],
'NumberOfPrivatePages' : [ 0x1d0, ['unsigned long long']],
'NumberOfLockedPages' : [ 0x1d8, ['unsigned long long']],
'Win32Process' : [ 0x1e0, ['pointer64', ['void']]],
'Job' : [ 0x1e8, ['pointer64', ['_EJOB']]],
'SectionObject' : [ 0x1f0, ['pointer64', ['void']]],
'SectionBaseAddress' : [ 0x1f8, ['pointer64', ['void']]],
'QuotaBlock' : [ 0x200, ['pointer64', ['_EPROCESS_QUOTA_BLOCK']]],
'WorkingSetWatch' : [ 0x208, ['pointer64', ['_PAGEFAULT_HISTORY']]],
'Win32WindowStation' : [ 0x210, ['pointer64', ['void']]],
'InheritedFromUniqueProcessId' : [ 0x218, ['pointer64', ['void']]],
'LdtInformation' : [ 0x220, ['pointer64', ['void']]],
'VadFreeHint' : [ 0x228, ['pointer64', ['void']]],
'VdmObjects' : [ 0x230, ['pointer64', ['void']]],
'DeviceMap' : [ 0x238, ['pointer64', ['void']]],
'Spare0' : [ 0x240, ['array', 3, ['pointer64', ['void']]]],
'PageDirectoryPte' : [ 0x258, ['_HARDWARE_PTE']],
'Filler' : [ 0x258, ['unsigned long long']],
'Session' : [ 0x260, ['pointer64', ['void']]],
'ImageFileName' : [ 0x268, ['array', 16, ['unsigned char']]],
'JobLinks' : [ 0x278, ['_LIST_ENTRY']],
'LockedPagesList' : [ 0x288, ['pointer64', ['void']]],
'ThreadListHead' : [ 0x290, ['_LIST_ENTRY']],
'SecurityPort' : [ 0x2a0, ['pointer64', ['void']]],
'Wow64Process' : [ 0x2a8, ['pointer64', ['_WOW64_PROCESS']]],
'ActiveThreads' : [ 0x2b0, ['unsigned long']],
'GrantedAccess' : [ 0x2b4, ['unsigned long']],
'DefaultHardErrorProcessing' : [ 0x2b8, ['unsigned long']],
'LastThreadExitStatus' : [ 0x2bc, ['long']],
'Peb' : [ 0x2c0, ['pointer64', ['_PEB']]],
'PrefetchTrace' : [ 0x2c8, ['_EX_FAST_REF']],
'ReadOperationCount' : [ 0x2d0, ['_LARGE_INTEGER']],
'WriteOperationCount' : [ 0x2d8, ['_LARGE_INTEGER']],
'OtherOperationCount' : [ 0x2e0, ['_LARGE_INTEGER']],
'ReadTransferCount' : [ 0x2e8, ['_LARGE_INTEGER']],
'WriteTransferCount' : [ 0x2f0, ['_LARGE_INTEGER']],
'OtherTransferCount' : [ 0x2f8, ['_LARGE_INTEGER']],
'CommitChargeLimit' : [ 0x300, ['unsigned long long']],
'CommitChargePeak' : [ 0x308, ['unsigned long long']],
'AweInfo' : [ 0x310, ['pointer64', ['void']]],
'SeAuditProcessCreationInfo' : [ 0x318, ['_SE_AUDIT_PROCESS_CREATION_INFO']],
'Vm' : [ 0x320, ['_MMSUPPORT']],
'Spares' : [ 0x378, ['array', 2, ['unsigned long']]],
'ModifiedPageCount' : [ 0x380, ['unsigned long']],
'JobStatus' : [ 0x384, ['unsigned long']],
'Flags' : [ 0x388, ['unsigned long']],
'CreateReported' : [ 0x388, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'NoDebugInherit' : [ 0x388, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ProcessExiting' : [ 0x388, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'ProcessDelete' : [ 0x388, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Wow64SplitPages' : [ 0x388, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'VmDeleted' : [ 0x388, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'OutswapEnabled' : [ 0x388, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'Outswapped' : [ 0x388, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'ForkFailed' : [ 0x388, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'Wow64VaSpace4Gb' : [ 0x388, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'AddressSpaceInitialized' : [ 0x388, ['BitField', dict(start_bit = 10, end_bit = 12, native_type='unsigned long')]],
'SetTimerResolution' : [ 0x388, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'BreakOnTermination' : [ 0x388, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'SessionCreationUnderway' : [ 0x388, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'WriteWatch' : [ 0x388, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'ProcessInSession' : [ 0x388, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'OverrideAddressSpace' : [ 0x388, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'HasAddressSpace' : [ 0x388, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'LaunchPrefetched' : [ 0x388, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'InjectInpageErrors' : [ 0x388, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'VmTopDown' : [ 0x388, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'ImageNotifyDone' : [ 0x388, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'PdeUpdateNeeded' : [ 0x388, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'VdmAllowed' : [ 0x388, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long')]],
'SmapAllowed' : [ 0x388, ['BitField', dict(start_bit = 25, end_bit = 26, native_type='unsigned long')]],
'CreateFailed' : [ 0x388, ['BitField', dict(start_bit = 26, end_bit = 27, native_type='unsigned long')]],
'DefaultIoPriority' : [ 0x388, ['BitField', dict(start_bit = 27, end_bit = 30, native_type='unsigned long')]],
'Spare1' : [ 0x388, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'Spare2' : [ 0x388, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'ExitStatus' : [ 0x38c, ['long']],
'NextPageColor' : [ 0x390, ['unsigned short']],
'SubSystemMinorVersion' : [ 0x392, ['unsigned char']],
'SubSystemMajorVersion' : [ 0x393, ['unsigned char']],
'SubSystemVersion' : [ 0x392, ['unsigned short']],
'PriorityClass' : [ 0x394, ['unsigned char']],
'VadRoot' : [ 0x398, ['_MM_AVL_TABLE']],
'Cookie' : [ 0x3d8, ['unsigned long']],
} ],
'_OBJECT_HEADER' : [ 0x38, {
'PointerCount' : [ 0x0, ['long long']],
'HandleCount' : [ 0x8, ['long long']],
'NextToFree' : [ 0x8, ['pointer64', ['void']]],
'Type' : [ 0x10, ['pointer64', ['_OBJECT_TYPE']]],
'NameInfoOffset' : [ 0x18, ['unsigned char']],
'HandleInfoOffset' : [ 0x19, ['unsigned char']],
'QuotaInfoOffset' : [ 0x1a, ['unsigned char']],
'Flags' : [ 0x1b, ['unsigned char']],
'ObjectCreateInfo' : [ 0x20, ['pointer64', ['_OBJECT_CREATE_INFORMATION']]],
'QuotaBlockCharged' : [ 0x20, ['pointer64', ['void']]],
'SecurityDescriptor' : [ 0x28, ['pointer64', ['void']]],
'Body' : [ 0x30, ['_QUAD']],
} ],
'_OBJECT_HEADER_QUOTA_INFO' : [ 0x20, {
'PagedPoolCharge' : [ 0x0, ['unsigned long']],
'NonPagedPoolCharge' : [ 0x4, ['unsigned long']],
'SecurityDescriptorCharge' : [ 0x8, ['unsigned long']],
'ExclusiveProcess' : [ 0x10, ['pointer64', ['_EPROCESS']]],
'Reserved' : [ 0x18, ['unsigned long long']],
} ],
'_OBJECT_HEADER_HANDLE_INFO' : [ 0x10, {
'HandleCountDataBase' : [ 0x0, ['pointer64', ['_OBJECT_HANDLE_COUNT_DATABASE']]],
'SingleEntry' : [ 0x0, ['_OBJECT_HANDLE_COUNT_ENTRY']],
} ],
'_OBJECT_HEADER_NAME_INFO' : [ 0x20, {
'Directory' : [ 0x0, ['pointer64', ['_OBJECT_DIRECTORY']]],
'Name' : [ 0x8, ['_UNICODE_STRING']],
'QueryReferences' : [ 0x18, ['unsigned long']],
} ],
'_OBJECT_HEADER_CREATOR_INFO' : [ 0x20, {
'TypeList' : [ 0x0, ['_LIST_ENTRY']],
'CreatorUniqueProcess' : [ 0x10, ['pointer64', ['void']]],
'CreatorBackTraceIndex' : [ 0x18, ['unsigned short']],
'Reserved' : [ 0x1a, ['unsigned short']],
} ],
'_OBJECT_ATTRIBUTES' : [ 0x30, {
'Length' : [ 0x0, ['unsigned long']],
'RootDirectory' : [ 0x8, ['pointer64', ['void']]],
'ObjectName' : [ 0x10, ['pointer64', ['_UNICODE_STRING']]],
'Attributes' : [ 0x18, ['unsigned long']],
'SecurityDescriptor' : [ 0x20, ['pointer64', ['void']]],
'SecurityQualityOfService' : [ 0x28, ['pointer64', ['void']]],
} ],
'_OBJECT_TYPE' : [ 0x2c0, {
'Mutex' : [ 0x0, ['_ERESOURCE']],
'TypeList' : [ 0x68, ['_LIST_ENTRY']],
'Name' : [ 0x78, ['_UNICODE_STRING']],
'DefaultObject' : [ 0x88, ['pointer64', ['void']]],
'Index' : [ 0x90, ['unsigned long']],
'TotalNumberOfObjects' : [ 0x94, ['unsigned long']],
'TotalNumberOfHandles' : [ 0x98, ['unsigned long']],
'HighWaterNumberOfObjects' : [ 0x9c, ['unsigned long']],
'HighWaterNumberOfHandles' : [ 0xa0, ['unsigned long']],
'TypeInfo' : [ 0xa8, ['_OBJECT_TYPE_INITIALIZER']],
'Key' : [ 0x118, ['unsigned long']],
'ObjectLocks' : [ 0x120, ['array', 4, ['_ERESOURCE']]],
} ],
'_OBJECT_HANDLE_INFORMATION' : [ 0x8, {
'HandleAttributes' : [ 0x0, ['unsigned long']],
'GrantedAccess' : [ 0x4, ['unsigned long']],
} ],
'_PERFINFO_GROUPMASK' : [ 0x20, {
'Masks' : [ 0x0, ['array', 8, ['unsigned long']]],
} ],
'_KGUARDED_MUTEX' : [ 0x38, {
'Count' : [ 0x0, ['long']],
'Owner' : [ 0x8, ['pointer64', ['_KTHREAD']]],
'Contention' : [ 0x10, ['unsigned long']],
'Gate' : [ 0x18, ['_KGATE']],
'KernelApcDisable' : [ 0x30, ['short']],
'SpecialApcDisable' : [ 0x32, ['short']],
'CombinedApcDisable' : [ 0x30, ['unsigned long']],
} ],
'__unnamed_115f' : [ 0x8, {
'Long' : [ 0x0, ['unsigned long long']],
'Hard' : [ 0x0, ['_MMPTE_HARDWARE']],
'HardLarge' : [ 0x0, ['_MMPTE_HARDWARE_LARGEPAGE']],
'Flush' : [ 0x0, ['_HARDWARE_PTE']],
'Proto' : [ 0x0, ['_MMPTE_PROTOTYPE']],
'Soft' : [ 0x0, ['_MMPTE_SOFTWARE']],
'Trans' : [ 0x0, ['_MMPTE_TRANSITION']],
'Subsect' : [ 0x0, ['_MMPTE_SUBSECTION']],
'List' : [ 0x0, ['_MMPTE_LIST']],
} ],
'_MMPTE' : [ 0x8, {
'u' : [ 0x0, ['__unnamed_115f']],
} ],
'__unnamed_116a' : [ 0x8, {
'Flink' : [ 0x0, ['unsigned long long']],
'WsIndex' : [ 0x0, ['unsigned long']],
'Event' : [ 0x0, ['pointer64', ['_KEVENT']]],
'ReadStatus' : [ 0x0, ['long']],
'NextStackPfn' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
} ],
'__unnamed_116c' : [ 0x8, {
'Blink' : [ 0x0, ['unsigned long long']],
'ShareCount' : [ 0x0, ['unsigned long long']],
} ],
'__unnamed_116f' : [ 0x4, {
'ReferenceCount' : [ 0x0, ['unsigned short']],
'ShortFlags' : [ 0x2, ['unsigned short']],
} ],
'__unnamed_1171' : [ 0x4, {
'ReferenceCount' : [ 0x0, ['unsigned short']],
'e1' : [ 0x2, ['_MMPFNENTRY']],
'e2' : [ 0x0, ['__unnamed_116f']],
} ],
'__unnamed_1179' : [ 0x8, {
'EntireFrame' : [ 0x0, ['unsigned long long']],
'PteFrame' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 57, native_type='unsigned long long')]],
'InPageError' : [ 0x0, ['BitField', dict(start_bit = 57, end_bit = 58, native_type='unsigned long long')]],
'VerifierAllocation' : [ 0x0, ['BitField', dict(start_bit = 58, end_bit = 59, native_type='unsigned long long')]],
'AweAllocation' : [ 0x0, ['BitField', dict(start_bit = 59, end_bit = 60, native_type='unsigned long long')]],
'Priority' : [ 0x0, ['BitField', dict(start_bit = 60, end_bit = 63, native_type='unsigned long long')]],
'MustBeCached' : [ 0x0, ['BitField', dict(start_bit = 63, end_bit = 64, native_type='unsigned long long')]],
} ],
'_MMPFN' : [ 0x30, {
'u1' : [ 0x0, ['__unnamed_116a']],
'PteAddress' : [ 0x8, ['pointer64', ['_MMPTE']]],
'u2' : [ 0x10, ['__unnamed_116c']],
'u3' : [ 0x18, ['__unnamed_1171']],
'UsedPageTableEntries' : [ 0x1c, ['unsigned long']],
'OriginalPte' : [ 0x20, ['_MMPTE']],
'AweReferenceCount' : [ 0x20, ['long']],
'u4' : [ 0x28, ['__unnamed_1179']],
} ],
'__unnamed_1180' : [ 0x8, {
'Balance' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='long long')]],
'Parent' : [ 0x0, ['pointer64', ['_MMVAD']]],
} ],
'__unnamed_1183' : [ 0x8, {
'LongFlags' : [ 0x0, ['unsigned long long']],
'VadFlags' : [ 0x0, ['_MMVAD_FLAGS']],
} ],
'__unnamed_1188' : [ 0x4, {
'LongFlags2' : [ 0x0, ['unsigned long']],
'VadFlags2' : [ 0x0, ['_MMVAD_FLAGS2']],
} ],
'_MMVAD' : [ 0x50, {
'u1' : [ 0x0, ['__unnamed_1180']],
'LeftChild' : [ 0x8, ['pointer64', ['_MMVAD']]],
'RightChild' : [ 0x10, ['pointer64', ['_MMVAD']]],
'StartingVpn' : [ 0x18, ['unsigned long long']],
'EndingVpn' : [ 0x20, ['unsigned long long']],
'u' : [ 0x28, ['__unnamed_1183']],
'ControlArea' : [ 0x30, ['pointer64', ['_CONTROL_AREA']]],
'FirstPrototypePte' : [ 0x38, ['pointer64', ['_MMPTE']]],
'LastContiguousPte' : [ 0x40, ['pointer64', ['_MMPTE']]],
'u2' : [ 0x48, ['__unnamed_1188']],
} ],
'_MM_AVL_TABLE' : [ 0x40, {
'BalancedRoot' : [ 0x0, ['_MMADDRESS_NODE']],
'DepthOfTree' : [ 0x28, ['BitField', dict(start_bit = 0, end_bit = 5, native_type='unsigned long long')]],
'Unused' : [ 0x28, ['BitField', dict(start_bit = 5, end_bit = 8, native_type='unsigned long long')]],
'NumberGenericTableElements' : [ 0x28, ['BitField', dict(start_bit = 8, end_bit = 64, native_type='unsigned long long')]],
'NodeHint' : [ 0x30, ['pointer64', ['void']]],
'NodeFreeHint' : [ 0x38, ['pointer64', ['void']]],
} ],
'_MMPTE_FLUSH_LIST' : [ 0xa8, {
'Count' : [ 0x0, ['unsigned long']],
'FlushVa' : [ 0x8, ['array', 20, ['pointer64', ['void']]]],
} ],
'__unnamed_119a' : [ 0x4, {
'LongFlags' : [ 0x0, ['unsigned long']],
'SubsectionFlags' : [ 0x0, ['_MMSUBSECTION_FLAGS']],
} ],
'_SUBSECTION' : [ 0x30, {
'ControlArea' : [ 0x0, ['pointer64', ['_CONTROL_AREA']]],
'u' : [ 0x8, ['__unnamed_119a']],
'StartingSector' : [ 0xc, ['unsigned long']],
'NumberOfFullSectors' : [ 0x10, ['unsigned long']],
'SubsectionBase' : [ 0x18, ['pointer64', ['_MMPTE']]],
'UnusedPtes' : [ 0x20, ['unsigned long']],
'PtesInSubsection' : [ 0x24, ['unsigned long']],
'NextSubsection' : [ 0x28, ['pointer64', ['_SUBSECTION']]],
} ],
'_MMPAGING_FILE' : [ 0x78, {
'Size' : [ 0x0, ['unsigned long long']],
'MaximumSize' : [ 0x8, ['unsigned long long']],
'MinimumSize' : [ 0x10, ['unsigned long long']],
'FreeSpace' : [ 0x18, ['unsigned long long']],
'CurrentUsage' : [ 0x20, ['unsigned long long']],
'PeakUsage' : [ 0x28, ['unsigned long long']],
'HighestPage' : [ 0x30, ['unsigned long long']],
'File' : [ 0x38, ['pointer64', ['_FILE_OBJECT']]],
'Entry' : [ 0x40, ['array', 2, ['pointer64', ['_MMMOD_WRITER_MDL_ENTRY']]]],
'PageFileName' : [ 0x50, ['_UNICODE_STRING']],
'Bitmap' : [ 0x60, ['pointer64', ['_RTL_BITMAP']]],
'PageFileNumber' : [ 0x68, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned long')]],
'ReferenceCount' : [ 0x68, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned long')]],
'BootPartition' : [ 0x68, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'Reserved' : [ 0x68, ['BitField', dict(start_bit = 9, end_bit = 32, native_type='unsigned long')]],
'FileHandle' : [ 0x70, ['pointer64', ['void']]],
} ],
'_EXCEPTION_RECORD' : [ 0x98, {
'ExceptionCode' : [ 0x0, ['long']],
'ExceptionFlags' : [ 0x4, ['unsigned long']],
'ExceptionRecord' : [ 0x8, ['pointer64', ['_EXCEPTION_RECORD']]],
'ExceptionAddress' : [ 0x10, ['pointer64', ['void']]],
'NumberParameters' : [ 0x18, ['unsigned long']],
'ExceptionInformation' : [ 0x20, ['array', 15, ['unsigned long long']]],
} ],
'_EXCEPTION_RECORD64' : [ 0x98, {
'ExceptionCode' : [ 0x0, ['long']],
'ExceptionFlags' : [ 0x4, ['unsigned long']],
'ExceptionRecord' : [ 0x8, ['unsigned long long']],
'ExceptionAddress' : [ 0x10, ['unsigned long long']],
'NumberParameters' : [ 0x18, ['unsigned long']],
'__unusedAlignment' : [ 0x1c, ['unsigned long']],
'ExceptionInformation' : [ 0x20, ['array', 15, ['unsigned long long']]],
} ],
'_KTIMER' : [ 0x40, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'DueTime' : [ 0x18, ['_ULARGE_INTEGER']],
'TimerListEntry' : [ 0x20, ['_LIST_ENTRY']],
'Dpc' : [ 0x30, ['pointer64', ['_KDPC']]],
'Period' : [ 0x38, ['long']],
} ],
'_KEVENT' : [ 0x18, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
} ],
'_KLOCK_QUEUE_HANDLE' : [ 0x18, {
'LockQueue' : [ 0x0, ['_KSPIN_LOCK_QUEUE']],
'OldIrql' : [ 0x10, ['unsigned char']],
} ],
'_KSPIN_LOCK_QUEUE' : [ 0x10, {
'Next' : [ 0x0, ['pointer64', ['_KSPIN_LOCK_QUEUE']]],
'Lock' : [ 0x8, ['pointer64', ['unsigned long long']]],
} ],
'_KQUEUE' : [ 0x40, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'EntryListHead' : [ 0x18, ['_LIST_ENTRY']],
'CurrentCount' : [ 0x28, ['unsigned long']],
'MaximumCount' : [ 0x2c, ['unsigned long']],
'ThreadListHead' : [ 0x30, ['_LIST_ENTRY']],
} ],
'_KWAIT_BLOCK' : [ 0x30, {
'WaitListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Thread' : [ 0x10, ['pointer64', ['_KTHREAD']]],
'Object' : [ 0x18, ['pointer64', ['void']]],
'NextWaitBlock' : [ 0x20, ['pointer64', ['_KWAIT_BLOCK']]],
'WaitKey' : [ 0x28, ['unsigned short']],
'WaitType' : [ 0x2a, ['unsigned char']],
'SpareByte' : [ 0x2b, ['unsigned char']],
'SpareLong' : [ 0x2c, ['long']],
} ],
'_KTIMER_TABLE_ENTRY' : [ 0x18, {
'Entry' : [ 0x0, ['_LIST_ENTRY']],
'Time' : [ 0x10, ['_ULARGE_INTEGER']],
} ],
'_KPROCESS' : [ 0xb8, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'ProfileListHead' : [ 0x18, ['_LIST_ENTRY']],
'DirectoryTableBase' : [ 0x28, ['array', 2, ['unsigned long long']]],
'IopmOffset' : [ 0x38, ['unsigned short']],
'ActiveProcessors' : [ 0x40, ['unsigned long long']],
'KernelTime' : [ 0x48, ['unsigned long']],
'UserTime' : [ 0x4c, ['unsigned long']],
'ReadyListHead' : [ 0x50, ['_LIST_ENTRY']],
'SwapListEntry' : [ 0x60, ['_SINGLE_LIST_ENTRY']],
'Reserved1' : [ 0x68, ['pointer64', ['void']]],
'ThreadListHead' : [ 0x70, ['_LIST_ENTRY']],
'ProcessLock' : [ 0x80, ['unsigned long long']],
'Affinity' : [ 0x88, ['unsigned long long']],
'AutoAlignment' : [ 0x90, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='long')]],
'DisableBoost' : [ 0x90, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='long')]],
'DisableQuantum' : [ 0x90, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='long')]],
'ReservedFlags' : [ 0x90, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='long')]],
'ProcessFlags' : [ 0x90, ['long']],
'BasePriority' : [ 0x94, ['unsigned char']],
'QuantumReset' : [ 0x95, ['unsigned char']],
'State' : [ 0x96, ['unsigned char']],
'ThreadSeed' : [ 0x97, ['unsigned char']],
'PowerState' : [ 0x98, ['unsigned char']],
'IdealNode' : [ 0x99, ['unsigned char']],
'Visited' : [ 0x9a, ['unsigned char']],
'Flags' : [ 0x9b, ['_KEXECUTE_OPTIONS']],
'ExecuteOptions' : [ 0x9b, ['unsigned char']],
'StackCount' : [ 0xa0, ['unsigned long long']],
'ProcessListEntry' : [ 0xa8, ['_LIST_ENTRY']],
} ],
'_KEXCEPTION_FRAME' : [ 0x180, {
'P1Home' : [ 0x0, ['unsigned long long']],
'P2Home' : [ 0x8, ['unsigned long long']],
'P3Home' : [ 0x10, ['unsigned long long']],
'P4Home' : [ 0x18, ['unsigned long long']],
'P5' : [ 0x20, ['unsigned long long']],
'InitialStack' : [ 0x28, ['unsigned long long']],
'Xmm6' : [ 0x30, ['_M128A']],
'Xmm7' : [ 0x40, ['_M128A']],
'Xmm8' : [ 0x50, ['_M128A']],
'Xmm9' : [ 0x60, ['_M128A']],
'Xmm10' : [ 0x70, ['_M128A']],
'Xmm11' : [ 0x80, ['_M128A']],
'Xmm12' : [ 0x90, ['_M128A']],
'Xmm13' : [ 0xa0, ['_M128A']],
'Xmm14' : [ 0xb0, ['_M128A']],
'Xmm15' : [ 0xc0, ['_M128A']],
'TrapFrame' : [ 0xd0, ['unsigned long long']],
'CallbackStack' : [ 0xd8, ['unsigned long long']],
'OutputBuffer' : [ 0xe0, ['unsigned long long']],
'OutputLength' : [ 0xe8, ['unsigned long long']],
'ExceptionRecord' : [ 0xf0, ['array', 64, ['unsigned char']]],
'MxCsr' : [ 0x130, ['unsigned long long']],
'Rbp' : [ 0x138, ['unsigned long long']],
'Rbx' : [ 0x140, ['unsigned long long']],
'Rdi' : [ 0x148, ['unsigned long long']],
'Rsi' : [ 0x150, ['unsigned long long']],
'R12' : [ 0x158, ['unsigned long long']],
'R13' : [ 0x160, ['unsigned long long']],
'R14' : [ 0x168, ['unsigned long long']],
'R15' : [ 0x170, ['unsigned long long']],
'Return' : [ 0x178, ['unsigned long long']],
} ],
'_KTRAP_FRAME' : [ 0x190, {
'P1Home' : [ 0x0, ['unsigned long long']],
'P2Home' : [ 0x8, ['unsigned long long']],
'P3Home' : [ 0x10, ['unsigned long long']],
'P4Home' : [ 0x18, ['unsigned long long']],
'P5' : [ 0x20, ['unsigned long long']],
'PreviousMode' : [ 0x28, ['unsigned char']],
'PreviousIrql' : [ 0x29, ['unsigned char']],
'FaultIndicator' : [ 0x2a, ['unsigned char']],
'ExceptionActive' : [ 0x2b, ['unsigned char']],
'MxCsr' : [ 0x2c, ['unsigned long']],
'Rax' : [ 0x30, ['unsigned long long']],
'Rcx' : [ 0x38, ['unsigned long long']],
'Rdx' : [ 0x40, ['unsigned long long']],
'R8' : [ 0x48, ['unsigned long long']],
'R9' : [ 0x50, ['unsigned long long']],
'R10' : [ 0x58, ['unsigned long long']],
'R11' : [ 0x60, ['unsigned long long']],
'GsBase' : [ 0x68, ['unsigned long long']],
'GsSwap' : [ 0x68, ['unsigned long long']],
'Xmm0' : [ 0x70, ['_M128A']],
'Xmm1' : [ 0x80, ['_M128A']],
'Xmm2' : [ 0x90, ['_M128A']],
'Xmm3' : [ 0xa0, ['_M128A']],
'Xmm4' : [ 0xb0, ['_M128A']],
'Xmm5' : [ 0xc0, ['_M128A']],
'FaultAddress' : [ 0xd0, ['unsigned long long']],
'ContextRecord' : [ 0xd0, ['unsigned long long']],
'TimeStamp' : [ 0xd0, ['unsigned long long']],
'Dr0' : [ 0xd8, ['unsigned long long']],
'Dr1' : [ 0xe0, ['unsigned long long']],
'Dr2' : [ 0xe8, ['unsigned long long']],
'Dr3' : [ 0xf0, ['unsigned long long']],
'Dr6' : [ 0xf8, ['unsigned long long']],
'Dr7' : [ 0x100, ['unsigned long long']],
'DebugControl' : [ 0x108, ['unsigned long long']],
'LastBranchToRip' : [ 0x110, ['unsigned long long']],
'LastBranchFromRip' : [ 0x118, ['unsigned long long']],
'LastExceptionToRip' : [ 0x120, ['unsigned long long']],
'LastExceptionFromRip' : [ 0x128, ['unsigned long long']],
'LastBranchControl' : [ 0x108, ['unsigned long long']],
'LastBranchMSR' : [ 0x110, ['unsigned long']],
'SegDs' : [ 0x130, ['unsigned short']],
'SegEs' : [ 0x132, ['unsigned short']],
'SegFs' : [ 0x134, ['unsigned short']],
'SegGs' : [ 0x136, ['unsigned short']],
'TrapFrame' : [ 0x138, ['unsigned long long']],
'Rbx' : [ 0x140, ['unsigned long long']],
'Rdi' : [ 0x148, ['unsigned long long']],
'Rsi' : [ 0x150, ['unsigned long long']],
'Rbp' : [ 0x158, ['unsigned long long']],
'ErrorCode' : [ 0x160, ['unsigned long long']],
'ExceptionFrame' : [ 0x160, ['unsigned long long']],
'Rip' : [ 0x168, ['unsigned long long']],
'SegCs' : [ 0x170, ['unsigned short']],
'Fill1' : [ 0x172, ['array', 3, ['unsigned short']]],
'EFlags' : [ 0x178, ['unsigned long']],
'Fill2' : [ 0x17c, ['unsigned long']],
'Rsp' : [ 0x180, ['unsigned long long']],
'SegSs' : [ 0x188, ['unsigned short']],
'Fill3' : [ 0x18a, ['array', 1, ['unsigned short']]],
'CodePatchCycle' : [ 0x18c, ['long']],
} ],
'_EXCEPTION_RECORD32' : [ 0x50, {
'ExceptionCode' : [ 0x0, ['long']],
'ExceptionFlags' : [ 0x4, ['unsigned long']],
'ExceptionRecord' : [ 0x8, ['unsigned long']],
'ExceptionAddress' : [ 0xc, ['unsigned long']],
'NumberParameters' : [ 0x10, ['unsigned long']],
'ExceptionInformation' : [ 0x14, ['array', 15, ['unsigned long']]],
} ],
'_DBGKM_EXCEPTION64' : [ 0xa0, {
'ExceptionRecord' : [ 0x0, ['_EXCEPTION_RECORD64']],
'FirstChance' : [ 0x98, ['unsigned long']],
} ],
'_DBGKM_EXCEPTION32' : [ 0x54, {
'ExceptionRecord' : [ 0x0, ['_EXCEPTION_RECORD32']],
'FirstChance' : [ 0x50, ['unsigned long']],
} ],
'_DBGKD_LOAD_SYMBOLS64' : [ 0x28, {
'PathNameLength' : [ 0x0, ['unsigned long']],
'BaseOfDll' : [ 0x8, ['unsigned long long']],
'ProcessId' : [ 0x10, ['unsigned long long']],
'CheckSum' : [ 0x18, ['unsigned long']],
'SizeOfImage' : [ 0x1c, ['unsigned long']],
'UnloadSymbols' : [ 0x20, ['unsigned char']],
} ],
'_DBGKD_LOAD_SYMBOLS32' : [ 0x18, {
'PathNameLength' : [ 0x0, ['unsigned long']],
'BaseOfDll' : [ 0x4, ['unsigned long']],
'ProcessId' : [ 0x8, ['unsigned long']],
'CheckSum' : [ 0xc, ['unsigned long']],
'SizeOfImage' : [ 0x10, ['unsigned long']],
'UnloadSymbols' : [ 0x14, ['unsigned char']],
} ],
'_DBGKD_READ_MEMORY64' : [ 0x10, {
'TargetBaseAddress' : [ 0x0, ['unsigned long long']],
'TransferCount' : [ 0x8, ['unsigned long']],
'ActualBytesRead' : [ 0xc, ['unsigned long']],
} ],
'_DBGKD_READ_MEMORY32' : [ 0xc, {
'TargetBaseAddress' : [ 0x0, ['unsigned long']],
'TransferCount' : [ 0x4, ['unsigned long']],
'ActualBytesRead' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_WRITE_MEMORY64' : [ 0x10, {
'TargetBaseAddress' : [ 0x0, ['unsigned long long']],
'TransferCount' : [ 0x8, ['unsigned long']],
'ActualBytesWritten' : [ 0xc, ['unsigned long']],
} ],
'_DBGKD_WRITE_MEMORY32' : [ 0xc, {
'TargetBaseAddress' : [ 0x0, ['unsigned long']],
'TransferCount' : [ 0x4, ['unsigned long']],
'ActualBytesWritten' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_WRITE_BREAKPOINT64' : [ 0x10, {
'BreakPointAddress' : [ 0x0, ['unsigned long long']],
'BreakPointHandle' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_WRITE_BREAKPOINT32' : [ 0x8, {
'BreakPointAddress' : [ 0x0, ['unsigned long']],
'BreakPointHandle' : [ 0x4, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO64' : [ 0x10, {
'IoAddress' : [ 0x0, ['unsigned long long']],
'DataSize' : [ 0x8, ['unsigned long']],
'DataValue' : [ 0xc, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO32' : [ 0xc, {
'DataSize' : [ 0x0, ['unsigned long']],
'IoAddress' : [ 0x4, ['unsigned long']],
'DataValue' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO_EXTENDED64' : [ 0x20, {
'DataSize' : [ 0x0, ['unsigned long']],
'InterfaceType' : [ 0x4, ['unsigned long']],
'BusNumber' : [ 0x8, ['unsigned long']],
'AddressSpace' : [ 0xc, ['unsigned long']],
'IoAddress' : [ 0x10, ['unsigned long long']],
'DataValue' : [ 0x18, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_IO_EXTENDED32' : [ 0x18, {
'DataSize' : [ 0x0, ['unsigned long']],
'InterfaceType' : [ 0x4, ['unsigned long']],
'BusNumber' : [ 0x8, ['unsigned long']],
'AddressSpace' : [ 0xc, ['unsigned long']],
'IoAddress' : [ 0x10, ['unsigned long']],
'DataValue' : [ 0x14, ['unsigned long']],
} ],
'_DBGKD_SET_SPECIAL_CALL32' : [ 0x4, {
'SpecialCall' : [ 0x0, ['unsigned long']],
} ],
'_DBGKD_SET_SPECIAL_CALL64' : [ 0x8, {
'SpecialCall' : [ 0x0, ['unsigned long long']],
} ],
'_DBGKD_SET_INTERNAL_BREAKPOINT32' : [ 0x8, {
'BreakpointAddress' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x4, ['unsigned long']],
} ],
'_DBGKD_SET_INTERNAL_BREAKPOINT64' : [ 0x10, {
'BreakpointAddress' : [ 0x0, ['unsigned long long']],
'Flags' : [ 0x8, ['unsigned long']],
} ],
'_DBGKD_GET_INTERNAL_BREAKPOINT64' : [ 0x20, {
'BreakpointAddress' : [ 0x0, ['unsigned long long']],
'Flags' : [ 0x8, ['unsigned long']],
'Calls' : [ 0xc, ['unsigned long']],
'MaxCallsPerPeriod' : [ 0x10, ['unsigned long']],
'MinInstructions' : [ 0x14, ['unsigned long']],
'MaxInstructions' : [ 0x18, ['unsigned long']],
'TotalInstructions' : [ 0x1c, ['unsigned long']],
} ],
'_DBGKD_GET_INTERNAL_BREAKPOINT32' : [ 0x1c, {
'BreakpointAddress' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x4, ['unsigned long']],
'Calls' : [ 0x8, ['unsigned long']],
'MaxCallsPerPeriod' : [ 0xc, ['unsigned long']],
'MinInstructions' : [ 0x10, ['unsigned long']],
'MaxInstructions' : [ 0x14, ['unsigned long']],
'TotalInstructions' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_1240' : [ 0x28, {
'ReadMemory' : [ 0x0, ['_DBGKD_READ_MEMORY64']],
'WriteMemory' : [ 0x0, ['_DBGKD_WRITE_MEMORY64']],
'GetContext' : [ 0x0, ['_DBGKD_GET_CONTEXT']],
'SetContext' : [ 0x0, ['_DBGKD_SET_CONTEXT']],
'WriteBreakPoint' : [ 0x0, ['_DBGKD_WRITE_BREAKPOINT64']],
'RestoreBreakPoint' : [ 0x0, ['_DBGKD_RESTORE_BREAKPOINT']],
'Continue' : [ 0x0, ['_DBGKD_CONTINUE']],
'Continue2' : [ 0x0, ['_DBGKD_CONTINUE2']],
'ReadWriteIo' : [ 0x0, ['_DBGKD_READ_WRITE_IO64']],
'ReadWriteIoExtended' : [ 0x0, ['_DBGKD_READ_WRITE_IO_EXTENDED64']],
'QuerySpecialCalls' : [ 0x0, ['_DBGKD_QUERY_SPECIAL_CALLS']],
'SetSpecialCall' : [ 0x0, ['_DBGKD_SET_SPECIAL_CALL64']],
'SetInternalBreakpoint' : [ 0x0, ['_DBGKD_SET_INTERNAL_BREAKPOINT64']],
'GetInternalBreakpoint' : [ 0x0, ['_DBGKD_GET_INTERNAL_BREAKPOINT64']],
'GetVersion64' : [ 0x0, ['_DBGKD_GET_VERSION64']],
'BreakPointEx' : [ 0x0, ['_DBGKD_BREAKPOINTEX']],
'ReadWriteMsr' : [ 0x0, ['_DBGKD_READ_WRITE_MSR']],
'SearchMemory' : [ 0x0, ['_DBGKD_SEARCH_MEMORY']],
'GetSetBusData' : [ 0x0, ['_DBGKD_GET_SET_BUS_DATA']],
'FillMemory' : [ 0x0, ['_DBGKD_FILL_MEMORY']],
'QueryMemory' : [ 0x0, ['_DBGKD_QUERY_MEMORY']],
'SwitchPartition' : [ 0x0, ['_DBGKD_SWITCH_PARTITION']],
} ],
'_DBGKD_MANIPULATE_STATE64' : [ 0x38, {
'ApiNumber' : [ 0x0, ['unsigned long']],
'ProcessorLevel' : [ 0x4, ['unsigned short']],
'Processor' : [ 0x6, ['unsigned short']],
'ReturnStatus' : [ 0x8, ['long']],
'u' : [ 0x10, ['__unnamed_1240']],
} ],
'__unnamed_1247' : [ 0x28, {
'ReadMemory' : [ 0x0, ['_DBGKD_READ_MEMORY32']],
'WriteMemory' : [ 0x0, ['_DBGKD_WRITE_MEMORY32']],
'ReadMemory64' : [ 0x0, ['_DBGKD_READ_MEMORY64']],
'WriteMemory64' : [ 0x0, ['_DBGKD_WRITE_MEMORY64']],
'GetContext' : [ 0x0, ['_DBGKD_GET_CONTEXT']],
'SetContext' : [ 0x0, ['_DBGKD_SET_CONTEXT']],
'WriteBreakPoint' : [ 0x0, ['_DBGKD_WRITE_BREAKPOINT32']],
'RestoreBreakPoint' : [ 0x0, ['_DBGKD_RESTORE_BREAKPOINT']],
'Continue' : [ 0x0, ['_DBGKD_CONTINUE']],
'Continue2' : [ 0x0, ['_DBGKD_CONTINUE2']],
'ReadWriteIo' : [ 0x0, ['_DBGKD_READ_WRITE_IO32']],
'ReadWriteIoExtended' : [ 0x0, ['_DBGKD_READ_WRITE_IO_EXTENDED32']],
'QuerySpecialCalls' : [ 0x0, ['_DBGKD_QUERY_SPECIAL_CALLS']],
'SetSpecialCall' : [ 0x0, ['_DBGKD_SET_SPECIAL_CALL32']],
'SetInternalBreakpoint' : [ 0x0, ['_DBGKD_SET_INTERNAL_BREAKPOINT32']],
'GetInternalBreakpoint' : [ 0x0, ['_DBGKD_GET_INTERNAL_BREAKPOINT32']],
'GetVersion32' : [ 0x0, ['_DBGKD_GET_VERSION32']],
'BreakPointEx' : [ 0x0, ['_DBGKD_BREAKPOINTEX']],
'ReadWriteMsr' : [ 0x0, ['_DBGKD_READ_WRITE_MSR']],
'SearchMemory' : [ 0x0, ['_DBGKD_SEARCH_MEMORY']],
} ],
'_DBGKD_MANIPULATE_STATE32' : [ 0x34, {
'ApiNumber' : [ 0x0, ['unsigned long']],
'ProcessorLevel' : [ 0x4, ['unsigned short']],
'Processor' : [ 0x6, ['unsigned short']],
'ReturnStatus' : [ 0x8, ['long']],
'u' : [ 0xc, ['__unnamed_1247']],
} ],
'_SHARED_CACHE_MAP' : [ 0x1b8, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeByteSize' : [ 0x2, ['short']],
'OpenCount' : [ 0x4, ['unsigned long']],
'FileSize' : [ 0x8, ['_LARGE_INTEGER']],
'BcbList' : [ 0x10, ['_LIST_ENTRY']],
'SectionSize' : [ 0x20, ['_LARGE_INTEGER']],
'ValidDataLength' : [ 0x28, ['_LARGE_INTEGER']],
'ValidDataGoal' : [ 0x30, ['_LARGE_INTEGER']],
'InitialVacbs' : [ 0x38, ['array', 4, ['pointer64', ['_VACB']]]],
'Vacbs' : [ 0x58, ['pointer64', ['pointer64', ['_VACB']]]],
'FileObject' : [ 0x60, ['pointer64', ['_FILE_OBJECT']]],
'ActiveVacb' : [ 0x68, ['pointer64', ['_VACB']]],
'NeedToZero' : [ 0x70, ['pointer64', ['void']]],
'ActivePage' : [ 0x78, ['unsigned long']],
'NeedToZeroPage' : [ 0x7c, ['unsigned long']],
'ActiveVacbSpinLock' : [ 0x80, ['unsigned long long']],
'VacbActiveCount' : [ 0x88, ['unsigned long']],
'DirtyPages' : [ 0x8c, ['unsigned long']],
'SharedCacheMapLinks' : [ 0x90, ['_LIST_ENTRY']],
'Flags' : [ 0xa0, ['unsigned long']],
'Status' : [ 0xa4, ['long']],
'Mbcb' : [ 0xa8, ['pointer64', ['_MBCB']]],
'Section' : [ 0xb0, ['pointer64', ['void']]],
'CreateEvent' : [ 0xb8, ['pointer64', ['_KEVENT']]],
'WaitOnActiveCount' : [ 0xc0, ['pointer64', ['_KEVENT']]],
'PagesToWrite' : [ 0xc8, ['unsigned long']],
'BeyondLastFlush' : [ 0xd0, ['long long']],
'Callbacks' : [ 0xd8, ['pointer64', ['_CACHE_MANAGER_CALLBACKS']]],
'LazyWriteContext' : [ 0xe0, ['pointer64', ['void']]],
'PrivateList' : [ 0xe8, ['_LIST_ENTRY']],
'LogHandle' : [ 0xf8, ['pointer64', ['void']]],
'FlushToLsnRoutine' : [ 0x100, ['pointer64', ['void']]],
'DirtyPageThreshold' : [ 0x108, ['unsigned long']],
'LazyWritePassCount' : [ 0x10c, ['unsigned long']],
'UninitializeEvent' : [ 0x110, ['pointer64', ['_CACHE_UNINITIALIZE_EVENT']]],
'NeedToZeroVacb' : [ 0x118, ['pointer64', ['_VACB']]],
'BcbSpinLock' : [ 0x120, ['unsigned long long']],
'Reserved' : [ 0x128, ['pointer64', ['void']]],
'Event' : [ 0x130, ['_KEVENT']],
'VacbPushLock' : [ 0x148, ['_EX_PUSH_LOCK']],
'PrivateCacheMap' : [ 0x150, ['_PRIVATE_CACHE_MAP']],
'WriteBehindWorkQueueEntry' : [ 0x1b0, ['pointer64', ['void']]],
} ],
'_FILE_OBJECT' : [ 0xb8, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'Vpb' : [ 0x10, ['pointer64', ['_VPB']]],
'FsContext' : [ 0x18, ['pointer64', ['void']]],
'FsContext2' : [ 0x20, ['pointer64', ['void']]],
'SectionObjectPointer' : [ 0x28, ['pointer64', ['_SECTION_OBJECT_POINTERS']]],
'PrivateCacheMap' : [ 0x30, ['pointer64', ['void']]],
'FinalStatus' : [ 0x38, ['long']],
'RelatedFileObject' : [ 0x40, ['pointer64', ['_FILE_OBJECT']]],
'LockOperation' : [ 0x48, ['unsigned char']],
'DeletePending' : [ 0x49, ['unsigned char']],
'ReadAccess' : [ 0x4a, ['unsigned char']],
'WriteAccess' : [ 0x4b, ['unsigned char']],
'DeleteAccess' : [ 0x4c, ['unsigned char']],
'SharedRead' : [ 0x4d, ['unsigned char']],
'SharedWrite' : [ 0x4e, ['unsigned char']],
'SharedDelete' : [ 0x4f, ['unsigned char']],
'Flags' : [ 0x50, ['unsigned long']],
'FileName' : [ 0x58, ['_UNICODE_STRING']],
'CurrentByteOffset' : [ 0x68, ['_LARGE_INTEGER']],
'Waiters' : [ 0x70, ['unsigned long']],
'Busy' : [ 0x74, ['unsigned long']],
'LastLock' : [ 0x78, ['pointer64', ['void']]],
'Lock' : [ 0x80, ['_KEVENT']],
'Event' : [ 0x98, ['_KEVENT']],
'CompletionContext' : [ 0xb0, ['pointer64', ['_IO_COMPLETION_CONTEXT']]],
} ],
'__unnamed_126d' : [ 0x8, {
'FileOffset' : [ 0x0, ['_LARGE_INTEGER']],
'ActiveCount' : [ 0x0, ['unsigned short']],
} ],
'_VACB' : [ 0x28, {
'BaseAddress' : [ 0x0, ['pointer64', ['void']]],
'SharedCacheMap' : [ 0x8, ['pointer64', ['_SHARED_CACHE_MAP']]],
'Overlay' : [ 0x10, ['__unnamed_126d']],
'LruList' : [ 0x18, ['_LIST_ENTRY']],
} ],
'_VACB_LEVEL_REFERENCE' : [ 0x8, {
'Reference' : [ 0x0, ['long']],
'SpecialReference' : [ 0x4, ['long']],
} ],
'__unnamed_1282' : [ 0x10, {
'FreeListsInUseUlong' : [ 0x0, ['array', 4, ['unsigned long']]],
'FreeListsInUseBytes' : [ 0x0, ['array', 16, ['unsigned char']]],
} ],
'__unnamed_1284' : [ 0x2, {
'FreeListsInUseTerminate' : [ 0x0, ['unsigned short']],
'DecommitCount' : [ 0x0, ['unsigned short']],
} ],
'_HEAP' : [ 0xae8, {
'Entry' : [ 0x0, ['_HEAP_ENTRY']],
'Signature' : [ 0x10, ['unsigned long']],
'Flags' : [ 0x14, ['unsigned long']],
'ForceFlags' : [ 0x18, ['unsigned long']],
'VirtualMemoryThreshold' : [ 0x1c, ['unsigned long']],
'SegmentReserve' : [ 0x20, ['unsigned long long']],
'SegmentCommit' : [ 0x28, ['unsigned long long']],
'DeCommitFreeBlockThreshold' : [ 0x30, ['unsigned long long']],
'DeCommitTotalFreeThreshold' : [ 0x38, ['unsigned long long']],
'TotalFreeSize' : [ 0x40, ['unsigned long long']],
'MaximumAllocationSize' : [ 0x48, ['unsigned long long']],
'ProcessHeapsListIndex' : [ 0x50, ['unsigned short']],
'HeaderValidateLength' : [ 0x52, ['unsigned short']],
'HeaderValidateCopy' : [ 0x58, ['pointer64', ['void']]],
'NextAvailableTagIndex' : [ 0x60, ['unsigned short']],
'MaximumTagIndex' : [ 0x62, ['unsigned short']],
'TagEntries' : [ 0x68, ['pointer64', ['_HEAP_TAG_ENTRY']]],
'UCRSegments' : [ 0x70, ['pointer64', ['_HEAP_UCR_SEGMENT']]],
'UnusedUnCommittedRanges' : [ 0x78, ['pointer64', ['_HEAP_UNCOMMMTTED_RANGE']]],
'AlignRound' : [ 0x80, ['unsigned long long']],
'AlignMask' : [ 0x88, ['unsigned long long']],
'VirtualAllocdBlocks' : [ 0x90, ['_LIST_ENTRY']],
'Segments' : [ 0xa0, ['array', 64, ['pointer64', ['_HEAP_SEGMENT']]]],
'u' : [ 0x2a0, ['__unnamed_1282']],
'u2' : [ 0x2b0, ['__unnamed_1284']],
'AllocatorBackTraceIndex' : [ 0x2b2, ['unsigned short']],
'NonDedicatedListLength' : [ 0x2b4, ['unsigned long']],
'LargeBlocksIndex' : [ 0x2b8, ['pointer64', ['void']]],
'PseudoTagEntries' : [ 0x2c0, ['pointer64', ['_HEAP_PSEUDO_TAG_ENTRY']]],
'FreeLists' : [ 0x2c8, ['array', 128, ['_LIST_ENTRY']]],
'LockVariable' : [ 0xac8, ['pointer64', ['_HEAP_LOCK']]],
'CommitRoutine' : [ 0xad0, ['pointer64', ['void']]],
'FrontEndHeap' : [ 0xad8, ['pointer64', ['void']]],
'FrontHeapLockCount' : [ 0xae0, ['unsigned short']],
'FrontEndHeapType' : [ 0xae2, ['unsigned char']],
'LastSegmentIndex' : [ 0xae3, ['unsigned char']],
} ],
'_HEAP_ENTRY' : [ 0x10, {
'PreviousBlockPrivateData' : [ 0x0, ['pointer64', ['void']]],
'Size' : [ 0x8, ['unsigned short']],
'PreviousSize' : [ 0xa, ['unsigned short']],
'SmallTagIndex' : [ 0xc, ['unsigned char']],
'Flags' : [ 0xd, ['unsigned char']],
'UnusedBytes' : [ 0xe, ['unsigned char']],
'SegmentIndex' : [ 0xf, ['unsigned char']],
'CompactHeader' : [ 0x8, ['unsigned long long']],
} ],
'_HEAP_SEGMENT' : [ 0x68, {
'Entry' : [ 0x0, ['_HEAP_ENTRY']],
'Signature' : [ 0x10, ['unsigned long']],
'Flags' : [ 0x14, ['unsigned long']],
'Heap' : [ 0x18, ['pointer64', ['_HEAP']]],
'LargestUnCommittedRange' : [ 0x20, ['unsigned long long']],
'BaseAddress' : [ 0x28, ['pointer64', ['void']]],
'NumberOfPages' : [ 0x30, ['unsigned long']],
'FirstEntry' : [ 0x38, ['pointer64', ['_HEAP_ENTRY']]],
'LastValidEntry' : [ 0x40, ['pointer64', ['_HEAP_ENTRY']]],
'NumberOfUnCommittedPages' : [ 0x48, ['unsigned long']],
'NumberOfUnCommittedRanges' : [ 0x4c, ['unsigned long']],
'UnCommittedRanges' : [ 0x50, ['pointer64', ['_HEAP_UNCOMMMTTED_RANGE']]],
'AllocatorBackTraceIndex' : [ 0x58, ['unsigned short']],
'Reserved' : [ 0x5a, ['unsigned short']],
'LastEntryInSegment' : [ 0x60, ['pointer64', ['_HEAP_ENTRY']]],
} ],
'_HEAP_SUBSEGMENT' : [ 0x30, {
'Bucket' : [ 0x0, ['pointer64', ['void']]],
'UserBlocks' : [ 0x8, ['pointer64', ['_HEAP_USERDATA_HEADER']]],
'AggregateExchg' : [ 0x10, ['_INTERLOCK_SEQ']],
'BlockSize' : [ 0x18, ['unsigned short']],
'FreeThreshold' : [ 0x1a, ['unsigned short']],
'BlockCount' : [ 0x1c, ['unsigned short']],
'SizeIndex' : [ 0x1e, ['unsigned char']],
'AffinityIndex' : [ 0x1f, ['unsigned char']],
'Alignment' : [ 0x18, ['array', 2, ['unsigned long']]],
'SFreeListEntry' : [ 0x20, ['_SINGLE_LIST_ENTRY']],
'Lock' : [ 0x28, ['unsigned long']],
} ],
'_TOKEN' : [ 0xd0, {
'TokenSource' : [ 0x0, ['_TOKEN_SOURCE']],
'TokenId' : [ 0x10, ['_LUID']],
'AuthenticationId' : [ 0x18, ['_LUID']],
'ParentTokenId' : [ 0x20, ['_LUID']],
'ExpirationTime' : [ 0x28, ['_LARGE_INTEGER']],
'TokenLock' : [ 0x30, ['pointer64', ['_ERESOURCE']]],
'AuditPolicy' : [ 0x38, ['_SEP_AUDIT_POLICY']],
'ModifiedId' : [ 0x40, ['_LUID']],
'SessionId' : [ 0x48, ['unsigned long']],
'UserAndGroupCount' : [ 0x4c, ['unsigned long']],
'RestrictedSidCount' : [ 0x50, ['unsigned long']],
'PrivilegeCount' : [ 0x54, ['unsigned long']],
'VariableLength' : [ 0x58, ['unsigned long']],
'DynamicCharged' : [ 0x5c, ['unsigned long']],
'DynamicAvailable' : [ 0x60, ['unsigned long']],
'DefaultOwnerIndex' : [ 0x64, ['unsigned long']],
'UserAndGroups' : [ 0x68, ['pointer64', ['_SID_AND_ATTRIBUTES']]],
'RestrictedSids' : [ 0x70, ['pointer64', ['_SID_AND_ATTRIBUTES']]],
'PrimaryGroup' : [ 0x78, ['pointer64', ['void']]],
'Privileges' : [ 0x80, ['pointer64', ['_LUID_AND_ATTRIBUTES']]],
'DynamicPart' : [ 0x88, ['pointer64', ['unsigned long']]],
'DefaultDacl' : [ 0x90, ['pointer64', ['_ACL']]],
'TokenType' : [ 0x98, ['Enumeration', dict(target = 'long', choices = {1: 'TokenPrimary', 2: 'TokenImpersonation'})]],
'ImpersonationLevel' : [ 0x9c, ['Enumeration', dict(target = 'long', choices = {0: 'SecurityAnonymous', 1: 'SecurityIdentification', 2: 'SecurityImpersonation', 3: 'SecurityDelegation'})]],
'TokenFlags' : [ 0xa0, ['unsigned char']],
'TokenInUse' : [ 0xa1, ['unsigned char']],
'ProxyData' : [ 0xa8, ['pointer64', ['_SECURITY_TOKEN_PROXY_DATA']]],
'AuditData' : [ 0xb0, ['pointer64', ['_SECURITY_TOKEN_AUDIT_DATA']]],
'LogonSession' : [ 0xb8, ['pointer64', ['_SEP_LOGON_SESSION_REFERENCES']]],
'OriginatingLogonSession' : [ 0xc0, ['_LUID']],
'VariablePart' : [ 0xc8, ['unsigned long']],
} ],
'_SEP_LOGON_SESSION_REFERENCES' : [ 0x20, {
'Next' : [ 0x0, ['pointer64', ['_SEP_LOGON_SESSION_REFERENCES']]],
'LogonId' : [ 0x8, ['_LUID']],
'ReferenceCount' : [ 0x10, ['unsigned long']],
'Flags' : [ 0x14, ['unsigned long']],
'pDeviceMap' : [ 0x18, ['pointer64', ['_DEVICE_MAP']]],
} ],
'_TEB' : [ 0x17d8, {
'NtTib' : [ 0x0, ['_NT_TIB']],
'EnvironmentPointer' : [ 0x38, ['pointer64', ['void']]],
'ClientId' : [ 0x40, ['_CLIENT_ID']],
'ActiveRpcHandle' : [ 0x50, ['pointer64', ['void']]],
'ThreadLocalStoragePointer' : [ 0x58, ['pointer64', ['void']]],
'ProcessEnvironmentBlock' : [ 0x60, ['pointer64', ['_PEB']]],
'LastErrorValue' : [ 0x68, ['unsigned long']],
'CountOfOwnedCriticalSections' : [ 0x6c, ['unsigned long']],
'CsrClientThread' : [ 0x70, ['pointer64', ['void']]],
'Win32ThreadInfo' : [ 0x78, ['pointer64', ['void']]],
'User32Reserved' : [ 0x80, ['array', 26, ['unsigned long']]],
'UserReserved' : [ 0xe8, ['array', 5, ['unsigned long']]],
'WOW32Reserved' : [ 0x100, ['pointer64', ['void']]],
'CurrentLocale' : [ 0x108, ['unsigned long']],
'FpSoftwareStatusRegister' : [ 0x10c, ['unsigned long']],
'SystemReserved1' : [ 0x110, ['array', 54, ['pointer64', ['void']]]],
'ExceptionCode' : [ 0x2c0, ['long']],
'ActivationContextStackPointer' : [ 0x2c8, ['pointer64', ['_ACTIVATION_CONTEXT_STACK']]],
'SpareBytes1' : [ 0x2d0, ['array', 28, ['unsigned char']]],
'GdiTebBatch' : [ 0x2f0, ['_GDI_TEB_BATCH']],
'RealClientId' : [ 0x7d8, ['_CLIENT_ID']],
'GdiCachedProcessHandle' : [ 0x7e8, ['pointer64', ['void']]],
'GdiClientPID' : [ 0x7f0, ['unsigned long']],
'GdiClientTID' : [ 0x7f4, ['unsigned long']],
'GdiThreadLocalInfo' : [ 0x7f8, ['pointer64', ['void']]],
'Win32ClientInfo' : [ 0x800, ['array', 62, ['unsigned long long']]],
'glDispatchTable' : [ 0x9f0, ['array', 233, ['pointer64', ['void']]]],
'glReserved1' : [ 0x1138, ['array', 29, ['unsigned long long']]],
'glReserved2' : [ 0x1220, ['pointer64', ['void']]],
'glSectionInfo' : [ 0x1228, ['pointer64', ['void']]],
'glSection' : [ 0x1230, ['pointer64', ['void']]],
'glTable' : [ 0x1238, ['pointer64', ['void']]],
'glCurrentRC' : [ 0x1240, ['pointer64', ['void']]],
'glContext' : [ 0x1248, ['pointer64', ['void']]],
'LastStatusValue' : [ 0x1250, ['unsigned long']],
'StaticUnicodeString' : [ 0x1258, ['_UNICODE_STRING']],
'StaticUnicodeBuffer' : [ 0x1268, ['array', 261, ['unsigned short']]],
'DeallocationStack' : [ 0x1478, ['pointer64', ['void']]],
'TlsSlots' : [ 0x1480, ['array', 64, ['pointer64', ['void']]]],
'TlsLinks' : [ 0x1680, ['_LIST_ENTRY']],
'Vdm' : [ 0x1690, ['pointer64', ['void']]],
'ReservedForNtRpc' : [ 0x1698, ['pointer64', ['void']]],
'DbgSsReserved' : [ 0x16a0, ['array', 2, ['pointer64', ['void']]]],
'HardErrorMode' : [ 0x16b0, ['unsigned long']],
'Instrumentation' : [ 0x16b8, ['array', 14, ['pointer64', ['void']]]],
'SubProcessTag' : [ 0x1728, ['pointer64', ['void']]],
'EtwTraceData' : [ 0x1730, ['pointer64', ['void']]],
'WinSockData' : [ 0x1738, ['pointer64', ['void']]],
'GdiBatchCount' : [ 0x1740, ['unsigned long']],
'InDbgPrint' : [ 0x1744, ['unsigned char']],
'FreeStackOnTermination' : [ 0x1745, ['unsigned char']],
'HasFiberData' : [ 0x1746, ['unsigned char']],
'IdealProcessor' : [ 0x1747, ['unsigned char']],
'GuaranteedStackBytes' : [ 0x1748, ['unsigned long']],
'ReservedForPerf' : [ 0x1750, ['pointer64', ['void']]],
'ReservedForOle' : [ 0x1758, ['pointer64', ['void']]],
'WaitingOnLoaderLock' : [ 0x1760, ['unsigned long']],
'SparePointer1' : [ 0x1768, ['unsigned long long']],
'SoftPatchPtr1' : [ 0x1770, ['unsigned long long']],
'SoftPatchPtr2' : [ 0x1778, ['unsigned long long']],
'TlsExpansionSlots' : [ 0x1780, ['pointer64', ['pointer64', ['void']]]],
'DeallocationBStore' : [ 0x1788, ['pointer64', ['void']]],
'BStoreLimit' : [ 0x1790, ['pointer64', ['void']]],
'ImpersonationLocale' : [ 0x1798, ['unsigned long']],
'IsImpersonating' : [ 0x179c, ['unsigned long']],
'NlsCache' : [ 0x17a0, ['pointer64', ['void']]],
'pShimData' : [ 0x17a8, ['pointer64', ['void']]],
'HeapVirtualAffinity' : [ 0x17b0, ['unsigned long']],
'CurrentTransactionHandle' : [ 0x17b8, ['pointer64', ['void']]],
'ActiveFrame' : [ 0x17c0, ['pointer64', ['_TEB_ACTIVE_FRAME']]],
'FlsData' : [ 0x17c8, ['pointer64', ['void']]],
'SafeThunkCall' : [ 0x17d0, ['unsigned char']],
'BooleanSpare' : [ 0x17d1, ['array', 3, ['unsigned char']]],
} ],
'_HEAP_UCR_SEGMENT' : [ 0x20, {
'Next' : [ 0x0, ['pointer64', ['_HEAP_UCR_SEGMENT']]],
'ReservedSize' : [ 0x8, ['unsigned long long']],
'CommittedSize' : [ 0x10, ['unsigned long long']],
'filler' : [ 0x18, ['unsigned long']],
} ],
'_HMAP_TABLE' : [ 0x4000, {
'Table' : [ 0x0, ['array', 512, ['_HMAP_ENTRY']]],
} ],
'_ERESOURCE' : [ 0x68, {
'SystemResourcesList' : [ 0x0, ['_LIST_ENTRY']],
'OwnerTable' : [ 0x10, ['pointer64', ['_OWNER_ENTRY']]],
'ActiveCount' : [ 0x18, ['short']],
'Flag' : [ 0x1a, ['unsigned short']],
'SharedWaiters' : [ 0x20, ['pointer64', ['_KSEMAPHORE']]],
'ExclusiveWaiters' : [ 0x28, ['pointer64', ['_KEVENT']]],
'OwnerThreads' : [ 0x30, ['array', 2, ['_OWNER_ENTRY']]],
'ContentionCount' : [ 0x50, ['unsigned long']],
'NumberOfSharedWaiters' : [ 0x54, ['unsigned short']],
'NumberOfExclusiveWaiters' : [ 0x56, ['unsigned short']],
'Address' : [ 0x58, ['pointer64', ['void']]],
'CreatorBackTraceIndex' : [ 0x58, ['unsigned long long']],
'SpinLock' : [ 0x60, ['unsigned long long']],
} ],
'_OBJECT_SYMBOLIC_LINK' : [ 0x38, {
'CreationTime' : [ 0x0, ['_LARGE_INTEGER']],
'LinkTarget' : [ 0x8, ['_UNICODE_STRING']],
'LinkTargetRemaining' : [ 0x18, ['_UNICODE_STRING']],
'LinkTargetObject' : [ 0x28, ['pointer64', ['void']]],
'DosDeviceDriveIndex' : [ 0x30, ['unsigned long']],
} ],
'_POOL_BLOCK_HEAD' : [ 0x20, {
'Header' : [ 0x0, ['_POOL_HEADER']],
'List' : [ 0x10, ['_LIST_ENTRY']],
} ],
'_DISPATCHER_HEADER' : [ 0x18, {
'Type' : [ 0x0, ['unsigned char']],
'Absolute' : [ 0x1, ['unsigned char']],
'NpxIrql' : [ 0x1, ['unsigned char']],
'Size' : [ 0x2, ['unsigned char']],
'Hand' : [ 0x2, ['unsigned char']],
'Inserted' : [ 0x3, ['unsigned char']],
'DebugActive' : [ 0x3, ['unsigned char']],
'Lock' : [ 0x0, ['long']],
'SignalState' : [ 0x4, ['long']],
'WaitListHead' : [ 0x8, ['_LIST_ENTRY']],
} ],
'_LDR_DATA_TABLE_ENTRY' : [ 0x98, {
'InLoadOrderLinks' : [ 0x0, ['_LIST_ENTRY']],
'InMemoryOrderLinks' : [ 0x10, ['_LIST_ENTRY']],
'InInitializationOrderLinks' : [ 0x20, ['_LIST_ENTRY']],
'DllBase' : [ 0x30, ['pointer64', ['void']]],
'EntryPoint' : [ 0x38, ['pointer64', ['void']]],
'SizeOfImage' : [ 0x40, ['unsigned long']],
'FullDllName' : [ 0x48, ['_UNICODE_STRING']],
'BaseDllName' : [ 0x58, ['_UNICODE_STRING']],
'Flags' : [ 0x68, ['unsigned long']],
'LoadCount' : [ 0x6c, ['unsigned short']],
'TlsIndex' : [ 0x6e, ['unsigned short']],
'HashLinks' : [ 0x70, ['_LIST_ENTRY']],
'SectionPointer' : [ 0x70, ['pointer64', ['void']]],
'CheckSum' : [ 0x78, ['unsigned long']],
'TimeDateStamp' : [ 0x80, ['unsigned long']],
'LoadedImports' : [ 0x80, ['pointer64', ['void']]],
'EntryPointActivationContext' : [ 0x88, ['pointer64', ['_ACTIVATION_CONTEXT']]],
'PatchInformation' : [ 0x90, ['pointer64', ['void']]],
} ],
'_HEAP_UNCOMMMTTED_RANGE' : [ 0x20, {
'Next' : [ 0x0, ['pointer64', ['_HEAP_UNCOMMMTTED_RANGE']]],
'Address' : [ 0x8, ['unsigned long long']],
'Size' : [ 0x10, ['unsigned long long']],
'filler' : [ 0x18, ['unsigned long']],
} ],
'_LUID_AND_ATTRIBUTES' : [ 0xc, {
'Luid' : [ 0x0, ['_LUID']],
'Attributes' : [ 0x8, ['unsigned long']],
} ],
'_VI_DEADLOCK_GLOBALS' : [ 0x1e0, {
'Nodes' : [ 0x0, ['array', 2, ['unsigned long']]],
'Resources' : [ 0x8, ['array', 2, ['unsigned long']]],
'Threads' : [ 0x10, ['array', 2, ['unsigned long']]],
'TimeAcquire' : [ 0x18, ['long long']],
'TimeRelease' : [ 0x20, ['long long']],
'BytesAllocated' : [ 0x28, ['unsigned long long']],
'ResourceDatabase' : [ 0x30, ['pointer64', ['_LIST_ENTRY']]],
'ThreadDatabase' : [ 0x38, ['pointer64', ['_LIST_ENTRY']]],
'AllocationFailures' : [ 0x40, ['unsigned long']],
'NodesTrimmedBasedOnAge' : [ 0x44, ['unsigned long']],
'NodesTrimmedBasedOnCount' : [ 0x48, ['unsigned long']],
'NodesSearched' : [ 0x4c, ['unsigned long']],
'MaxNodesSearched' : [ 0x50, ['unsigned long']],
'SequenceNumber' : [ 0x54, ['unsigned long']],
'RecursionDepthLimit' : [ 0x58, ['unsigned long']],
'SearchedNodesLimit' : [ 0x5c, ['unsigned long']],
'DepthLimitHits' : [ 0x60, ['unsigned long']],
'SearchLimitHits' : [ 0x64, ['unsigned long']],
'ABC_ACB_Skipped' : [ 0x68, ['unsigned long']],
'OutOfOrderReleases' : [ 0x6c, ['unsigned long']],
'NodesReleasedOutOfOrder' : [ 0x70, ['unsigned long']],
'TotalReleases' : [ 0x74, ['unsigned long']],
'RootNodesDeleted' : [ 0x78, ['unsigned long']],
'ForgetHistoryCounter' : [ 0x7c, ['unsigned long']],
'PoolTrimCounter' : [ 0x80, ['unsigned long']],
'FreeResourceList' : [ 0x88, ['_LIST_ENTRY']],
'FreeThreadList' : [ 0x98, ['_LIST_ENTRY']],
'FreeNodeList' : [ 0xa8, ['_LIST_ENTRY']],
'FreeResourceCount' : [ 0xb8, ['unsigned long']],
'FreeThreadCount' : [ 0xbc, ['unsigned long']],
'FreeNodeCount' : [ 0xc0, ['unsigned long']],
'Instigator' : [ 0xc8, ['pointer64', ['void']]],
'NumberOfParticipants' : [ 0xd0, ['unsigned long']],
'Participant' : [ 0xd8, ['array', 32, ['pointer64', ['_VI_DEADLOCK_NODE']]]],
'CacheReductionInProgress' : [ 0x1d8, ['unsigned long']],
} ],
'_THERMAL_INFORMATION' : [ 0x58, {
'ThermalStamp' : [ 0x0, ['unsigned long']],
'ThermalConstant1' : [ 0x4, ['unsigned long']],
'ThermalConstant2' : [ 0x8, ['unsigned long']],
'Processors' : [ 0x10, ['unsigned long long']],
'SamplingPeriod' : [ 0x18, ['unsigned long']],
'CurrentTemperature' : [ 0x1c, ['unsigned long']],
'PassiveTripPoint' : [ 0x20, ['unsigned long']],
'CriticalTripPoint' : [ 0x24, ['unsigned long']],
'ActiveTripPointCount' : [ 0x28, ['unsigned char']],
'ActiveTripPoint' : [ 0x2c, ['array', 10, ['unsigned long']]],
} ],
'_DBGKD_SEARCH_MEMORY' : [ 0x18, {
'SearchAddress' : [ 0x0, ['unsigned long long']],
'FoundAddress' : [ 0x0, ['unsigned long long']],
'SearchLength' : [ 0x8, ['unsigned long long']],
'PatternLength' : [ 0x10, ['unsigned long']],
} ],
'_SECTION_OBJECT' : [ 0x30, {
'StartingVa' : [ 0x0, ['pointer64', ['void']]],
'EndingVa' : [ 0x8, ['pointer64', ['void']]],
'Parent' : [ 0x10, ['pointer64', ['void']]],
'LeftChild' : [ 0x18, ['pointer64', ['void']]],
'RightChild' : [ 0x20, ['pointer64', ['void']]],
'Segment' : [ 0x28, ['pointer64', ['_SEGMENT_OBJECT']]],
} ],
'_POWER_STATE' : [ 0x4, {
'SystemState' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DeviceState' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
} ],
'_KAPC' : [ 0x58, {
'Type' : [ 0x0, ['unsigned char']],
'SpareByte0' : [ 0x1, ['unsigned char']],
'Size' : [ 0x2, ['unsigned char']],
'SpareByte1' : [ 0x3, ['unsigned char']],
'SpareLong0' : [ 0x4, ['unsigned long']],
'Thread' : [ 0x8, ['pointer64', ['_KTHREAD']]],
'ApcListEntry' : [ 0x10, ['_LIST_ENTRY']],
'KernelRoutine' : [ 0x20, ['pointer64', ['void']]],
'RundownRoutine' : [ 0x28, ['pointer64', ['void']]],
'NormalRoutine' : [ 0x30, ['pointer64', ['void']]],
'NormalContext' : [ 0x38, ['pointer64', ['void']]],
'SystemArgument1' : [ 0x40, ['pointer64', ['void']]],
'SystemArgument2' : [ 0x48, ['pointer64', ['void']]],
'ApcStateIndex' : [ 0x50, ['unsigned char']],
'ApcMode' : [ 0x51, ['unsigned char']],
'Inserted' : [ 0x52, ['unsigned char']],
} ],
'_WMI_LOGGER_CONTEXT' : [ 0x280, {
'BufferSpinLock' : [ 0x0, ['unsigned long long']],
'StartTime' : [ 0x8, ['_LARGE_INTEGER']],
'LogFileHandle' : [ 0x10, ['pointer64', ['void']]],
'LoggerSemaphore' : [ 0x18, ['_KSEMAPHORE']],
'LoggerThread' : [ 0x38, ['pointer64', ['_ETHREAD']]],
'LoggerEvent' : [ 0x40, ['_KEVENT']],
'FlushEvent' : [ 0x58, ['_KEVENT']],
'LoggerStatus' : [ 0x70, ['long']],
'LoggerId' : [ 0x74, ['unsigned long']],
'BuffersAvailable' : [ 0x78, ['long']],
'UsePerfClock' : [ 0x7c, ['unsigned long']],
'WriteFailureLimit' : [ 0x80, ['unsigned long']],
'BuffersDirty' : [ 0x84, ['long']],
'BuffersInUse' : [ 0x88, ['long']],
'SwitchingInProgress' : [ 0x8c, ['unsigned long']],
'FreeList' : [ 0x90, ['_SLIST_HEADER']],
'FlushList' : [ 0xa0, ['_SLIST_HEADER']],
'WaitList' : [ 0xb0, ['_SLIST_HEADER']],
'GlobalList' : [ 0xc0, ['_SLIST_HEADER']],
'ProcessorBuffers' : [ 0xd0, ['pointer64', ['pointer64', ['_WMI_BUFFER_HEADER']]]],
'LoggerName' : [ 0xd8, ['_UNICODE_STRING']],
'LogFileName' : [ 0xe8, ['_UNICODE_STRING']],
'LogFilePattern' : [ 0xf8, ['_UNICODE_STRING']],
'NewLogFileName' : [ 0x108, ['_UNICODE_STRING']],
'EndPageMarker' : [ 0x118, ['pointer64', ['unsigned char']]],
'CollectionOn' : [ 0x120, ['long']],
'KernelTraceOn' : [ 0x124, ['unsigned long']],
'PerfLogInTransition' : [ 0x128, ['long']],
'RequestFlag' : [ 0x12c, ['unsigned long']],
'EnableFlags' : [ 0x130, ['unsigned long']],
'MaximumFileSize' : [ 0x134, ['unsigned long']],
'LoggerMode' : [ 0x138, ['unsigned long']],
'LoggerModeFlags' : [ 0x138, ['_WMI_LOGGER_MODE']],
'Wow' : [ 0x13c, ['unsigned long']],
'LastFlushedBuffer' : [ 0x140, ['unsigned long']],
'RefCount' : [ 0x144, ['unsigned long']],
'FlushTimer' : [ 0x148, ['unsigned long']],
'FirstBufferOffset' : [ 0x150, ['_LARGE_INTEGER']],
'ByteOffset' : [ 0x158, ['_LARGE_INTEGER']],
'BufferAgeLimit' : [ 0x160, ['_LARGE_INTEGER']],
'MaximumBuffers' : [ 0x168, ['unsigned long']],
'MinimumBuffers' : [ 0x16c, ['unsigned long']],
'EventsLost' : [ 0x170, ['unsigned long']],
'BuffersWritten' : [ 0x174, ['unsigned long']],
'LogBuffersLost' : [ 0x178, ['unsigned long']],
'RealTimeBuffersLost' : [ 0x17c, ['unsigned long']],
'BufferSize' : [ 0x180, ['unsigned long']],
'NumberOfBuffers' : [ 0x184, ['long']],
'SequencePtr' : [ 0x188, ['pointer64', ['long']]],
'InstanceGuid' : [ 0x190, ['_GUID']],
'LoggerHeader' : [ 0x1a0, ['pointer64', ['void']]],
'GetCpuClock' : [ 0x1a8, ['pointer64', ['void']]],
'ClientSecurityContext' : [ 0x1b0, ['_SECURITY_CLIENT_CONTEXT']],
'LoggerExtension' : [ 0x1f8, ['pointer64', ['void']]],
'ReleaseQueue' : [ 0x200, ['long']],
'EnableFlagExtension' : [ 0x204, ['_TRACE_ENABLE_FLAG_EXTENSION']],
'LocalSequence' : [ 0x208, ['unsigned long']],
'MaximumIrql' : [ 0x20c, ['unsigned long']],
'EnableFlagArray' : [ 0x210, ['pointer64', ['unsigned long']]],
'LoggerMutex' : [ 0x218, ['_KMUTANT']],
'MutexCount' : [ 0x250, ['long']],
'FileCounter' : [ 0x254, ['long']],
'BufferCallback' : [ 0x258, ['pointer64', ['void']]],
'CallbackContext' : [ 0x260, ['pointer64', ['void']]],
'PoolType' : [ 0x268, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'ReferenceSystemTime' : [ 0x270, ['_LARGE_INTEGER']],
'ReferenceTimeStamp' : [ 0x278, ['_LARGE_INTEGER']],
} ],
'_SEGMENT_OBJECT' : [ 0x48, {
'BaseAddress' : [ 0x0, ['pointer64', ['void']]],
'TotalNumberOfPtes' : [ 0x8, ['unsigned long']],
'SizeOfSegment' : [ 0x10, ['_LARGE_INTEGER']],
'NonExtendedPtes' : [ 0x18, ['unsigned long']],
'ImageCommitment' : [ 0x1c, ['unsigned long']],
'ControlArea' : [ 0x20, ['pointer64', ['_CONTROL_AREA']]],
'Subsection' : [ 0x28, ['pointer64', ['_SUBSECTION']]],
'LargeControlArea' : [ 0x30, ['pointer64', ['_LARGE_CONTROL_AREA']]],
'MmSectionFlags' : [ 0x38, ['pointer64', ['_MMSECTION_FLAGS']]],
'MmSubSectionFlags' : [ 0x40, ['pointer64', ['_MMSUBSECTION_FLAGS']]],
} ],
'__unnamed_13b7' : [ 0x4, {
'LongFlags' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x0, ['_MMSECTION_FLAGS']],
} ],
'_CONTROL_AREA' : [ 0x48, {
'Segment' : [ 0x0, ['pointer64', ['_SEGMENT']]],
'DereferenceList' : [ 0x8, ['_LIST_ENTRY']],
'NumberOfSectionReferences' : [ 0x18, ['unsigned long']],
'NumberOfPfnReferences' : [ 0x1c, ['unsigned long']],
'NumberOfMappedViews' : [ 0x20, ['unsigned long']],
'NumberOfSystemCacheViews' : [ 0x24, ['unsigned long']],
'NumberOfUserReferences' : [ 0x28, ['unsigned long']],
'u' : [ 0x2c, ['__unnamed_13b7']],
'FilePointer' : [ 0x30, ['pointer64', ['_FILE_OBJECT']]],
'WaitingForDeletion' : [ 0x38, ['pointer64', ['_EVENT_COUNTER']]],
'ModifiedWriteCount' : [ 0x40, ['unsigned short']],
'FlushInProgressCount' : [ 0x42, ['unsigned short']],
'WritableUserReferences' : [ 0x44, ['unsigned long']],
} ],
'_HANDLE_TABLE' : [ 0x70, {
'TableCode' : [ 0x0, ['unsigned long long']],
'QuotaProcess' : [ 0x8, ['pointer64', ['_EPROCESS']]],
'UniqueProcessId' : [ 0x10, ['pointer64', ['void']]],
'HandleTableLock' : [ 0x18, ['array', 4, ['_EX_PUSH_LOCK']]],
'HandleTableList' : [ 0x38, ['_LIST_ENTRY']],
'HandleContentionEvent' : [ 0x48, ['_EX_PUSH_LOCK']],
'DebugInfo' : [ 0x50, ['pointer64', ['_HANDLE_TRACE_DEBUG_INFO']]],
'ExtraInfoPages' : [ 0x58, ['long']],
'FirstFree' : [ 0x5c, ['unsigned long']],
'LastFree' : [ 0x60, ['unsigned long']],
'NextHandleNeedingPool' : [ 0x64, ['unsigned long']],
'HandleCount' : [ 0x68, ['long']],
'Flags' : [ 0x6c, ['unsigned long']],
'StrictFIFO' : [ 0x6c, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
} ],
'_POOL_HEADER' : [ 0x10, {
'PreviousSize' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long')]],
'PoolIndex' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 16, native_type='unsigned long')]],
'BlockSize' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 24, native_type='unsigned long')]],
'PoolType' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
'Ulong1' : [ 0x0, ['unsigned long']],
'PoolTag' : [ 0x4, ['unsigned long']],
'ProcessBilled' : [ 0x8, ['pointer64', ['_EPROCESS']]],
'AllocatorBackTraceIndex' : [ 0x8, ['unsigned short']],
'PoolTagHash' : [ 0xa, ['unsigned short']],
} ],
'_MMVAD_FLAGS2' : [ 0x4, {
'FileOffset' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 24, native_type='unsigned long')]],
'SecNoChange' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long')]],
'OneSecured' : [ 0x0, ['BitField', dict(start_bit = 25, end_bit = 26, native_type='unsigned long')]],
'MultipleSecured' : [ 0x0, ['BitField', dict(start_bit = 26, end_bit = 27, native_type='unsigned long')]],
'ReadOnly' : [ 0x0, ['BitField', dict(start_bit = 27, end_bit = 28, native_type='unsigned long')]],
'LongVad' : [ 0x0, ['BitField', dict(start_bit = 28, end_bit = 29, native_type='unsigned long')]],
'ExtendableFile' : [ 0x0, ['BitField', dict(start_bit = 29, end_bit = 30, native_type='unsigned long')]],
'Inherit' : [ 0x0, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
} ],
'_TEB_ACTIVE_FRAME' : [ 0x18, {
'Flags' : [ 0x0, ['unsigned long']],
'Previous' : [ 0x8, ['pointer64', ['_TEB_ACTIVE_FRAME']]],
'Context' : [ 0x10, ['pointer64', ['_TEB_ACTIVE_FRAME_CONTEXT']]],
} ],
'_XMM_SAVE_AREA32' : [ 0x200, {
'ControlWord' : [ 0x0, ['unsigned short']],
'StatusWord' : [ 0x2, ['unsigned short']],
'TagWord' : [ 0x4, ['unsigned char']],
'Reserved1' : [ 0x5, ['unsigned char']],
'ErrorOpcode' : [ 0x6, ['unsigned short']],
'ErrorOffset' : [ 0x8, ['unsigned long']],
'ErrorSelector' : [ 0xc, ['unsigned short']],
'Reserved2' : [ 0xe, ['unsigned short']],
'DataOffset' : [ 0x10, ['unsigned long']],
'DataSelector' : [ 0x14, ['unsigned short']],
'Reserved3' : [ 0x16, ['unsigned short']],
'MxCsr' : [ 0x18, ['unsigned long']],
'MxCsr_Mask' : [ 0x1c, ['unsigned long']],
'FloatRegisters' : [ 0x20, ['array', 8, ['_M128A']]],
'XmmRegisters' : [ 0xa0, ['array', 16, ['_M128A']]],
'Reserved4' : [ 0x1a0, ['array', 96, ['unsigned char']]],
} ],
'_MMPTE_PROTOTYPE' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Unused0' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 8, native_type='unsigned long long')]],
'ReadOnly' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'Unused1' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 16, native_type='unsigned long long')]],
'ProtoAddress' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 64, native_type='long long')]],
} ],
'_MMSUPPORT' : [ 0x58, {
'WorkingSetExpansionLinks' : [ 0x0, ['_LIST_ENTRY']],
'LastTrimTime' : [ 0x10, ['_LARGE_INTEGER']],
'Flags' : [ 0x18, ['_MMSUPPORT_FLAGS']],
'PageFaultCount' : [ 0x1c, ['unsigned long']],
'PeakWorkingSetSize' : [ 0x20, ['unsigned long']],
'GrowthSinceLastEstimate' : [ 0x24, ['unsigned long']],
'MinimumWorkingSetSize' : [ 0x28, ['unsigned long']],
'MaximumWorkingSetSize' : [ 0x2c, ['unsigned long']],
'VmWorkingSetList' : [ 0x30, ['pointer64', ['_MMWSL']]],
'Claim' : [ 0x38, ['unsigned long']],
'NextEstimationSlot' : [ 0x3c, ['unsigned long']],
'NextAgingSlot' : [ 0x40, ['unsigned long']],
'EstimatedAvailable' : [ 0x44, ['unsigned long']],
'WorkingSetSize' : [ 0x48, ['unsigned long']],
'WorkingSetMutex' : [ 0x50, ['_EX_PUSH_LOCK']],
} ],
'_EX_WORK_QUEUE' : [ 0x58, {
'WorkerQueue' : [ 0x0, ['_KQUEUE']],
'DynamicThreadCount' : [ 0x40, ['unsigned long']],
'WorkItemsProcessed' : [ 0x44, ['unsigned long']],
'WorkItemsProcessedLastPass' : [ 0x48, ['unsigned long']],
'QueueDepthLastPass' : [ 0x4c, ['unsigned long']],
'Info' : [ 0x50, ['EX_QUEUE_WORKER_INFO']],
} ],
'_MMSUBSECTION_FLAGS' : [ 0x4, {
'ReadOnly' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'ReadWrite' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'SubsectionStatic' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'GlobalMemory' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 9, native_type='unsigned long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'StartingSector4132' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 20, native_type='unsigned long')]],
'SectorEndOffset' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 32, native_type='unsigned long')]],
} ],
'_KMUTANT' : [ 0x38, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'MutantListEntry' : [ 0x18, ['_LIST_ENTRY']],
'OwnerThread' : [ 0x28, ['pointer64', ['_KTHREAD']]],
'Abandoned' : [ 0x30, ['unsigned char']],
'ApcDisable' : [ 0x31, ['unsigned char']],
} ],
'_HEAP_TAG_ENTRY' : [ 0x48, {
'Allocs' : [ 0x0, ['unsigned long']],
'Frees' : [ 0x4, ['unsigned long']],
'Size' : [ 0x8, ['unsigned long long']],
'TagIndex' : [ 0x10, ['unsigned short']],
'CreatorBackTraceIndex' : [ 0x12, ['unsigned short']],
'TagName' : [ 0x14, ['array', 24, ['unsigned short']]],
} ],
'_EPROCESS_QUOTA_BLOCK' : [ 0x78, {
'QuotaEntry' : [ 0x0, ['array', 3, ['_EPROCESS_QUOTA_ENTRY']]],
'QuotaList' : [ 0x60, ['_LIST_ENTRY']],
'ReferenceCount' : [ 0x70, ['unsigned long']],
'ProcessCount' : [ 0x74, ['unsigned long']],
} ],
'_NT_TIB' : [ 0x38, {
'ExceptionList' : [ 0x0, ['pointer64', ['_EXCEPTION_REGISTRATION_RECORD']]],
'StackBase' : [ 0x8, ['pointer64', ['void']]],
'StackLimit' : [ 0x10, ['pointer64', ['void']]],
'SubSystemTib' : [ 0x18, ['pointer64', ['void']]],
'FiberData' : [ 0x20, ['pointer64', ['void']]],
'Version' : [ 0x20, ['unsigned long']],
'ArbitraryUserPointer' : [ 0x28, ['pointer64', ['void']]],
'Self' : [ 0x30, ['pointer64', ['_NT_TIB']]],
} ],
'_EVENT_COUNTER' : [ 0x30, {
'ListEntry' : [ 0x0, ['_SLIST_ENTRY']],
'RefCount' : [ 0x10, ['unsigned long']],
'Event' : [ 0x18, ['_KEVENT']],
} ],
'_EJOB' : [ 0x220, {
'Event' : [ 0x0, ['_KEVENT']],
'JobLinks' : [ 0x18, ['_LIST_ENTRY']],
'ProcessListHead' : [ 0x28, ['_LIST_ENTRY']],
'JobLock' : [ 0x38, ['_ERESOURCE']],
'TotalUserTime' : [ 0xa0, ['_LARGE_INTEGER']],
'TotalKernelTime' : [ 0xa8, ['_LARGE_INTEGER']],
'ThisPeriodTotalUserTime' : [ 0xb0, ['_LARGE_INTEGER']],
'ThisPeriodTotalKernelTime' : [ 0xb8, ['_LARGE_INTEGER']],
'TotalPageFaultCount' : [ 0xc0, ['unsigned long']],
'TotalProcesses' : [ 0xc4, ['unsigned long']],
'ActiveProcesses' : [ 0xc8, ['unsigned long']],
'TotalTerminatedProcesses' : [ 0xcc, ['unsigned long']],
'PerProcessUserTimeLimit' : [ 0xd0, ['_LARGE_INTEGER']],
'PerJobUserTimeLimit' : [ 0xd8, ['_LARGE_INTEGER']],
'LimitFlags' : [ 0xe0, ['unsigned long']],
'MinimumWorkingSetSize' : [ 0xe8, ['unsigned long long']],
'MaximumWorkingSetSize' : [ 0xf0, ['unsigned long long']],
'ActiveProcessLimit' : [ 0xf8, ['unsigned long']],
'Affinity' : [ 0x100, ['unsigned long long']],
'PriorityClass' : [ 0x108, ['unsigned char']],
'UIRestrictionsClass' : [ 0x10c, ['unsigned long']],
'SecurityLimitFlags' : [ 0x110, ['unsigned long']],
'Token' : [ 0x118, ['pointer64', ['void']]],
'Filter' : [ 0x120, ['pointer64', ['_PS_JOB_TOKEN_FILTER']]],
'EndOfJobTimeAction' : [ 0x128, ['unsigned long']],
'CompletionPort' : [ 0x130, ['pointer64', ['void']]],
'CompletionKey' : [ 0x138, ['pointer64', ['void']]],
'SessionId' : [ 0x140, ['unsigned long']],
'SchedulingClass' : [ 0x144, ['unsigned long']],
'ReadOperationCount' : [ 0x148, ['unsigned long long']],
'WriteOperationCount' : [ 0x150, ['unsigned long long']],
'OtherOperationCount' : [ 0x158, ['unsigned long long']],
'ReadTransferCount' : [ 0x160, ['unsigned long long']],
'WriteTransferCount' : [ 0x168, ['unsigned long long']],
'OtherTransferCount' : [ 0x170, ['unsigned long long']],
'IoInfo' : [ 0x178, ['_IO_COUNTERS']],
'ProcessMemoryLimit' : [ 0x1a8, ['unsigned long long']],
'JobMemoryLimit' : [ 0x1b0, ['unsigned long long']],
'PeakProcessMemoryUsed' : [ 0x1b8, ['unsigned long long']],
'PeakJobMemoryUsed' : [ 0x1c0, ['unsigned long long']],
'CurrentJobMemoryUsed' : [ 0x1c8, ['unsigned long long']],
'MemoryLimitsLock' : [ 0x1d0, ['_KGUARDED_MUTEX']],
'JobSetLinks' : [ 0x208, ['_LIST_ENTRY']],
'MemberLevel' : [ 0x218, ['unsigned long']],
'JobFlags' : [ 0x21c, ['unsigned long']],
} ],
'_LARGE_CONTROL_AREA' : [ 0x68, {
'Segment' : [ 0x0, ['pointer64', ['_SEGMENT']]],
'DereferenceList' : [ 0x8, ['_LIST_ENTRY']],
'NumberOfSectionReferences' : [ 0x18, ['unsigned long']],
'NumberOfPfnReferences' : [ 0x1c, ['unsigned long']],
'NumberOfMappedViews' : [ 0x20, ['unsigned long']],
'NumberOfSystemCacheViews' : [ 0x24, ['unsigned long']],
'NumberOfUserReferences' : [ 0x28, ['unsigned long']],
'u' : [ 0x2c, ['__unnamed_13b7']],
'FilePointer' : [ 0x30, ['pointer64', ['_FILE_OBJECT']]],
'WaitingForDeletion' : [ 0x38, ['pointer64', ['_EVENT_COUNTER']]],
'ModifiedWriteCount' : [ 0x40, ['unsigned short']],
'FlushInProgressCount' : [ 0x42, ['unsigned short']],
'WritableUserReferences' : [ 0x44, ['unsigned long']],
'StartingFrame' : [ 0x48, ['unsigned long long']],
'UserGlobalList' : [ 0x50, ['_LIST_ENTRY']],
'SessionId' : [ 0x60, ['unsigned long']],
} ],
'_GUID' : [ 0x10, {
'Data1' : [ 0x0, ['unsigned long']],
'Data2' : [ 0x4, ['unsigned short']],
'Data3' : [ 0x6, ['unsigned short']],
'Data4' : [ 0x8, ['array', 8, ['unsigned char']]],
} ],
'_KGATE' : [ 0x18, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
} ],
'_PS_JOB_TOKEN_FILTER' : [ 0x38, {
'CapturedSidCount' : [ 0x0, ['unsigned long']],
'CapturedSids' : [ 0x8, ['pointer64', ['_SID_AND_ATTRIBUTES']]],
'CapturedSidsLength' : [ 0x10, ['unsigned long']],
'CapturedGroupCount' : [ 0x14, ['unsigned long']],
'CapturedGroups' : [ 0x18, ['pointer64', ['_SID_AND_ATTRIBUTES']]],
'CapturedGroupsLength' : [ 0x20, ['unsigned long']],
'CapturedPrivilegeCount' : [ 0x24, ['unsigned long']],
'CapturedPrivileges' : [ 0x28, ['pointer64', ['_LUID_AND_ATTRIBUTES']]],
'CapturedPrivilegesLength' : [ 0x30, ['unsigned long']],
} ],
'_MM_DRIVER_VERIFIER_DATA' : [ 0x80, {
'Level' : [ 0x0, ['unsigned long']],
'RaiseIrqls' : [ 0x4, ['unsigned long']],
'AcquireSpinLocks' : [ 0x8, ['unsigned long']],
'SynchronizeExecutions' : [ 0xc, ['unsigned long']],
'AllocationsAttempted' : [ 0x10, ['unsigned long']],
'AllocationsSucceeded' : [ 0x14, ['unsigned long']],
'AllocationsSucceededSpecialPool' : [ 0x18, ['unsigned long']],
'AllocationsWithNoTag' : [ 0x1c, ['unsigned long']],
'TrimRequests' : [ 0x20, ['unsigned long']],
'Trims' : [ 0x24, ['unsigned long']],
'AllocationsFailed' : [ 0x28, ['unsigned long']],
'AllocationsFailedDeliberately' : [ 0x2c, ['unsigned long']],
'Loads' : [ 0x30, ['unsigned long']],
'Unloads' : [ 0x34, ['unsigned long']],
'UnTrackedPool' : [ 0x38, ['unsigned long']],
'UserTrims' : [ 0x3c, ['unsigned long']],
'CurrentPagedPoolAllocations' : [ 0x40, ['unsigned long']],
'CurrentNonPagedPoolAllocations' : [ 0x44, ['unsigned long']],
'PeakPagedPoolAllocations' : [ 0x48, ['unsigned long']],
'PeakNonPagedPoolAllocations' : [ 0x4c, ['unsigned long']],
'PagedBytes' : [ 0x50, ['unsigned long long']],
'NonPagedBytes' : [ 0x58, ['unsigned long long']],
'PeakPagedBytes' : [ 0x60, ['unsigned long long']],
'PeakNonPagedBytes' : [ 0x68, ['unsigned long long']],
'BurstAllocationsFailedDeliberately' : [ 0x70, ['unsigned long']],
'SessionTrims' : [ 0x74, ['unsigned long']],
'Reserved' : [ 0x78, ['array', 2, ['unsigned long']]],
} ],
'_IMAGE_FILE_HEADER' : [ 0x14, {
'Machine' : [ 0x0, ['unsigned short']],
'NumberOfSections' : [ 0x2, ['unsigned short']],
'TimeDateStamp' : [ 0x4, ['unsigned long']],
'PointerToSymbolTable' : [ 0x8, ['unsigned long']],
'NumberOfSymbols' : [ 0xc, ['unsigned long']],
'SizeOfOptionalHeader' : [ 0x10, ['unsigned short']],
'Characteristics' : [ 0x12, ['unsigned short']],
} ],
'_MMPTE_HARDWARE' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Writable' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Owner' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'WriteThrough' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'CacheDisable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long long')]],
'Accessed' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long long')]],
'Dirty' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long long')]],
'LargePage' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long long')]],
'Global' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Write' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'PageFrameNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 40, native_type='unsigned long long')]],
'reserved1' : [ 0x0, ['BitField', dict(start_bit = 40, end_bit = 52, native_type='unsigned long long')]],
'SoftwareWsIndex' : [ 0x0, ['BitField', dict(start_bit = 52, end_bit = 63, native_type='unsigned long long')]],
'NoExecute' : [ 0x0, ['BitField', dict(start_bit = 63, end_bit = 64, native_type='unsigned long long')]],
} ],
'_IO_COMPLETION_CONTEXT' : [ 0x10, {
'Port' : [ 0x0, ['pointer64', ['void']]],
'Key' : [ 0x8, ['pointer64', ['void']]],
} ],
'_CALL_HASH_ENTRY' : [ 0x28, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'CallersAddress' : [ 0x10, ['pointer64', ['void']]],
'CallersCaller' : [ 0x18, ['pointer64', ['void']]],
'CallCount' : [ 0x20, ['unsigned long']],
} ],
'_HMAP_ENTRY' : [ 0x20, {
'BlockAddress' : [ 0x0, ['unsigned long long']],
'BinAddress' : [ 0x8, ['unsigned long long']],
'CmView' : [ 0x10, ['pointer64', ['_CM_VIEW_OF_FILE']]],
'MemAlloc' : [ 0x18, ['unsigned long']],
} ],
'_DBGKD_SET_CONTEXT' : [ 0x4, {
'ContextFlags' : [ 0x0, ['unsigned long']],
} ],
'_MMSECTION_FLAGS' : [ 0x4, {
'BeingDeleted' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'BeingCreated' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'BeingPurged' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'NoModifiedWriting' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'FailAllIo' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'Image' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'Based' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'File' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'Networked' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'NoCache' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'PhysicalMemory' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'Reserve' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'Commit' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'FloppyMedia' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'WasPurged' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'UserReference' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'GlobalMemory' : [ 0x0, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'DeleteOnClose' : [ 0x0, ['BitField', dict(start_bit = 18, end_bit = 19, native_type='unsigned long')]],
'FilePointerNull' : [ 0x0, ['BitField', dict(start_bit = 19, end_bit = 20, native_type='unsigned long')]],
'DebugSymbolsLoaded' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'SetMappedFileIoComplete' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'CollidedFlush' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'NoChange' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'filler0' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 25, native_type='unsigned long')]],
'ImageMappedInSystemSpace' : [ 0x0, ['BitField', dict(start_bit = 25, end_bit = 26, native_type='unsigned long')]],
'UserWritable' : [ 0x0, ['BitField', dict(start_bit = 26, end_bit = 27, native_type='unsigned long')]],
'Accessed' : [ 0x0, ['BitField', dict(start_bit = 27, end_bit = 28, native_type='unsigned long')]],
'GlobalOnlyPerSession' : [ 0x0, ['BitField', dict(start_bit = 28, end_bit = 29, native_type='unsigned long')]],
'Rom' : [ 0x0, ['BitField', dict(start_bit = 29, end_bit = 30, native_type='unsigned long')]],
'WriteCombined' : [ 0x0, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'filler' : [ 0x0, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
} ],
'_DEFERRED_WRITE' : [ 0x50, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeByteSize' : [ 0x2, ['short']],
'FileObject' : [ 0x8, ['pointer64', ['_FILE_OBJECT']]],
'BytesToWrite' : [ 0x10, ['unsigned long']],
'DeferredWriteLinks' : [ 0x18, ['_LIST_ENTRY']],
'Event' : [ 0x28, ['pointer64', ['_KEVENT']]],
'PostRoutine' : [ 0x30, ['pointer64', ['void']]],
'Context1' : [ 0x38, ['pointer64', ['void']]],
'Context2' : [ 0x40, ['pointer64', ['void']]],
'LimitModifiedPages' : [ 0x48, ['unsigned char']],
} ],
'_TRACE_ENABLE_FLAG_EXTENSION' : [ 0x4, {
'Offset' : [ 0x0, ['unsigned short']],
'Length' : [ 0x2, ['unsigned char']],
'Flag' : [ 0x3, ['unsigned char']],
} ],
'_SID_AND_ATTRIBUTES' : [ 0x10, {
'Sid' : [ 0x0, ['pointer64', ['void']]],
'Attributes' : [ 0x8, ['unsigned long']],
} ],
'_HIVE_LIST_ENTRY' : [ 0x30, {
'Name' : [ 0x0, ['pointer64', ['unsigned short']]],
'BaseName' : [ 0x8, ['pointer64', ['unsigned short']]],
'CmHive' : [ 0x10, ['pointer64', ['_CMHIVE']]],
'HHiveFlags' : [ 0x18, ['unsigned long']],
'CmHiveFlags' : [ 0x1c, ['unsigned long']],
'CmHive2' : [ 0x20, ['pointer64', ['_CMHIVE']]],
'ThreadFinished' : [ 0x28, ['unsigned char']],
'ThreadStarted' : [ 0x29, ['unsigned char']],
'Allocate' : [ 0x2a, ['unsigned char']],
} ],
'_MMVAD_FLAGS' : [ 0x8, {
'CommitCharge' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 51, native_type='unsigned long long')]],
'NoChange' : [ 0x0, ['BitField', dict(start_bit = 51, end_bit = 52, native_type='unsigned long long')]],
'VadType' : [ 0x0, ['BitField', dict(start_bit = 52, end_bit = 55, native_type='unsigned long long')]],
'MemCommit' : [ 0x0, ['BitField', dict(start_bit = 55, end_bit = 56, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 56, end_bit = 61, native_type='unsigned long long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 61, end_bit = 63, native_type='unsigned long long')]],
'PrivateMemory' : [ 0x0, ['BitField', dict(start_bit = 63, end_bit = 64, native_type='unsigned long long')]],
} ],
'_PS_IMPERSONATION_INFORMATION' : [ 0x10, {
'Token' : [ 0x0, ['pointer64', ['void']]],
'CopyOnOpen' : [ 0x8, ['unsigned char']],
'EffectiveOnly' : [ 0x9, ['unsigned char']],
'ImpersonationLevel' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'SecurityAnonymous', 1: 'SecurityIdentification', 2: 'SecurityImpersonation', 3: 'SecurityDelegation'})]],
} ],
'__unnamed_1472' : [ 0x8, {
'LegacyDeviceNode' : [ 0x0, ['pointer64', ['_DEVICE_NODE']]],
'PendingDeviceRelations' : [ 0x0, ['pointer64', ['_DEVICE_RELATIONS']]],
} ],
'__unnamed_1474' : [ 0x8, {
'NextResourceDeviceNode' : [ 0x0, ['pointer64', ['_DEVICE_NODE']]],
} ],
'__unnamed_1478' : [ 0x20, {
'DockStatus' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'DOCK_NOTDOCKDEVICE', 1: 'DOCK_QUIESCENT', 2: 'DOCK_ARRIVING', 3: 'DOCK_DEPARTING', 4: 'DOCK_EJECTIRP_COMPLETED'})]],
'ListEntry' : [ 0x8, ['_LIST_ENTRY']],
'SerialNumber' : [ 0x18, ['pointer64', ['unsigned short']]],
} ],
'_DEVICE_NODE' : [ 0x1c0, {
'Sibling' : [ 0x0, ['pointer64', ['_DEVICE_NODE']]],
'Child' : [ 0x8, ['pointer64', ['_DEVICE_NODE']]],
'Parent' : [ 0x10, ['pointer64', ['_DEVICE_NODE']]],
'LastChild' : [ 0x18, ['pointer64', ['_DEVICE_NODE']]],
'Level' : [ 0x20, ['unsigned long']],
'Notify' : [ 0x28, ['pointer64', ['_PO_DEVICE_NOTIFY']]],
'State' : [ 0x30, ['Enumeration', dict(target = 'long', choices = {768: 'DeviceNodeUnspecified', 769: 'DeviceNodeUninitialized', 770: 'DeviceNodeInitialized', 771: 'DeviceNodeDriversAdded', 772: 'DeviceNodeResourcesAssigned', 773: 'DeviceNodeStartPending', 774: 'DeviceNodeStartCompletion', 775: 'DeviceNodeStartPostWork', 776: 'DeviceNodeStarted', 777: 'DeviceNodeQueryStopped', 778: 'DeviceNodeStopped', 779: 'DeviceNodeRestartCompletion', 780: 'DeviceNodeEnumeratePending', 781: 'DeviceNodeEnumerateCompletion', 782: 'DeviceNodeAwaitingQueuedDeletion', 783: 'DeviceNodeAwaitingQueuedRemoval', 784: 'DeviceNodeQueryRemoved', 785: 'DeviceNodeRemovePendingCloses', 786: 'DeviceNodeRemoved', 787: 'DeviceNodeDeletePendingCloses', 788: 'DeviceNodeDeleted', 789: 'MaxDeviceNodeState'})]],
'PreviousState' : [ 0x34, ['Enumeration', dict(target = 'long', choices = {768: 'DeviceNodeUnspecified', 769: 'DeviceNodeUninitialized', 770: 'DeviceNodeInitialized', 771: 'DeviceNodeDriversAdded', 772: 'DeviceNodeResourcesAssigned', 773: 'DeviceNodeStartPending', 774: 'DeviceNodeStartCompletion', 775: 'DeviceNodeStartPostWork', 776: 'DeviceNodeStarted', 777: 'DeviceNodeQueryStopped', 778: 'DeviceNodeStopped', 779: 'DeviceNodeRestartCompletion', 780: 'DeviceNodeEnumeratePending', 781: 'DeviceNodeEnumerateCompletion', 782: 'DeviceNodeAwaitingQueuedDeletion', 783: 'DeviceNodeAwaitingQueuedRemoval', 784: 'DeviceNodeQueryRemoved', 785: 'DeviceNodeRemovePendingCloses', 786: 'DeviceNodeRemoved', 787: 'DeviceNodeDeletePendingCloses', 788: 'DeviceNodeDeleted', 789: 'MaxDeviceNodeState'})]],
'StateHistory' : [ 0x38, ['array', -80, ['Enumeration', dict(target = 'long', choices = {768: 'DeviceNodeUnspecified', 769: 'DeviceNodeUninitialized', 770: 'DeviceNodeInitialized', 771: 'DeviceNodeDriversAdded', 772: 'DeviceNodeResourcesAssigned', 773: 'DeviceNodeStartPending', 774: 'DeviceNodeStartCompletion', 775: 'DeviceNodeStartPostWork', 776: 'DeviceNodeStarted', 777: 'DeviceNodeQueryStopped', 778: 'DeviceNodeStopped', 779: 'DeviceNodeRestartCompletion', 780: 'DeviceNodeEnumeratePending', 781: 'DeviceNodeEnumerateCompletion', 782: 'DeviceNodeAwaitingQueuedDeletion', 783: 'DeviceNodeAwaitingQueuedRemoval', 784: 'DeviceNodeQueryRemoved', 785: 'DeviceNodeRemovePendingCloses', 786: 'DeviceNodeRemoved', 787: 'DeviceNodeDeletePendingCloses', 788: 'DeviceNodeDeleted', 789: 'MaxDeviceNodeState'})]]],
'StateHistoryEntry' : [ 0x88, ['unsigned long']],
'CompletionStatus' : [ 0x8c, ['long']],
'PendingIrp' : [ 0x90, ['pointer64', ['_IRP']]],
'Flags' : [ 0x98, ['unsigned long']],
'UserFlags' : [ 0x9c, ['unsigned long']],
'Problem' : [ 0xa0, ['unsigned long']],
'PhysicalDeviceObject' : [ 0xa8, ['pointer64', ['_DEVICE_OBJECT']]],
'ResourceList' : [ 0xb0, ['pointer64', ['_CM_RESOURCE_LIST']]],
'ResourceListTranslated' : [ 0xb8, ['pointer64', ['_CM_RESOURCE_LIST']]],
'InstancePath' : [ 0xc0, ['_UNICODE_STRING']],
'ServiceName' : [ 0xd0, ['_UNICODE_STRING']],
'DuplicatePDO' : [ 0xe0, ['pointer64', ['_DEVICE_OBJECT']]],
'ResourceRequirements' : [ 0xe8, ['pointer64', ['_IO_RESOURCE_REQUIREMENTS_LIST']]],
'InterfaceType' : [ 0xf0, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'BusNumber' : [ 0xf4, ['unsigned long']],
'ChildInterfaceType' : [ 0xf8, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'ChildBusNumber' : [ 0xfc, ['unsigned long']],
'ChildBusTypeIndex' : [ 0x100, ['unsigned short']],
'RemovalPolicy' : [ 0x102, ['unsigned char']],
'HardwareRemovalPolicy' : [ 0x103, ['unsigned char']],
'TargetDeviceNotify' : [ 0x108, ['_LIST_ENTRY']],
'DeviceArbiterList' : [ 0x118, ['_LIST_ENTRY']],
'DeviceTranslatorList' : [ 0x128, ['_LIST_ENTRY']],
'NoTranslatorMask' : [ 0x138, ['unsigned short']],
'QueryTranslatorMask' : [ 0x13a, ['unsigned short']],
'NoArbiterMask' : [ 0x13c, ['unsigned short']],
'QueryArbiterMask' : [ 0x13e, ['unsigned short']],
'OverUsed1' : [ 0x140, ['__unnamed_1472']],
'OverUsed2' : [ 0x148, ['__unnamed_1474']],
'BootResources' : [ 0x150, ['pointer64', ['_CM_RESOURCE_LIST']]],
'CapabilityFlags' : [ 0x158, ['unsigned long']],
'DockInfo' : [ 0x160, ['__unnamed_1478']],
'DisableableDepends' : [ 0x180, ['unsigned long']],
'PendedSetInterfaceState' : [ 0x188, ['_LIST_ENTRY']],
'LegacyBusListEntry' : [ 0x198, ['_LIST_ENTRY']],
'DriverUnloadRetryCount' : [ 0x1a8, ['unsigned long']],
'PreviousParent' : [ 0x1b0, ['pointer64', ['_DEVICE_NODE']]],
'DeletedChildren' : [ 0x1b8, ['unsigned long']],
} ],
'__unnamed_147d' : [ 0x68, {
'CriticalSection' : [ 0x0, ['_RTL_CRITICAL_SECTION']],
'Resource' : [ 0x0, ['_ERESOURCE']],
} ],
'_HEAP_LOCK' : [ 0x68, {
'Lock' : [ 0x0, ['__unnamed_147d']],
} ],
'_PEB64' : [ 0x358, {
'InheritedAddressSpace' : [ 0x0, ['unsigned char']],
'ReadImageFileExecOptions' : [ 0x1, ['unsigned char']],
'BeingDebugged' : [ 0x2, ['unsigned char']],
'BitField' : [ 0x3, ['unsigned char']],
'ImageUsesLargePages' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'SpareBits' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 8, native_type='unsigned char')]],
'Mutant' : [ 0x8, ['unsigned long long']],
'ImageBaseAddress' : [ 0x10, ['unsigned long long']],
'Ldr' : [ 0x18, ['unsigned long long']],
'ProcessParameters' : [ 0x20, ['unsigned long long']],
'SubSystemData' : [ 0x28, ['unsigned long long']],
'ProcessHeap' : [ 0x30, ['unsigned long long']],
'FastPebLock' : [ 0x38, ['unsigned long long']],
'AtlThunkSListPtr' : [ 0x40, ['unsigned long long']],
'SparePtr2' : [ 0x48, ['unsigned long long']],
'EnvironmentUpdateCount' : [ 0x50, ['unsigned long']],
'KernelCallbackTable' : [ 0x58, ['unsigned long long']],
'SystemReserved' : [ 0x60, ['array', 1, ['unsigned long']]],
'SpareUlong' : [ 0x64, ['unsigned long']],
'FreeList' : [ 0x68, ['unsigned long long']],
'TlsExpansionCounter' : [ 0x70, ['unsigned long']],
'TlsBitmap' : [ 0x78, ['unsigned long long']],
'TlsBitmapBits' : [ 0x80, ['array', 2, ['unsigned long']]],
'ReadOnlySharedMemoryBase' : [ 0x88, ['unsigned long long']],
'ReadOnlySharedMemoryHeap' : [ 0x90, ['unsigned long long']],
'ReadOnlyStaticServerData' : [ 0x98, ['unsigned long long']],
'AnsiCodePageData' : [ 0xa0, ['unsigned long long']],
'OemCodePageData' : [ 0xa8, ['unsigned long long']],
'UnicodeCaseTableData' : [ 0xb0, ['unsigned long long']],
'NumberOfProcessors' : [ 0xb8, ['unsigned long']],
'NtGlobalFlag' : [ 0xbc, ['unsigned long']],
'CriticalSectionTimeout' : [ 0xc0, ['_LARGE_INTEGER']],
'HeapSegmentReserve' : [ 0xc8, ['unsigned long long']],
'HeapSegmentCommit' : [ 0xd0, ['unsigned long long']],
'HeapDeCommitTotalFreeThreshold' : [ 0xd8, ['unsigned long long']],
'HeapDeCommitFreeBlockThreshold' : [ 0xe0, ['unsigned long long']],
'NumberOfHeaps' : [ 0xe8, ['unsigned long']],
'MaximumNumberOfHeaps' : [ 0xec, ['unsigned long']],
'ProcessHeaps' : [ 0xf0, ['unsigned long long']],
'GdiSharedHandleTable' : [ 0xf8, ['unsigned long long']],
'ProcessStarterHelper' : [ 0x100, ['unsigned long long']],
'GdiDCAttributeList' : [ 0x108, ['unsigned long']],
'LoaderLock' : [ 0x110, ['unsigned long long']],
'OSMajorVersion' : [ 0x118, ['unsigned long']],
'OSMinorVersion' : [ 0x11c, ['unsigned long']],
'OSBuildNumber' : [ 0x120, ['unsigned short']],
'OSCSDVersion' : [ 0x122, ['unsigned short']],
'OSPlatformId' : [ 0x124, ['unsigned long']],
'ImageSubsystem' : [ 0x128, ['unsigned long']],
'ImageSubsystemMajorVersion' : [ 0x12c, ['unsigned long']],
'ImageSubsystemMinorVersion' : [ 0x130, ['unsigned long']],
'ImageProcessAffinityMask' : [ 0x138, ['unsigned long long']],
'GdiHandleBuffer' : [ 0x140, ['array', 60, ['unsigned long']]],
'PostProcessInitRoutine' : [ 0x230, ['unsigned long long']],
'TlsExpansionBitmap' : [ 0x238, ['unsigned long long']],
'TlsExpansionBitmapBits' : [ 0x240, ['array', 32, ['unsigned long']]],
'SessionId' : [ 0x2c0, ['unsigned long']],
'AppCompatFlags' : [ 0x2c8, ['_ULARGE_INTEGER']],
'AppCompatFlagsUser' : [ 0x2d0, ['_ULARGE_INTEGER']],
'pShimData' : [ 0x2d8, ['unsigned long long']],
'AppCompatInfo' : [ 0x2e0, ['unsigned long long']],
'CSDVersion' : [ 0x2e8, ['_STRING64']],
'ActivationContextData' : [ 0x2f8, ['unsigned long long']],
'ProcessAssemblyStorageMap' : [ 0x300, ['unsigned long long']],
'SystemDefaultActivationContextData' : [ 0x308, ['unsigned long long']],
'SystemAssemblyStorageMap' : [ 0x310, ['unsigned long long']],
'MinimumStackCommit' : [ 0x318, ['unsigned long long']],
'FlsCallback' : [ 0x320, ['unsigned long long']],
'FlsListHead' : [ 0x328, ['LIST_ENTRY64']],
'FlsBitmap' : [ 0x338, ['unsigned long long']],
'FlsBitmapBits' : [ 0x340, ['array', 4, ['unsigned long']]],
'FlsHighIndex' : [ 0x350, ['unsigned long']],
} ],
'_KIDTENTRY64' : [ 0x10, {
'OffsetLow' : [ 0x0, ['unsigned short']],
'Selector' : [ 0x2, ['unsigned short']],
'IstIndex' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned short')]],
'Reserved0' : [ 0x4, ['BitField', dict(start_bit = 3, end_bit = 8, native_type='unsigned short')]],
'Type' : [ 0x4, ['BitField', dict(start_bit = 8, end_bit = 13, native_type='unsigned short')]],
'Dpl' : [ 0x4, ['BitField', dict(start_bit = 13, end_bit = 15, native_type='unsigned short')]],
'Present' : [ 0x4, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned short')]],
'OffsetMiddle' : [ 0x6, ['unsigned short']],
'OffsetHigh' : [ 0x8, ['unsigned long']],
'Reserved1' : [ 0xc, ['unsigned long']],
'Alignment' : [ 0x0, ['unsigned long long']],
} ],
'_KPCR' : [ 0x2600, {
'NtTib' : [ 0x0, ['_NT_TIB']],
'GdtBase' : [ 0x0, ['pointer64', ['_KGDTENTRY64']]],
'TssBase' : [ 0x8, ['pointer64', ['_KTSS64']]],
'PerfGlobalGroupMask' : [ 0x10, ['pointer64', ['void']]],
'Self' : [ 0x18, ['pointer64', ['_KPCR']]],
'CurrentPrcb' : [ 0x20, ['pointer64', ['_KPRCB']]],
'LockArray' : [ 0x28, ['pointer64', ['_KSPIN_LOCK_QUEUE']]],
'Used_Self' : [ 0x30, ['pointer64', ['void']]],
'IdtBase' : [ 0x38, ['pointer64', ['_KIDTENTRY64']]],
'Unused' : [ 0x40, ['array', 2, ['unsigned long long']]],
'Irql' : [ 0x50, ['unsigned char']],
'SecondLevelCacheAssociativity' : [ 0x51, ['unsigned char']],
'ObsoleteNumber' : [ 0x52, ['unsigned char']],
'Fill0' : [ 0x53, ['unsigned char']],
'Unused0' : [ 0x54, ['array', 3, ['unsigned long']]],
'MajorVersion' : [ 0x60, ['unsigned short']],
'MinorVersion' : [ 0x62, ['unsigned short']],
'StallScaleFactor' : [ 0x64, ['unsigned long']],
'Unused1' : [ 0x68, ['array', 3, ['pointer64', ['void']]]],
'KernelReserved' : [ 0x80, ['array', 15, ['unsigned long']]],
'SecondLevelCacheSize' : [ 0xbc, ['unsigned long']],
'HalReserved' : [ 0xc0, ['array', 16, ['unsigned long']]],
'Unused2' : [ 0x100, ['unsigned long']],
'KdVersionBlock' : [ 0x108, ['pointer64', ['void']]],
'Unused3' : [ 0x110, ['pointer64', ['void']]],
'PcrAlign1' : [ 0x118, ['array', 24, ['unsigned long']]],
'Prcb' : [ 0x180, ['_KPRCB']],
} ],
'_MMCOLOR_TABLES' : [ 0x18, {
'Flink' : [ 0x0, ['unsigned long long']],
'Blink' : [ 0x8, ['pointer64', ['void']]],
'Count' : [ 0x10, ['unsigned long long']],
} ],
'_ACL' : [ 0x8, {
'AclRevision' : [ 0x0, ['unsigned char']],
'Sbz1' : [ 0x1, ['unsigned char']],
'AclSize' : [ 0x2, ['unsigned short']],
'AceCount' : [ 0x4, ['unsigned short']],
'Sbz2' : [ 0x6, ['unsigned short']],
} ],
'_DBGKD_FILL_MEMORY' : [ 0x10, {
'Address' : [ 0x0, ['unsigned long long']],
'Length' : [ 0x8, ['unsigned long']],
'Flags' : [ 0xc, ['unsigned short']],
'PatternLength' : [ 0xe, ['unsigned short']],
} ],
'_PP_LOOKASIDE_LIST' : [ 0x10, {
'P' : [ 0x0, ['pointer64', ['_GENERAL_LOOKASIDE']]],
'L' : [ 0x8, ['pointer64', ['_GENERAL_LOOKASIDE']]],
} ],
'_PHYSICAL_MEMORY_RUN' : [ 0x10, {
'BasePage' : [ 0x0, ['unsigned long long']],
'PageCount' : [ 0x8, ['unsigned long long']],
} ],
'__unnamed_14ad' : [ 0x4, {
'LongFlags' : [ 0x0, ['unsigned long']],
'Flags' : [ 0x0, ['_MM_SESSION_SPACE_FLAGS']],
} ],
'_MM_SESSION_SPACE' : [ 0x1d80, {
'GlobalVirtualAddress' : [ 0x0, ['pointer64', ['_MM_SESSION_SPACE']]],
'ReferenceCount' : [ 0x8, ['long']],
'u' : [ 0xc, ['__unnamed_14ad']],
'SessionId' : [ 0x10, ['unsigned long']],
'ProcessList' : [ 0x18, ['_LIST_ENTRY']],
'LastProcessSwappedOutTime' : [ 0x28, ['_LARGE_INTEGER']],
'SessionPageDirectoryIndex' : [ 0x30, ['unsigned long long']],
'NonPagablePages' : [ 0x38, ['unsigned long long']],
'CommittedPages' : [ 0x40, ['unsigned long long']],
'PagedPoolStart' : [ 0x48, ['pointer64', ['void']]],
'PagedPoolEnd' : [ 0x50, ['pointer64', ['void']]],
'PagedPoolBasePde' : [ 0x58, ['pointer64', ['_MMPTE']]],
'Color' : [ 0x60, ['unsigned long']],
'ResidentProcessCount' : [ 0x64, ['long']],
'SessionPoolAllocationFailures' : [ 0x68, ['array', 4, ['unsigned long']]],
'ImageList' : [ 0x78, ['_LIST_ENTRY']],
'LocaleId' : [ 0x88, ['unsigned long']],
'AttachCount' : [ 0x8c, ['unsigned long']],
'AttachEvent' : [ 0x90, ['_KEVENT']],
'LastProcess' : [ 0xa8, ['pointer64', ['_EPROCESS']]],
'ProcessReferenceToSession' : [ 0xb0, ['long']],
'WsListEntry' : [ 0xb8, ['_LIST_ENTRY']],
'Lookaside' : [ 0x100, ['array', 21, ['_GENERAL_LOOKASIDE']]],
'Session' : [ 0xb80, ['_MMSESSION']],
'PagedPoolMutex' : [ 0xbe8, ['_KGUARDED_MUTEX']],
'PagedPoolInfo' : [ 0xc20, ['_MM_PAGED_POOL_INFO']],
'Vm' : [ 0xc60, ['_MMSUPPORT']],
'Wsle' : [ 0xcb8, ['pointer64', ['_MMWSLE']]],
'Win32KDriverUnload' : [ 0xcc0, ['pointer64', ['void']]],
'PagedPool' : [ 0xcc8, ['_POOL_DESCRIPTOR']],
'PageDirectory' : [ 0x1d10, ['_MMPTE']],
'SpecialPoolFirstPte' : [ 0x1d18, ['pointer64', ['_MMPTE']]],
'SpecialPoolLastPte' : [ 0x1d20, ['pointer64', ['_MMPTE']]],
'NextPdeForSpecialPoolExpansion' : [ 0x1d28, ['pointer64', ['_MMPTE']]],
'LastPdeForSpecialPoolExpansion' : [ 0x1d30, ['pointer64', ['_MMPTE']]],
'SpecialPagesInUse' : [ 0x1d38, ['unsigned long long']],
'ImageLoadingCount' : [ 0x1d40, ['long']],
} ],
'_PEB' : [ 0x358, {
'InheritedAddressSpace' : [ 0x0, ['unsigned char']],
'ReadImageFileExecOptions' : [ 0x1, ['unsigned char']],
'BeingDebugged' : [ 0x2, ['unsigned char']],
'BitField' : [ 0x3, ['unsigned char']],
'ImageUsesLargePages' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'SpareBits' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 8, native_type='unsigned char')]],
'Mutant' : [ 0x8, ['pointer64', ['void']]],
'ImageBaseAddress' : [ 0x10, ['pointer64', ['void']]],
'Ldr' : [ 0x18, ['pointer64', ['_PEB_LDR_DATA']]],
'ProcessParameters' : [ 0x20, ['pointer64', ['_RTL_USER_PROCESS_PARAMETERS']]],
'SubSystemData' : [ 0x28, ['pointer64', ['void']]],
'ProcessHeap' : [ 0x30, ['pointer64', ['void']]],
'FastPebLock' : [ 0x38, ['pointer64', ['_RTL_CRITICAL_SECTION']]],
'AtlThunkSListPtr' : [ 0x40, ['pointer64', ['void']]],
'SparePtr2' : [ 0x48, ['pointer64', ['void']]],
'EnvironmentUpdateCount' : [ 0x50, ['unsigned long']],
'KernelCallbackTable' : [ 0x58, ['pointer64', ['void']]],
'SystemReserved' : [ 0x60, ['array', 1, ['unsigned long']]],
'SpareUlong' : [ 0x64, ['unsigned long']],
'FreeList' : [ 0x68, ['pointer64', ['_PEB_FREE_BLOCK']]],
'TlsExpansionCounter' : [ 0x70, ['unsigned long']],
'TlsBitmap' : [ 0x78, ['pointer64', ['void']]],
'TlsBitmapBits' : [ 0x80, ['array', 2, ['unsigned long']]],
'ReadOnlySharedMemoryBase' : [ 0x88, ['pointer64', ['void']]],
'ReadOnlySharedMemoryHeap' : [ 0x90, ['pointer64', ['void']]],
'ReadOnlyStaticServerData' : [ 0x98, ['pointer64', ['pointer64', ['void']]]],
'AnsiCodePageData' : [ 0xa0, ['pointer64', ['void']]],
'OemCodePageData' : [ 0xa8, ['pointer64', ['void']]],
'UnicodeCaseTableData' : [ 0xb0, ['pointer64', ['void']]],
'NumberOfProcessors' : [ 0xb8, ['unsigned long']],
'NtGlobalFlag' : [ 0xbc, ['unsigned long']],
'CriticalSectionTimeout' : [ 0xc0, ['_LARGE_INTEGER']],
'HeapSegmentReserve' : [ 0xc8, ['unsigned long long']],
'HeapSegmentCommit' : [ 0xd0, ['unsigned long long']],
'HeapDeCommitTotalFreeThreshold' : [ 0xd8, ['unsigned long long']],
'HeapDeCommitFreeBlockThreshold' : [ 0xe0, ['unsigned long long']],
'NumberOfHeaps' : [ 0xe8, ['unsigned long']],
'MaximumNumberOfHeaps' : [ 0xec, ['unsigned long']],
'ProcessHeaps' : [ 0xf0, ['pointer64', ['pointer64', ['void']]]],
'GdiSharedHandleTable' : [ 0xf8, ['pointer64', ['void']]],
'ProcessStarterHelper' : [ 0x100, ['pointer64', ['void']]],
'GdiDCAttributeList' : [ 0x108, ['unsigned long']],
'LoaderLock' : [ 0x110, ['pointer64', ['_RTL_CRITICAL_SECTION']]],
'OSMajorVersion' : [ 0x118, ['unsigned long']],
'OSMinorVersion' : [ 0x11c, ['unsigned long']],
'OSBuildNumber' : [ 0x120, ['unsigned short']],
'OSCSDVersion' : [ 0x122, ['unsigned short']],
'OSPlatformId' : [ 0x124, ['unsigned long']],
'ImageSubsystem' : [ 0x128, ['unsigned long']],
'ImageSubsystemMajorVersion' : [ 0x12c, ['unsigned long']],
'ImageSubsystemMinorVersion' : [ 0x130, ['unsigned long']],
'ImageProcessAffinityMask' : [ 0x138, ['unsigned long long']],
'GdiHandleBuffer' : [ 0x140, ['array', 60, ['unsigned long']]],
'PostProcessInitRoutine' : [ 0x230, ['pointer64', ['void']]],
'TlsExpansionBitmap' : [ 0x238, ['pointer64', ['void']]],
'TlsExpansionBitmapBits' : [ 0x240, ['array', 32, ['unsigned long']]],
'SessionId' : [ 0x2c0, ['unsigned long']],
'AppCompatFlags' : [ 0x2c8, ['_ULARGE_INTEGER']],
'AppCompatFlagsUser' : [ 0x2d0, ['_ULARGE_INTEGER']],
'pShimData' : [ 0x2d8, ['pointer64', ['void']]],
'AppCompatInfo' : [ 0x2e0, ['pointer64', ['void']]],
'CSDVersion' : [ 0x2e8, ['_UNICODE_STRING']],
'ActivationContextData' : [ 0x2f8, ['pointer64', ['_ACTIVATION_CONTEXT_DATA']]],
'ProcessAssemblyStorageMap' : [ 0x300, ['pointer64', ['_ASSEMBLY_STORAGE_MAP']]],
'SystemDefaultActivationContextData' : [ 0x308, ['pointer64', ['_ACTIVATION_CONTEXT_DATA']]],
'SystemAssemblyStorageMap' : [ 0x310, ['pointer64', ['_ASSEMBLY_STORAGE_MAP']]],
'MinimumStackCommit' : [ 0x318, ['unsigned long long']],
'FlsCallback' : [ 0x320, ['pointer64', ['pointer64', ['void']]]],
'FlsListHead' : [ 0x328, ['_LIST_ENTRY']],
'FlsBitmap' : [ 0x338, ['pointer64', ['void']]],
'FlsBitmapBits' : [ 0x340, ['array', 4, ['unsigned long']]],
'FlsHighIndex' : [ 0x350, ['unsigned long']],
} ],
'_HEAP_FREE_ENTRY' : [ 0x20, {
'PreviousBlockPrivateData' : [ 0x0, ['pointer64', ['void']]],
'Size' : [ 0x8, ['unsigned short']],
'PreviousSize' : [ 0xa, ['unsigned short']],
'SmallTagIndex' : [ 0xc, ['unsigned char']],
'Flags' : [ 0xd, ['unsigned char']],
'UnusedBytes' : [ 0xe, ['unsigned char']],
'SegmentIndex' : [ 0xf, ['unsigned char']],
'CompactHeader' : [ 0x8, ['unsigned long long']],
'FreeList' : [ 0x10, ['_LIST_ENTRY']],
} ],
'_DBGKD_GET_CONTEXT' : [ 0x4, {
'Unused' : [ 0x0, ['unsigned long']],
} ],
'_TEB_ACTIVE_FRAME_CONTEXT' : [ 0x10, {
'Flags' : [ 0x0, ['unsigned long']],
'FrameName' : [ 0x8, ['pointer64', ['unsigned char']]],
} ],
'_MMPTE_SOFTWARE' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'PageFileLow' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 5, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Transition' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'UsedPageTableEntries' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 22, native_type='unsigned long long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 32, native_type='unsigned long long')]],
'PageFileHigh' : [ 0x0, ['BitField', dict(start_bit = 32, end_bit = 64, native_type='unsigned long long')]],
} ],
'_IO_RESOURCE_REQUIREMENTS_LIST' : [ 0x48, {
'ListSize' : [ 0x0, ['unsigned long']],
'InterfaceType' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'BusNumber' : [ 0x8, ['unsigned long']],
'SlotNumber' : [ 0xc, ['unsigned long']],
'Reserved' : [ 0x10, ['array', 3, ['unsigned long']]],
'AlternativeLists' : [ 0x1c, ['unsigned long']],
'List' : [ 0x20, ['array', 1, ['_IO_RESOURCE_LIST']]],
} ],
'__unnamed_14dd' : [ 0x10, {
'IoStatus' : [ 0x0, ['_IO_STATUS_BLOCK']],
'LastByte' : [ 0x0, ['_LARGE_INTEGER']],
} ],
'_MMMOD_WRITER_MDL_ENTRY' : [ 0xa8, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'WriteOffset' : [ 0x10, ['_LARGE_INTEGER']],
'u' : [ 0x18, ['__unnamed_14dd']],
'Irp' : [ 0x28, ['pointer64', ['_IRP']]],
'LastPageToWrite' : [ 0x30, ['unsigned long long']],
'PagingListHead' : [ 0x38, ['pointer64', ['_MMMOD_WRITER_LISTHEAD']]],
'CurrentList' : [ 0x40, ['pointer64', ['_LIST_ENTRY']]],
'PagingFile' : [ 0x48, ['pointer64', ['_MMPAGING_FILE']]],
'File' : [ 0x50, ['pointer64', ['_FILE_OBJECT']]],
'ControlArea' : [ 0x58, ['pointer64', ['_CONTROL_AREA']]],
'FileResource' : [ 0x60, ['pointer64', ['_ERESOURCE']]],
'IssueTime' : [ 0x68, ['_LARGE_INTEGER']],
'Mdl' : [ 0x70, ['_MDL']],
'Page' : [ 0xa0, ['array', 1, ['unsigned long long']]],
} ],
'_CACHE_UNINITIALIZE_EVENT' : [ 0x20, {
'Next' : [ 0x0, ['pointer64', ['_CACHE_UNINITIALIZE_EVENT']]],
'Event' : [ 0x8, ['_KEVENT']],
} ],
'_SECURITY_TOKEN_AUDIT_DATA' : [ 0xc, {
'Length' : [ 0x0, ['unsigned long']],
'GrantMask' : [ 0x4, ['unsigned long']],
'DenyMask' : [ 0x8, ['unsigned long']],
} ],
'_CM_RESOURCE_LIST' : [ 0x28, {
'Count' : [ 0x0, ['unsigned long']],
'List' : [ 0x4, ['array', 1, ['_CM_FULL_RESOURCE_DESCRIPTOR']]],
} ],
'_TEB32' : [ 0xfbc, {
'NtTib' : [ 0x0, ['_NT_TIB32']],
'EnvironmentPointer' : [ 0x1c, ['unsigned long']],
'ClientId' : [ 0x20, ['_CLIENT_ID32']],
'ActiveRpcHandle' : [ 0x28, ['unsigned long']],
'ThreadLocalStoragePointer' : [ 0x2c, ['unsigned long']],
'ProcessEnvironmentBlock' : [ 0x30, ['unsigned long']],
'LastErrorValue' : [ 0x34, ['unsigned long']],
'CountOfOwnedCriticalSections' : [ 0x38, ['unsigned long']],
'CsrClientThread' : [ 0x3c, ['unsigned long']],
'Win32ThreadInfo' : [ 0x40, ['unsigned long']],
'User32Reserved' : [ 0x44, ['array', 26, ['unsigned long']]],
'UserReserved' : [ 0xac, ['array', 5, ['unsigned long']]],
'WOW32Reserved' : [ 0xc0, ['unsigned long']],
'CurrentLocale' : [ 0xc4, ['unsigned long']],
'FpSoftwareStatusRegister' : [ 0xc8, ['unsigned long']],
'SystemReserved1' : [ 0xcc, ['array', 54, ['unsigned long']]],
'ExceptionCode' : [ 0x1a4, ['long']],
'ActivationContextStackPointer' : [ 0x1a8, ['unsigned long']],
'SpareBytes1' : [ 0x1ac, ['array', 40, ['unsigned char']]],
'GdiTebBatch' : [ 0x1d4, ['_GDI_TEB_BATCH32']],
'RealClientId' : [ 0x6b4, ['_CLIENT_ID32']],
'GdiCachedProcessHandle' : [ 0x6bc, ['unsigned long']],
'GdiClientPID' : [ 0x6c0, ['unsigned long']],
'GdiClientTID' : [ 0x6c4, ['unsigned long']],
'GdiThreadLocalInfo' : [ 0x6c8, ['unsigned long']],
'Win32ClientInfo' : [ 0x6cc, ['array', 62, ['unsigned long']]],
'glDispatchTable' : [ 0x7c4, ['array', 233, ['unsigned long']]],
'glReserved1' : [ 0xb68, ['array', 29, ['unsigned long']]],
'glReserved2' : [ 0xbdc, ['unsigned long']],
'glSectionInfo' : [ 0xbe0, ['unsigned long']],
'glSection' : [ 0xbe4, ['unsigned long']],
'glTable' : [ 0xbe8, ['unsigned long']],
'glCurrentRC' : [ 0xbec, ['unsigned long']],
'glContext' : [ 0xbf0, ['unsigned long']],
'LastStatusValue' : [ 0xbf4, ['unsigned long']],
'StaticUnicodeString' : [ 0xbf8, ['_STRING32']],
'StaticUnicodeBuffer' : [ 0xc00, ['array', 261, ['unsigned short']]],
'DeallocationStack' : [ 0xe0c, ['unsigned long']],
'TlsSlots' : [ 0xe10, ['array', 64, ['unsigned long']]],
'TlsLinks' : [ 0xf10, ['LIST_ENTRY32']],
'Vdm' : [ 0xf18, ['unsigned long']],
'ReservedForNtRpc' : [ 0xf1c, ['unsigned long']],
'DbgSsReserved' : [ 0xf20, ['array', 2, ['unsigned long']]],
'HardErrorMode' : [ 0xf28, ['unsigned long']],
'Instrumentation' : [ 0xf2c, ['array', 14, ['unsigned long']]],
'SubProcessTag' : [ 0xf64, ['unsigned long']],
'EtwTraceData' : [ 0xf68, ['unsigned long']],
'WinSockData' : [ 0xf6c, ['unsigned long']],
'GdiBatchCount' : [ 0xf70, ['unsigned long']],
'InDbgPrint' : [ 0xf74, ['unsigned char']],
'FreeStackOnTermination' : [ 0xf75, ['unsigned char']],
'HasFiberData' : [ 0xf76, ['unsigned char']],
'IdealProcessor' : [ 0xf77, ['unsigned char']],
'GuaranteedStackBytes' : [ 0xf78, ['unsigned long']],
'ReservedForPerf' : [ 0xf7c, ['unsigned long']],
'ReservedForOle' : [ 0xf80, ['unsigned long']],
'WaitingOnLoaderLock' : [ 0xf84, ['unsigned long']],
'SparePointer1' : [ 0xf88, ['unsigned long']],
'SoftPatchPtr1' : [ 0xf8c, ['unsigned long']],
'SoftPatchPtr2' : [ 0xf90, ['unsigned long']],
'TlsExpansionSlots' : [ 0xf94, ['unsigned long']],
'ImpersonationLocale' : [ 0xf98, ['unsigned long']],
'IsImpersonating' : [ 0xf9c, ['unsigned long']],
'NlsCache' : [ 0xfa0, ['unsigned long']],
'pShimData' : [ 0xfa4, ['unsigned long']],
'HeapVirtualAffinity' : [ 0xfa8, ['unsigned long']],
'CurrentTransactionHandle' : [ 0xfac, ['unsigned long']],
'ActiveFrame' : [ 0xfb0, ['unsigned long']],
'FlsData' : [ 0xfb4, ['unsigned long']],
'SafeThunkCall' : [ 0xfb8, ['unsigned char']],
'BooleanSpare' : [ 0xfb9, ['array', 3, ['unsigned char']]],
} ],
'_CM_FULL_RESOURCE_DESCRIPTOR' : [ 0x24, {
'InterfaceType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'BusNumber' : [ 0x4, ['unsigned long']],
'PartialResourceList' : [ 0x8, ['_CM_PARTIAL_RESOURCE_LIST']],
} ],
'_EPROCESS_QUOTA_ENTRY' : [ 0x20, {
'Usage' : [ 0x0, ['unsigned long long']],
'Limit' : [ 0x8, ['unsigned long long']],
'Peak' : [ 0x10, ['unsigned long long']],
'Return' : [ 0x18, ['unsigned long long']],
} ],
'__unnamed_1502' : [ 0x50, {
'CellData' : [ 0x0, ['_CELL_DATA']],
'List' : [ 0x0, ['array', 1, ['unsigned long long']]],
} ],
'_CM_CACHED_VALUE_INDEX' : [ 0x58, {
'CellIndex' : [ 0x0, ['unsigned long']],
'Data' : [ 0x8, ['__unnamed_1502']],
} ],
'_WMI_BUFFER_HEADER' : [ 0x48, {
'Wnode' : [ 0x0, ['_WNODE_HEADER']],
'Reserved1' : [ 0x0, ['unsigned long long']],
'Reserved2' : [ 0x8, ['unsigned long long']],
'Reserved3' : [ 0x10, ['_LARGE_INTEGER']],
'Alignment' : [ 0x18, ['pointer64', ['void']]],
'SlistEntry' : [ 0x20, ['_SINGLE_LIST_ENTRY']],
'Entry' : [ 0x18, ['_LIST_ENTRY']],
'ReferenceCount' : [ 0x0, ['long']],
'SavedOffset' : [ 0x4, ['unsigned long']],
'CurrentOffset' : [ 0x8, ['unsigned long']],
'UsePerfClock' : [ 0xc, ['unsigned long']],
'TimeStamp' : [ 0x10, ['_LARGE_INTEGER']],
'Guid' : [ 0x18, ['_GUID']],
'ClientContext' : [ 0x28, ['_WMI_CLIENT_CONTEXT']],
'State' : [ 0x2c, ['_WMI_BUFFER_STATE']],
'Flags' : [ 0x2c, ['unsigned long']],
'Offset' : [ 0x30, ['unsigned long']],
'BufferFlag' : [ 0x34, ['unsigned short']],
'BufferType' : [ 0x36, ['unsigned short']],
'InstanceGuid' : [ 0x38, ['_GUID']],
'LoggerContext' : [ 0x38, ['pointer64', ['void']]],
'GlobalEntry' : [ 0x40, ['_SINGLE_LIST_ENTRY']],
} ],
'_KSEMAPHORE' : [ 0x20, {
'Header' : [ 0x0, ['_DISPATCHER_HEADER']],
'Limit' : [ 0x18, ['long']],
} ],
'_PROCESSOR_POWER_STATE' : [ 0x170, {
'IdleFunction' : [ 0x0, ['pointer64', ['void']]],
'Idle0KernelTimeLimit' : [ 0x8, ['unsigned long']],
'Idle0LastTime' : [ 0xc, ['unsigned long']],
'IdleHandlers' : [ 0x10, ['pointer64', ['void']]],
'IdleState' : [ 0x18, ['pointer64', ['void']]],
'IdleHandlersCount' : [ 0x20, ['unsigned long']],
'LastCheck' : [ 0x28, ['unsigned long long']],
'IdleTimes' : [ 0x30, ['PROCESSOR_IDLE_TIMES']],
'IdleTime1' : [ 0x50, ['unsigned long']],
'PromotionCheck' : [ 0x54, ['unsigned long']],
'IdleTime2' : [ 0x58, ['unsigned long']],
'CurrentThrottle' : [ 0x5c, ['unsigned char']],
'ThermalThrottleLimit' : [ 0x5d, ['unsigned char']],
'CurrentThrottleIndex' : [ 0x5e, ['unsigned char']],
'ThermalThrottleIndex' : [ 0x5f, ['unsigned char']],
'LastKernelUserTime' : [ 0x60, ['unsigned long']],
'LastIdleThreadKernelTime' : [ 0x64, ['unsigned long']],
'PackageIdleStartTime' : [ 0x68, ['unsigned long']],
'PackageIdleTime' : [ 0x6c, ['unsigned long']],
'DebugCount' : [ 0x70, ['unsigned long']],
'LastSysTime' : [ 0x74, ['unsigned long']],
'TotalIdleStateTime' : [ 0x78, ['array', 3, ['unsigned long long']]],
'TotalIdleTransitions' : [ 0x90, ['array', 3, ['unsigned long']]],
'PreviousC3StateTime' : [ 0xa0, ['unsigned long long']],
'KneeThrottleIndex' : [ 0xa8, ['unsigned char']],
'ThrottleLimitIndex' : [ 0xa9, ['unsigned char']],
'PerfStatesCount' : [ 0xaa, ['unsigned char']],
'ProcessorMinThrottle' : [ 0xab, ['unsigned char']],
'ProcessorMaxThrottle' : [ 0xac, ['unsigned char']],
'EnableIdleAccounting' : [ 0xad, ['unsigned char']],
'LastC3Percentage' : [ 0xae, ['unsigned char']],
'LastAdjustedBusyPercentage' : [ 0xaf, ['unsigned char']],
'PromotionCount' : [ 0xb0, ['unsigned long']],
'DemotionCount' : [ 0xb4, ['unsigned long']],
'ErrorCount' : [ 0xb8, ['unsigned long']],
'RetryCount' : [ 0xbc, ['unsigned long']],
'Flags' : [ 0xc0, ['unsigned long']],
'PerfCounterFrequency' : [ 0xc8, ['_LARGE_INTEGER']],
'PerfTickCount' : [ 0xd0, ['unsigned long']],
'PerfTimer' : [ 0xd8, ['_KTIMER']],
'PerfDpc' : [ 0x118, ['_KDPC']],
'PerfStates' : [ 0x158, ['pointer64', ['PROCESSOR_PERF_STATE']]],
'PerfSetThrottle' : [ 0x160, ['pointer64', ['void']]],
'LastC3KernelUserTime' : [ 0x168, ['unsigned long']],
'LastPackageIdleTime' : [ 0x16c, ['unsigned long']],
} ],
'_DBGKD_READ_WRITE_MSR' : [ 0xc, {
'Msr' : [ 0x0, ['unsigned long']],
'DataValueLow' : [ 0x4, ['unsigned long']],
'DataValueHigh' : [ 0x8, ['unsigned long']],
} ],
'_MMPFNENTRY' : [ 0x2, {
'Modified' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'ReadInProgress' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'WriteInProgress' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned short')]],
'PrototypePte' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned short')]],
'PageColor' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned short')]],
'PageLocation' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 11, native_type='unsigned short')]],
'RemovalRequested' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned short')]],
'CacheAttribute' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 14, native_type='unsigned short')]],
'Rom' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned short')]],
'ParityError' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned short')]],
} ],
'_IO_COUNTERS' : [ 0x30, {
'ReadOperationCount' : [ 0x0, ['unsigned long long']],
'WriteOperationCount' : [ 0x8, ['unsigned long long']],
'OtherOperationCount' : [ 0x10, ['unsigned long long']],
'ReadTransferCount' : [ 0x18, ['unsigned long long']],
'WriteTransferCount' : [ 0x20, ['unsigned long long']],
'OtherTransferCount' : [ 0x28, ['unsigned long long']],
} ],
'_TOKEN_SOURCE' : [ 0x10, {
'SourceName' : [ 0x0, ['array', 8, ['unsigned char']]],
'SourceIdentifier' : [ 0x8, ['_LUID']],
} ],
'_DBGKD_QUERY_MEMORY' : [ 0x18, {
'Address' : [ 0x0, ['unsigned long long']],
'Reserved' : [ 0x8, ['unsigned long long']],
'AddressSpace' : [ 0x10, ['unsigned long']],
'Flags' : [ 0x14, ['unsigned long']],
} ],
'_DEVICE_OBJECT_POWER_EXTENSION' : [ 0x80, {
'IdleCount' : [ 0x0, ['long']],
'ConservationIdleTime' : [ 0x4, ['unsigned long']],
'PerformanceIdleTime' : [ 0x8, ['unsigned long']],
'DeviceObject' : [ 0x10, ['pointer64', ['_DEVICE_OBJECT']]],
'IdleList' : [ 0x18, ['_LIST_ENTRY']],
'DeviceType' : [ 0x28, ['unsigned char']],
'State' : [ 0x2c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
'NotifySourceList' : [ 0x30, ['_LIST_ENTRY']],
'NotifyTargetList' : [ 0x40, ['_LIST_ENTRY']],
'PowerChannelSummary' : [ 0x50, ['_POWER_CHANNEL_SUMMARY']],
'Volume' : [ 0x70, ['_LIST_ENTRY']],
} ],
'_MMSUPPORT_FLAGS' : [ 0x4, {
'SessionSpace' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'BeingTrimmed' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'SessionLeader' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'TrimHard' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'MaximumWorkingSetHard' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'ForceTrim' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'MinimumWorkingSetHard' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'Available0' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
'MemoryPriority' : [ 0x1, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned char')]],
'GrowWsleHash' : [ 0x2, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned short')]],
'AcquiredUnsafe' : [ 0x2, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned short')]],
'Available' : [ 0x2, ['BitField', dict(start_bit = 2, end_bit = 16, native_type='unsigned short')]],
} ],
'EX_QUEUE_WORKER_INFO' : [ 0x4, {
'QueueDisabled' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'MakeThreadsAsNecessary' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'WaitMode' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'WorkerCount' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'QueueWorkerInfo' : [ 0x0, ['long']],
} ],
'PROCESSOR_PERF_STATE' : [ 0x20, {
'PercentFrequency' : [ 0x0, ['unsigned char']],
'MinCapacity' : [ 0x1, ['unsigned char']],
'Power' : [ 0x2, ['unsigned short']],
'IncreaseLevel' : [ 0x4, ['unsigned char']],
'DecreaseLevel' : [ 0x5, ['unsigned char']],
'Flags' : [ 0x6, ['unsigned short']],
'IncreaseTime' : [ 0x8, ['unsigned long']],
'DecreaseTime' : [ 0xc, ['unsigned long']],
'IncreaseCount' : [ 0x10, ['unsigned long']],
'DecreaseCount' : [ 0x14, ['unsigned long']],
'PerformanceTime' : [ 0x18, ['unsigned long long']],
} ],
'PROCESSOR_IDLE_TIMES' : [ 0x20, {
'StartTime' : [ 0x0, ['unsigned long long']],
'EndTime' : [ 0x8, ['unsigned long long']],
'IdleHandlerReserved' : [ 0x10, ['array', 4, ['unsigned long']]],
} ],
'_TERMINATION_PORT' : [ 0x10, {
'Next' : [ 0x0, ['pointer64', ['_TERMINATION_PORT']]],
'Port' : [ 0x8, ['pointer64', ['void']]],
} ],
'_MMMOD_WRITER_LISTHEAD' : [ 0x28, {
'ListHead' : [ 0x0, ['_LIST_ENTRY']],
'Event' : [ 0x10, ['_KEVENT']],
} ],
'_SYSTEM_POWER_POLICY' : [ 0xe8, {
'Revision' : [ 0x0, ['unsigned long']],
'PowerButton' : [ 0x4, ['POWER_ACTION_POLICY']],
'SleepButton' : [ 0x10, ['POWER_ACTION_POLICY']],
'LidClose' : [ 0x1c, ['POWER_ACTION_POLICY']],
'LidOpenWake' : [ 0x28, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'Reserved' : [ 0x2c, ['unsigned long']],
'Idle' : [ 0x30, ['POWER_ACTION_POLICY']],
'IdleTimeout' : [ 0x3c, ['unsigned long']],
'IdleSensitivity' : [ 0x40, ['unsigned char']],
'DynamicThrottle' : [ 0x41, ['unsigned char']],
'Spare2' : [ 0x42, ['array', 2, ['unsigned char']]],
'MinSleep' : [ 0x44, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'MaxSleep' : [ 0x48, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'ReducedLatencySleep' : [ 0x4c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'WinLogonFlags' : [ 0x50, ['unsigned long']],
'Spare3' : [ 0x54, ['unsigned long']],
'DozeS4Timeout' : [ 0x58, ['unsigned long']],
'BroadcastCapacityResolution' : [ 0x5c, ['unsigned long']],
'DischargePolicy' : [ 0x60, ['array', 4, ['SYSTEM_POWER_LEVEL']]],
'VideoTimeout' : [ 0xc0, ['unsigned long']],
'VideoDimDisplay' : [ 0xc4, ['unsigned char']],
'VideoReserved' : [ 0xc8, ['array', 3, ['unsigned long']]],
'SpindownTimeout' : [ 0xd4, ['unsigned long']],
'OptimizeForPower' : [ 0xd8, ['unsigned char']],
'FanThrottleTolerance' : [ 0xd9, ['unsigned char']],
'ForcedThrottle' : [ 0xda, ['unsigned char']],
'MinThrottle' : [ 0xdb, ['unsigned char']],
'OverThrottled' : [ 0xdc, ['POWER_ACTION_POLICY']],
} ],
'_GDI_TEB_BATCH' : [ 0x4e8, {
'Offset' : [ 0x0, ['unsigned long']],
'HDC' : [ 0x8, ['unsigned long long']],
'Buffer' : [ 0x10, ['array', 310, ['unsigned long']]],
} ],
'_POP_THERMAL_ZONE' : [ 0x120, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'State' : [ 0x10, ['unsigned char']],
'Flags' : [ 0x11, ['unsigned char']],
'Mode' : [ 0x12, ['unsigned char']],
'PendingMode' : [ 0x13, ['unsigned char']],
'ActivePoint' : [ 0x14, ['unsigned char']],
'PendingActivePoint' : [ 0x15, ['unsigned char']],
'Throttle' : [ 0x18, ['long']],
'LastTime' : [ 0x20, ['unsigned long long']],
'SampleRate' : [ 0x28, ['unsigned long']],
'LastTemp' : [ 0x2c, ['unsigned long']],
'PassiveTimer' : [ 0x30, ['_KTIMER']],
'PassiveDpc' : [ 0x70, ['_KDPC']],
'OverThrottled' : [ 0xb0, ['_POP_ACTION_TRIGGER']],
'Irp' : [ 0xc0, ['pointer64', ['_IRP']]],
'Info' : [ 0xc8, ['_THERMAL_INFORMATION']],
} ],
'_DBGKD_CONTINUE2' : [ 0x20, {
'ContinueStatus' : [ 0x0, ['long']],
'ControlSet' : [ 0x4, ['_AMD64_DBGKD_CONTROL_SET']],
'AnyControlSet' : [ 0x4, ['_DBGKD_ANY_CONTROL_SET']],
} ],
'_SECURITY_TOKEN_PROXY_DATA' : [ 0x20, {
'Length' : [ 0x0, ['unsigned long']],
'ProxyClass' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'ProxyFull', 1: 'ProxyService', 2: 'ProxyTree', 3: 'ProxyDirectory'})]],
'PathInfo' : [ 0x8, ['_UNICODE_STRING']],
'ContainerMask' : [ 0x18, ['unsigned long']],
'ObjectMask' : [ 0x1c, ['unsigned long']],
} ],
'_PROCESSOR_POWER_POLICY' : [ 0x4c, {
'Revision' : [ 0x0, ['unsigned long']],
'DynamicThrottle' : [ 0x4, ['unsigned char']],
'Spare' : [ 0x5, ['array', 3, ['unsigned char']]],
'DisableCStates' : [ 0x8, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'Reserved' : [ 0x8, ['BitField', dict(start_bit = 1, end_bit = 32, native_type='unsigned long')]],
'PolicyCount' : [ 0xc, ['unsigned long']],
'Policy' : [ 0x10, ['array', 3, ['_PROCESSOR_POWER_POLICY_INFO']]],
} ],
'_OBJECT_HANDLE_COUNT_DATABASE' : [ 0x18, {
'CountEntries' : [ 0x0, ['unsigned long']],
'HandleCountEntries' : [ 0x8, ['array', 1, ['_OBJECT_HANDLE_COUNT_ENTRY']]],
} ],
'_IMAGE_DOS_HEADER' : [ 0x40, {
'e_magic' : [ 0x0, ['unsigned short']],
'e_cblp' : [ 0x2, ['unsigned short']],
'e_cp' : [ 0x4, ['unsigned short']],
'e_crlc' : [ 0x6, ['unsigned short']],
'e_cparhdr' : [ 0x8, ['unsigned short']],
'e_minalloc' : [ 0xa, ['unsigned short']],
'e_maxalloc' : [ 0xc, ['unsigned short']],
'e_ss' : [ 0xe, ['unsigned short']],
'e_sp' : [ 0x10, ['unsigned short']],
'e_csum' : [ 0x12, ['unsigned short']],
'e_ip' : [ 0x14, ['unsigned short']],
'e_cs' : [ 0x16, ['unsigned short']],
'e_lfarlc' : [ 0x18, ['unsigned short']],
'e_ovno' : [ 0x1a, ['unsigned short']],
'e_res' : [ 0x1c, ['array', 4, ['unsigned short']]],
'e_oemid' : [ 0x24, ['unsigned short']],
'e_oeminfo' : [ 0x26, ['unsigned short']],
'e_res2' : [ 0x28, ['array', 10, ['unsigned short']]],
'e_lfanew' : [ 0x3c, ['long']],
} ],
'_OWNER_ENTRY' : [ 0x10, {
'OwnerThread' : [ 0x0, ['unsigned long long']],
'OwnerCount' : [ 0x8, ['long']],
'TableSize' : [ 0x8, ['unsigned long']],
} ],
'_HEAP_VIRTUAL_ALLOC_ENTRY' : [ 0x40, {
'Entry' : [ 0x0, ['_LIST_ENTRY']],
'ExtraStuff' : [ 0x10, ['_HEAP_ENTRY_EXTRA']],
'CommitSize' : [ 0x20, ['unsigned long long']],
'ReserveSize' : [ 0x28, ['unsigned long long']],
'BusyBlock' : [ 0x30, ['_HEAP_ENTRY']],
} ],
'_RTL_ATOM_TABLE' : [ 0x70, {
'Signature' : [ 0x0, ['unsigned long']],
'CriticalSection' : [ 0x8, ['_RTL_CRITICAL_SECTION']],
'RtlHandleTable' : [ 0x30, ['_RTL_HANDLE_TABLE']],
'NumberOfBuckets' : [ 0x60, ['unsigned long']],
'Buckets' : [ 0x68, ['array', 1, ['pointer64', ['_RTL_ATOM_TABLE_ENTRY']]]],
} ],
'_TEB64' : [ 0x17d8, {
'NtTib' : [ 0x0, ['_NT_TIB64']],
'EnvironmentPointer' : [ 0x38, ['unsigned long long']],
'ClientId' : [ 0x40, ['_CLIENT_ID64']],
'ActiveRpcHandle' : [ 0x50, ['unsigned long long']],
'ThreadLocalStoragePointer' : [ 0x58, ['unsigned long long']],
'ProcessEnvironmentBlock' : [ 0x60, ['unsigned long long']],
'LastErrorValue' : [ 0x68, ['unsigned long']],
'CountOfOwnedCriticalSections' : [ 0x6c, ['unsigned long']],
'CsrClientThread' : [ 0x70, ['unsigned long long']],
'Win32ThreadInfo' : [ 0x78, ['unsigned long long']],
'User32Reserved' : [ 0x80, ['array', 26, ['unsigned long']]],
'UserReserved' : [ 0xe8, ['array', 5, ['unsigned long']]],
'WOW32Reserved' : [ 0x100, ['unsigned long long']],
'CurrentLocale' : [ 0x108, ['unsigned long']],
'FpSoftwareStatusRegister' : [ 0x10c, ['unsigned long']],
'SystemReserved1' : [ 0x110, ['array', 54, ['unsigned long long']]],
'ExceptionCode' : [ 0x2c0, ['long']],
'ActivationContextStackPointer' : [ 0x2c8, ['unsigned long long']],
'SpareBytes1' : [ 0x2d0, ['array', 28, ['unsigned char']]],
'GdiTebBatch' : [ 0x2f0, ['_GDI_TEB_BATCH64']],
'RealClientId' : [ 0x7d8, ['_CLIENT_ID64']],
'GdiCachedProcessHandle' : [ 0x7e8, ['unsigned long long']],
'GdiClientPID' : [ 0x7f0, ['unsigned long']],
'GdiClientTID' : [ 0x7f4, ['unsigned long']],
'GdiThreadLocalInfo' : [ 0x7f8, ['unsigned long long']],
'Win32ClientInfo' : [ 0x800, ['array', 62, ['unsigned long long']]],
'glDispatchTable' : [ 0x9f0, ['array', 233, ['unsigned long long']]],
'glReserved1' : [ 0x1138, ['array', 29, ['unsigned long long']]],
'glReserved2' : [ 0x1220, ['unsigned long long']],
'glSectionInfo' : [ 0x1228, ['unsigned long long']],
'glSection' : [ 0x1230, ['unsigned long long']],
'glTable' : [ 0x1238, ['unsigned long long']],
'glCurrentRC' : [ 0x1240, ['unsigned long long']],
'glContext' : [ 0x1248, ['unsigned long long']],
'LastStatusValue' : [ 0x1250, ['unsigned long']],
'StaticUnicodeString' : [ 0x1258, ['_STRING64']],
'StaticUnicodeBuffer' : [ 0x1268, ['array', 261, ['unsigned short']]],
'DeallocationStack' : [ 0x1478, ['unsigned long long']],
'TlsSlots' : [ 0x1480, ['array', 64, ['unsigned long long']]],
'TlsLinks' : [ 0x1680, ['LIST_ENTRY64']],
'Vdm' : [ 0x1690, ['unsigned long long']],
'ReservedForNtRpc' : [ 0x1698, ['unsigned long long']],
'DbgSsReserved' : [ 0x16a0, ['array', 2, ['unsigned long long']]],
'HardErrorMode' : [ 0x16b0, ['unsigned long']],
'Instrumentation' : [ 0x16b8, ['array', 14, ['unsigned long long']]],
'SubProcessTag' : [ 0x1728, ['unsigned long long']],
'EtwTraceData' : [ 0x1730, ['unsigned long long']],
'WinSockData' : [ 0x1738, ['unsigned long long']],
'GdiBatchCount' : [ 0x1740, ['unsigned long']],
'InDbgPrint' : [ 0x1744, ['unsigned char']],
'FreeStackOnTermination' : [ 0x1745, ['unsigned char']],
'HasFiberData' : [ 0x1746, ['unsigned char']],
'IdealProcessor' : [ 0x1747, ['unsigned char']],
'GuaranteedStackBytes' : [ 0x1748, ['unsigned long']],
'ReservedForPerf' : [ 0x1750, ['unsigned long long']],
'ReservedForOle' : [ 0x1758, ['unsigned long long']],
'WaitingOnLoaderLock' : [ 0x1760, ['unsigned long']],
'SparePointer1' : [ 0x1768, ['unsigned long long']],
'SoftPatchPtr1' : [ 0x1770, ['unsigned long long']],
'SoftPatchPtr2' : [ 0x1778, ['unsigned long long']],
'TlsExpansionSlots' : [ 0x1780, ['unsigned long long']],
'DeallocationBStore' : [ 0x1788, ['unsigned long long']],
'BStoreLimit' : [ 0x1790, ['unsigned long long']],
'ImpersonationLocale' : [ 0x1798, ['unsigned long']],
'IsImpersonating' : [ 0x179c, ['unsigned long']],
'NlsCache' : [ 0x17a0, ['unsigned long long']],
'pShimData' : [ 0x17a8, ['unsigned long long']],
'HeapVirtualAffinity' : [ 0x17b0, ['unsigned long']],
'CurrentTransactionHandle' : [ 0x17b8, ['unsigned long long']],
'ActiveFrame' : [ 0x17c0, ['unsigned long long']],
'FlsData' : [ 0x17c8, ['unsigned long long']],
'SafeThunkCall' : [ 0x17d0, ['unsigned char']],
'BooleanSpare' : [ 0x17d1, ['array', 3, ['unsigned char']]],
} ],
'_IMAGE_ROM_OPTIONAL_HEADER' : [ 0x38, {
'Magic' : [ 0x0, ['unsigned short']],
'MajorLinkerVersion' : [ 0x2, ['unsigned char']],
'MinorLinkerVersion' : [ 0x3, ['unsigned char']],
'SizeOfCode' : [ 0x4, ['unsigned long']],
'SizeOfInitializedData' : [ 0x8, ['unsigned long']],
'SizeOfUninitializedData' : [ 0xc, ['unsigned long']],
'AddressOfEntryPoint' : [ 0x10, ['unsigned long']],
'BaseOfCode' : [ 0x14, ['unsigned long']],
'BaseOfData' : [ 0x18, ['unsigned long']],
'BaseOfBss' : [ 0x1c, ['unsigned long']],
'GprMask' : [ 0x20, ['unsigned long']],
'CprMask' : [ 0x24, ['array', 4, ['unsigned long']]],
'GpValue' : [ 0x34, ['unsigned long']],
} ],
'_iobuf' : [ 0x30, {
'_ptr' : [ 0x0, ['pointer64', ['unsigned char']]],
'_cnt' : [ 0x8, ['long']],
'_base' : [ 0x10, ['pointer64', ['unsigned char']]],
'_flag' : [ 0x18, ['long']],
'_file' : [ 0x1c, ['long']],
'_charbuf' : [ 0x20, ['long']],
'_bufsiz' : [ 0x24, ['long']],
'_tmpfname' : [ 0x28, ['pointer64', ['unsigned char']]],
} ],
'_MMPTE_LIST' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'OneEntry' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'filler0' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 5, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Transition' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'filler1' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 32, native_type='unsigned long long')]],
'NextEntry' : [ 0x0, ['BitField', dict(start_bit = 32, end_bit = 64, native_type='unsigned long long')]],
} ],
'_CMHIVE' : [ 0xab8, {
'Hive' : [ 0x0, ['_HHIVE']],
'FileHandles' : [ 0x578, ['array', 3, ['pointer64', ['void']]]],
'NotifyList' : [ 0x590, ['_LIST_ENTRY']],
'HiveList' : [ 0x5a0, ['_LIST_ENTRY']],
'HiveLock' : [ 0x5b0, ['_EX_PUSH_LOCK']],
'ViewLock' : [ 0x5b8, ['pointer64', ['_KGUARDED_MUTEX']]],
'WriterLock' : [ 0x5c0, ['_EX_PUSH_LOCK']],
'FlusherLock' : [ 0x5c8, ['_EX_PUSH_LOCK']],
'SecurityLock' : [ 0x5d0, ['_EX_PUSH_LOCK']],
'LRUViewListHead' : [ 0x5d8, ['_LIST_ENTRY']],
'PinViewListHead' : [ 0x5e8, ['_LIST_ENTRY']],
'FileObject' : [ 0x5f8, ['pointer64', ['_FILE_OBJECT']]],
'FileFullPath' : [ 0x600, ['_UNICODE_STRING']],
'FileUserName' : [ 0x610, ['_UNICODE_STRING']],
'MappedViews' : [ 0x620, ['unsigned short']],
'PinnedViews' : [ 0x622, ['unsigned short']],
'UseCount' : [ 0x624, ['unsigned long']],
'SecurityCount' : [ 0x628, ['unsigned long']],
'SecurityCacheSize' : [ 0x62c, ['unsigned long']],
'SecurityHitHint' : [ 0x630, ['long']],
'SecurityCache' : [ 0x638, ['pointer64', ['_CM_KEY_SECURITY_CACHE_ENTRY']]],
'SecurityHash' : [ 0x640, ['array', 64, ['_LIST_ENTRY']]],
'UnloadEvent' : [ 0xa40, ['pointer64', ['_KEVENT']]],
'RootKcb' : [ 0xa48, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'Frozen' : [ 0xa50, ['unsigned char']],
'UnloadWorkItem' : [ 0xa58, ['pointer64', ['_WORK_QUEUE_ITEM']]],
'GrowOnlyMode' : [ 0xa60, ['unsigned char']],
'GrowOffset' : [ 0xa64, ['unsigned long']],
'KcbConvertListHead' : [ 0xa68, ['_LIST_ENTRY']],
'KnodeConvertListHead' : [ 0xa78, ['_LIST_ENTRY']],
'CellRemapArray' : [ 0xa88, ['pointer64', ['_CM_CELL_REMAP_BLOCK']]],
'Flags' : [ 0xa90, ['unsigned long']],
'TrustClassEntry' : [ 0xa98, ['_LIST_ENTRY']],
'FlushCount' : [ 0xaa8, ['unsigned long']],
'CreatorOwner' : [ 0xab0, ['pointer64', ['_KTHREAD']]],
} ],
'_HANDLE_TRACE_DEBUG_INFO' : [ 0xf0, {
'RefCount' : [ 0x0, ['long']],
'TableSize' : [ 0x4, ['unsigned long']],
'BitMaskFlags' : [ 0x8, ['unsigned long']],
'CloseCompactionLock' : [ 0x10, ['_FAST_MUTEX']],
'CurrentStackIndex' : [ 0x48, ['unsigned long']],
'TraceDb' : [ 0x50, ['array', 1, ['_HANDLE_TRACE_DB_ENTRY']]],
} ],
'_MDL' : [ 0x30, {
'Next' : [ 0x0, ['pointer64', ['_MDL']]],
'Size' : [ 0x8, ['short']],
'MdlFlags' : [ 0xa, ['short']],
'Process' : [ 0x10, ['pointer64', ['_EPROCESS']]],
'MappedSystemVa' : [ 0x18, ['pointer64', ['void']]],
'StartVa' : [ 0x20, ['pointer64', ['void']]],
'ByteCount' : [ 0x28, ['unsigned long']],
'ByteOffset' : [ 0x2c, ['unsigned long']],
} ],
'_HHIVE' : [ 0x578, {
'Signature' : [ 0x0, ['unsigned long']],
'GetCellRoutine' : [ 0x8, ['pointer64', ['void']]],
'ReleaseCellRoutine' : [ 0x10, ['pointer64', ['void']]],
'Allocate' : [ 0x18, ['pointer64', ['void']]],
'Free' : [ 0x20, ['pointer64', ['void']]],
'FileSetSize' : [ 0x28, ['pointer64', ['void']]],
'FileWrite' : [ 0x30, ['pointer64', ['void']]],
'FileRead' : [ 0x38, ['pointer64', ['void']]],
'FileFlush' : [ 0x40, ['pointer64', ['void']]],
'BaseBlock' : [ 0x48, ['pointer64', ['_HBASE_BLOCK']]],
'DirtyVector' : [ 0x50, ['_RTL_BITMAP']],
'DirtyCount' : [ 0x60, ['unsigned long']],
'DirtyAlloc' : [ 0x64, ['unsigned long']],
'BaseBlockAlloc' : [ 0x68, ['unsigned long']],
'Cluster' : [ 0x6c, ['unsigned long']],
'Flat' : [ 0x70, ['unsigned char']],
'ReadOnly' : [ 0x71, ['unsigned char']],
'Log' : [ 0x72, ['unsigned char']],
'DirtyFlag' : [ 0x73, ['unsigned char']],
'HiveFlags' : [ 0x74, ['unsigned long']],
'LogSize' : [ 0x78, ['unsigned long']],
'RefreshCount' : [ 0x7c, ['unsigned long']],
'StorageTypeCount' : [ 0x80, ['unsigned long']],
'Version' : [ 0x84, ['unsigned long']],
'Storage' : [ 0x88, ['array', 2, ['_DUAL']]],
} ],
'_PAGEFAULT_HISTORY' : [ 0x28, {
'CurrentIndex' : [ 0x0, ['unsigned long']],
'MaxIndex' : [ 0x4, ['unsigned long']],
'SpinLock' : [ 0x8, ['unsigned long long']],
'Reserved' : [ 0x10, ['pointer64', ['void']]],
'WatchInfo' : [ 0x18, ['array', 1, ['_PROCESS_WS_WATCH_INFORMATION']]],
} ],
'_RTL_ATOM_TABLE_ENTRY' : [ 0x18, {
'HashLink' : [ 0x0, ['pointer64', ['_RTL_ATOM_TABLE_ENTRY']]],
'HandleIndex' : [ 0x8, ['unsigned short']],
'Atom' : [ 0xa, ['unsigned short']],
'ReferenceCount' : [ 0xc, ['unsigned short']],
'Flags' : [ 0xe, ['unsigned char']],
'NameLength' : [ 0xf, ['unsigned char']],
'Name' : [ 0x10, ['array', 1, ['unsigned short']]],
} ],
'_MM_SESSION_SPACE_FLAGS' : [ 0x4, {
'Initialized' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DeletePending' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Filler' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 32, native_type='unsigned long')]],
} ],
'_CM_PARTIAL_RESOURCE_LIST' : [ 0x1c, {
'Version' : [ 0x0, ['unsigned short']],
'Revision' : [ 0x2, ['unsigned short']],
'Count' : [ 0x4, ['unsigned long']],
'PartialDescriptors' : [ 0x8, ['array', 1, ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
} ],
'_OBJECT_CREATE_INFORMATION' : [ 0x48, {
'Attributes' : [ 0x0, ['unsigned long']],
'RootDirectory' : [ 0x8, ['pointer64', ['void']]],
'ParseContext' : [ 0x10, ['pointer64', ['void']]],
'ProbeMode' : [ 0x18, ['unsigned char']],
'PagedPoolCharge' : [ 0x1c, ['unsigned long']],
'NonPagedPoolCharge' : [ 0x20, ['unsigned long']],
'SecurityDescriptorCharge' : [ 0x24, ['unsigned long']],
'SecurityDescriptor' : [ 0x28, ['pointer64', ['void']]],
'SecurityQos' : [ 0x30, ['pointer64', ['_SECURITY_QUALITY_OF_SERVICE']]],
'SecurityQualityOfService' : [ 0x38, ['_SECURITY_QUALITY_OF_SERVICE']],
} ],
'_WMI_BUFFER_STATE' : [ 0x4, {
'Free' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'InUse' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Flush' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'Unused' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
} ],
'_MMFREE_POOL_ENTRY' : [ 0x28, {
'List' : [ 0x0, ['_LIST_ENTRY']],
'Size' : [ 0x10, ['unsigned long long']],
'Signature' : [ 0x18, ['unsigned long']],
'Owner' : [ 0x20, ['pointer64', ['_MMFREE_POOL_ENTRY']]],
} ],
'__unnamed_15d3' : [ 0x48, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Wcb' : [ 0x0, ['_WAIT_CONTEXT_BLOCK']],
} ],
'_DEVICE_OBJECT' : [ 0x150, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['unsigned short']],
'ReferenceCount' : [ 0x4, ['long']],
'DriverObject' : [ 0x8, ['pointer64', ['_DRIVER_OBJECT']]],
'NextDevice' : [ 0x10, ['pointer64', ['_DEVICE_OBJECT']]],
'AttachedDevice' : [ 0x18, ['pointer64', ['_DEVICE_OBJECT']]],
'CurrentIrp' : [ 0x20, ['pointer64', ['_IRP']]],
'Timer' : [ 0x28, ['pointer64', ['_IO_TIMER']]],
'Flags' : [ 0x30, ['unsigned long']],
'Characteristics' : [ 0x34, ['unsigned long']],
'Vpb' : [ 0x38, ['pointer64', ['_VPB']]],
'DeviceExtension' : [ 0x40, ['pointer64', ['void']]],
'DeviceType' : [ 0x48, ['unsigned long']],
'StackSize' : [ 0x4c, ['unsigned char']],
'Queue' : [ 0x50, ['__unnamed_15d3']],
'AlignmentRequirement' : [ 0x98, ['unsigned long']],
'DeviceQueue' : [ 0xa0, ['_KDEVICE_QUEUE']],
'Dpc' : [ 0xc8, ['_KDPC']],
'ActiveThreadCount' : [ 0x108, ['unsigned long']],
'SecurityDescriptor' : [ 0x110, ['pointer64', ['void']]],
'DeviceLock' : [ 0x118, ['_KEVENT']],
'SectorSize' : [ 0x130, ['unsigned short']],
'Spare1' : [ 0x132, ['unsigned short']],
'DeviceObjectExtension' : [ 0x138, ['pointer64', ['_DEVOBJ_EXTENSION']]],
'Reserved' : [ 0x140, ['pointer64', ['void']]],
} ],
'_SECTION_OBJECT_POINTERS' : [ 0x18, {
'DataSectionObject' : [ 0x0, ['pointer64', ['void']]],
'SharedCacheMap' : [ 0x8, ['pointer64', ['void']]],
'ImageSectionObject' : [ 0x10, ['pointer64', ['void']]],
} ],
'_SEP_AUDIT_POLICY' : [ 0x8, {
'PolicyElements' : [ 0x0, ['_SEP_AUDIT_POLICY_CATEGORIES']],
'PolicyOverlay' : [ 0x0, ['_SEP_AUDIT_POLICY_OVERLAY']],
'Overlay' : [ 0x0, ['unsigned long long']],
} ],
'_PEB32' : [ 0x230, {
'InheritedAddressSpace' : [ 0x0, ['unsigned char']],
'ReadImageFileExecOptions' : [ 0x1, ['unsigned char']],
'BeingDebugged' : [ 0x2, ['unsigned char']],
'BitField' : [ 0x3, ['unsigned char']],
'ImageUsesLargePages' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'SpareBits' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 8, native_type='unsigned char')]],
'Mutant' : [ 0x4, ['unsigned long']],
'ImageBaseAddress' : [ 0x8, ['unsigned long']],
'Ldr' : [ 0xc, ['unsigned long']],
'ProcessParameters' : [ 0x10, ['unsigned long']],
'SubSystemData' : [ 0x14, ['unsigned long']],
'ProcessHeap' : [ 0x18, ['unsigned long']],
'FastPebLock' : [ 0x1c, ['unsigned long']],
'AtlThunkSListPtr' : [ 0x20, ['unsigned long']],
'SparePtr2' : [ 0x24, ['unsigned long']],
'EnvironmentUpdateCount' : [ 0x28, ['unsigned long']],
'KernelCallbackTable' : [ 0x2c, ['unsigned long']],
'SystemReserved' : [ 0x30, ['array', 1, ['unsigned long']]],
'SpareUlong' : [ 0x34, ['unsigned long']],
'FreeList' : [ 0x38, ['unsigned long']],
'TlsExpansionCounter' : [ 0x3c, ['unsigned long']],
'TlsBitmap' : [ 0x40, ['unsigned long']],
'TlsBitmapBits' : [ 0x44, ['array', 2, ['unsigned long']]],
'ReadOnlySharedMemoryBase' : [ 0x4c, ['unsigned long']],
'ReadOnlySharedMemoryHeap' : [ 0x50, ['unsigned long']],
'ReadOnlyStaticServerData' : [ 0x54, ['unsigned long']],
'AnsiCodePageData' : [ 0x58, ['unsigned long']],
'OemCodePageData' : [ 0x5c, ['unsigned long']],
'UnicodeCaseTableData' : [ 0x60, ['unsigned long']],
'NumberOfProcessors' : [ 0x64, ['unsigned long']],
'NtGlobalFlag' : [ 0x68, ['unsigned long']],
'CriticalSectionTimeout' : [ 0x70, ['_LARGE_INTEGER']],
'HeapSegmentReserve' : [ 0x78, ['unsigned long']],
'HeapSegmentCommit' : [ 0x7c, ['unsigned long']],
'HeapDeCommitTotalFreeThreshold' : [ 0x80, ['unsigned long']],
'HeapDeCommitFreeBlockThreshold' : [ 0x84, ['unsigned long']],
'NumberOfHeaps' : [ 0x88, ['unsigned long']],
'MaximumNumberOfHeaps' : [ 0x8c, ['unsigned long']],
'ProcessHeaps' : [ 0x90, ['unsigned long']],
'GdiSharedHandleTable' : [ 0x94, ['unsigned long']],
'ProcessStarterHelper' : [ 0x98, ['unsigned long']],
'GdiDCAttributeList' : [ 0x9c, ['unsigned long']],
'LoaderLock' : [ 0xa0, ['unsigned long']],
'OSMajorVersion' : [ 0xa4, ['unsigned long']],
'OSMinorVersion' : [ 0xa8, ['unsigned long']],
'OSBuildNumber' : [ 0xac, ['unsigned short']],
'OSCSDVersion' : [ 0xae, ['unsigned short']],
'OSPlatformId' : [ 0xb0, ['unsigned long']],
'ImageSubsystem' : [ 0xb4, ['unsigned long']],
'ImageSubsystemMajorVersion' : [ 0xb8, ['unsigned long']],
'ImageSubsystemMinorVersion' : [ 0xbc, ['unsigned long']],
'ImageProcessAffinityMask' : [ 0xc0, ['unsigned long']],
'GdiHandleBuffer' : [ 0xc4, ['array', 34, ['unsigned long']]],
'PostProcessInitRoutine' : [ 0x14c, ['unsigned long']],
'TlsExpansionBitmap' : [ 0x150, ['unsigned long']],
'TlsExpansionBitmapBits' : [ 0x154, ['array', 32, ['unsigned long']]],
'SessionId' : [ 0x1d4, ['unsigned long']],
'AppCompatFlags' : [ 0x1d8, ['_ULARGE_INTEGER']],
'AppCompatFlagsUser' : [ 0x1e0, ['_ULARGE_INTEGER']],
'pShimData' : [ 0x1e8, ['unsigned long']],
'AppCompatInfo' : [ 0x1ec, ['unsigned long']],
'CSDVersion' : [ 0x1f0, ['_STRING32']],
'ActivationContextData' : [ 0x1f8, ['unsigned long']],
'ProcessAssemblyStorageMap' : [ 0x1fc, ['unsigned long']],
'SystemDefaultActivationContextData' : [ 0x200, ['unsigned long']],
'SystemAssemblyStorageMap' : [ 0x204, ['unsigned long']],
'MinimumStackCommit' : [ 0x208, ['unsigned long']],
'FlsCallback' : [ 0x20c, ['unsigned long']],
'FlsListHead' : [ 0x210, ['LIST_ENTRY32']],
'FlsBitmap' : [ 0x218, ['unsigned long']],
'FlsBitmapBits' : [ 0x21c, ['array', 4, ['unsigned long']]],
'FlsHighIndex' : [ 0x22c, ['unsigned long']],
} ],
'_MBCB' : [ 0xb8, {
'NodeTypeCode' : [ 0x0, ['short']],
'NodeIsInZone' : [ 0x2, ['short']],
'PagesToWrite' : [ 0x4, ['unsigned long']],
'DirtyPages' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
'BitmapRanges' : [ 0x10, ['_LIST_ENTRY']],
'ResumeWritePage' : [ 0x20, ['long long']],
'BitmapRange1' : [ 0x28, ['_BITMAP_RANGE']],
'BitmapRange2' : [ 0x58, ['_BITMAP_RANGE']],
'BitmapRange3' : [ 0x88, ['_BITMAP_RANGE']],
} ],
'_POWER_CHANNEL_SUMMARY' : [ 0x20, {
'Signature' : [ 0x0, ['unsigned long']],
'TotalCount' : [ 0x4, ['unsigned long']],
'D0Count' : [ 0x8, ['unsigned long']],
'NotifyList' : [ 0x10, ['_LIST_ENTRY']],
} ],
'_CM_VIEW_OF_FILE' : [ 0x40, {
'LRUViewList' : [ 0x0, ['_LIST_ENTRY']],
'PinViewList' : [ 0x10, ['_LIST_ENTRY']],
'FileOffset' : [ 0x20, ['unsigned long']],
'Size' : [ 0x24, ['unsigned long']],
'ViewAddress' : [ 0x28, ['pointer64', ['unsigned long long']]],
'Bcb' : [ 0x30, ['pointer64', ['void']]],
'UseCount' : [ 0x38, ['unsigned long']],
} ],
'_SLIST_ENTRY' : [ 0x10, {
'Next' : [ 0x0, ['pointer64', ['_SLIST_ENTRY']]],
} ],
'_KDEVICE_QUEUE' : [ 0x28, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'DeviceListHead' : [ 0x8, ['_LIST_ENTRY']],
'Lock' : [ 0x18, ['unsigned long long']],
'Busy' : [ 0x20, ['unsigned char']],
'Reserved' : [ 0x20, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='long long')]],
'Hint' : [ 0x20, ['BitField', dict(start_bit = 8, end_bit = 64, native_type='long long')]],
} ],
'_KUSER_SHARED_DATA' : [ 0x378, {
'TickCountLowDeprecated' : [ 0x0, ['unsigned long']],
'TickCountMultiplier' : [ 0x4, ['unsigned long']],
'InterruptTime' : [ 0x8, ['_KSYSTEM_TIME']],
'SystemTime' : [ 0x14, ['_KSYSTEM_TIME']],
'TimeZoneBias' : [ 0x20, ['_KSYSTEM_TIME']],
'ImageNumberLow' : [ 0x2c, ['unsigned short']],
'ImageNumberHigh' : [ 0x2e, ['unsigned short']],
'NtSystemRoot' : [ 0x30, ['array', 260, ['unsigned short']]],
'MaxStackTraceDepth' : [ 0x238, ['unsigned long']],
'CryptoExponent' : [ 0x23c, ['unsigned long']],
'TimeZoneId' : [ 0x240, ['unsigned long']],
'LargePageMinimum' : [ 0x244, ['unsigned long']],
'Reserved2' : [ 0x248, ['array', 7, ['unsigned long']]],
'NtProductType' : [ 0x264, ['Enumeration', dict(target = 'long', choices = {1: 'NtProductWinNt', 2: 'NtProductLanManNt', 3: 'NtProductServer'})]],
'ProductTypeIsValid' : [ 0x268, ['unsigned char']],
'NtMajorVersion' : [ 0x26c, ['unsigned long']],
'NtMinorVersion' : [ 0x270, ['unsigned long']],
'ProcessorFeatures' : [ 0x274, ['array', 64, ['unsigned char']]],
'Reserved1' : [ 0x2b4, ['unsigned long']],
'Reserved3' : [ 0x2b8, ['unsigned long']],
'TimeSlip' : [ 0x2bc, ['unsigned long']],
'AlternativeArchitecture' : [ 0x2c0, ['Enumeration', dict(target = 'long', choices = {0: 'StandardDesign', 1: 'NEC98x86', 2: 'EndAlternatives'})]],
'SystemExpirationDate' : [ 0x2c8, ['_LARGE_INTEGER']],
'SuiteMask' : [ 0x2d0, ['unsigned long']],
'KdDebuggerEnabled' : [ 0x2d4, ['unsigned char']],
'NXSupportPolicy' : [ 0x2d5, ['unsigned char']],
'ActiveConsoleId' : [ 0x2d8, ['unsigned long']],
'DismountCount' : [ 0x2dc, ['unsigned long']],
'ComPlusPackage' : [ 0x2e0, ['unsigned long']],
'LastSystemRITEventTickCount' : [ 0x2e4, ['unsigned long']],
'NumberOfPhysicalPages' : [ 0x2e8, ['unsigned long']],
'SafeBootMode' : [ 0x2ec, ['unsigned char']],
'TraceLogging' : [ 0x2f0, ['unsigned long']],
'TestRetInstruction' : [ 0x2f8, ['unsigned long long']],
'SystemCall' : [ 0x300, ['unsigned long']],
'SystemCallReturn' : [ 0x304, ['unsigned long']],
'SystemCallPad' : [ 0x308, ['array', 3, ['unsigned long long']]],
'TickCount' : [ 0x320, ['_KSYSTEM_TIME']],
'TickCountQuad' : [ 0x320, ['unsigned long long']],
'Cookie' : [ 0x330, ['unsigned long']],
'Wow64SharedInformation' : [ 0x334, ['array', 16, ['unsigned long']]],
} ],
'_OBJECT_TYPE_INITIALIZER' : [ 0x70, {
'Length' : [ 0x0, ['unsigned short']],
'UseDefaultObject' : [ 0x2, ['unsigned char']],
'CaseInsensitive' : [ 0x3, ['unsigned char']],
'InvalidAttributes' : [ 0x4, ['unsigned long']],
'GenericMapping' : [ 0x8, ['_GENERIC_MAPPING']],
'ValidAccessMask' : [ 0x18, ['unsigned long']],
'SecurityRequired' : [ 0x1c, ['unsigned char']],
'MaintainHandleCount' : [ 0x1d, ['unsigned char']],
'MaintainTypeList' : [ 0x1e, ['unsigned char']],
'PoolType' : [ 0x20, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'DefaultPagedPoolCharge' : [ 0x24, ['unsigned long']],
'DefaultNonPagedPoolCharge' : [ 0x28, ['unsigned long']],
'DumpProcedure' : [ 0x30, ['pointer64', ['void']]],
'OpenProcedure' : [ 0x38, ['pointer64', ['void']]],
'CloseProcedure' : [ 0x40, ['pointer64', ['void']]],
'DeleteProcedure' : [ 0x48, ['pointer64', ['void']]],
'ParseProcedure' : [ 0x50, ['pointer64', ['void']]],
'SecurityProcedure' : [ 0x58, ['pointer64', ['void']]],
'QueryNameProcedure' : [ 0x60, ['pointer64', ['void']]],
'OkayToCloseProcedure' : [ 0x68, ['pointer64', ['void']]],
} ],
'_WMI_LOGGER_MODE' : [ 0x4, {
'SequentialFile' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'CircularFile' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'AppendFile' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'Unused1' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 8, native_type='unsigned long')]],
'RealTime' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'DelayOpenFile' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'BufferOnly' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'PrivateLogger' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'AddHeader' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'UseExisting' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'UseGlobalSequence' : [ 0x0, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'UseLocalSequence' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'Unused2' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 32, native_type='unsigned long')]],
} ],
'_KPROCESSOR_STATE' : [ 0x5b0, {
'SpecialRegisters' : [ 0x0, ['_KSPECIAL_REGISTERS']],
'ContextFrame' : [ 0xe0, ['_CONTEXT']],
} ],
'__unnamed_162d' : [ 0x10, {
'List' : [ 0x0, ['_LIST_ENTRY']],
'Secured' : [ 0x0, ['_MMADDRESS_LIST']],
} ],
'__unnamed_1633' : [ 0x8, {
'Banked' : [ 0x0, ['pointer64', ['_MMBANKED_SECTION']]],
'ExtendedInfo' : [ 0x0, ['pointer64', ['_MMEXTEND_INFO']]],
} ],
'_MMVAD_LONG' : [ 0x68, {
'u1' : [ 0x0, ['__unnamed_1180']],
'LeftChild' : [ 0x8, ['pointer64', ['_MMVAD']]],
'RightChild' : [ 0x10, ['pointer64', ['_MMVAD']]],
'StartingVpn' : [ 0x18, ['unsigned long long']],
'EndingVpn' : [ 0x20, ['unsigned long long']],
'u' : [ 0x28, ['__unnamed_1183']],
'ControlArea' : [ 0x30, ['pointer64', ['_CONTROL_AREA']]],
'FirstPrototypePte' : [ 0x38, ['pointer64', ['_MMPTE']]],
'LastContiguousPte' : [ 0x40, ['pointer64', ['_MMPTE']]],
'u2' : [ 0x48, ['__unnamed_1188']],
'u3' : [ 0x50, ['__unnamed_162d']],
'u4' : [ 0x60, ['__unnamed_1633']],
} ],
'_KEXECUTE_OPTIONS' : [ 0x1, {
'ExecuteDisable' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'ExecuteEnable' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'DisableThunkEmulation' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'Permanent' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'ExecuteDispatchEnable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'ImageDispatchEnable' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 8, native_type='unsigned char')]],
} ],
'_POOL_DESCRIPTOR' : [ 0x1048, {
'PoolType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'NonPagedPool', 1: 'PagedPool', 2: 'NonPagedPoolMustSucceed', 3: 'DontUseThisType', 4: 'NonPagedPoolCacheAligned', 5: 'PagedPoolCacheAligned', 6: 'NonPagedPoolCacheAlignedMustS', 7: 'MaxPoolType', 34: 'NonPagedPoolMustSucceedSession', 35: 'DontUseThisTypeSession', 32: 'NonPagedPoolSession', 36: 'NonPagedPoolCacheAlignedSession', 33: 'PagedPoolSession', 38: 'NonPagedPoolCacheAlignedMustSSession', 37: 'PagedPoolCacheAlignedSession'})]],
'PoolIndex' : [ 0x4, ['unsigned long']],
'RunningAllocs' : [ 0x8, ['unsigned long']],
'RunningDeAllocs' : [ 0xc, ['unsigned long']],
'TotalPages' : [ 0x10, ['unsigned long']],
'TotalBigPages' : [ 0x14, ['unsigned long']],
'Threshold' : [ 0x18, ['unsigned long']],
'LockAddress' : [ 0x20, ['pointer64', ['void']]],
'PendingFrees' : [ 0x28, ['pointer64', ['void']]],
'PendingFreeDepth' : [ 0x30, ['long']],
'TotalBytes' : [ 0x38, ['unsigned long long']],
'Spare0' : [ 0x40, ['unsigned long long']],
'ListHeads' : [ 0x48, ['array', 256, ['_LIST_ENTRY']]],
} ],
'_HARDWARE_PTE' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Write' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Owner' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'WriteThrough' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'CacheDisable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long long')]],
'Accessed' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long long')]],
'Dirty' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long long')]],
'LargePage' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long long')]],
'Global' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'reserved0' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'PageFrameNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 40, native_type='unsigned long long')]],
'reserved1' : [ 0x0, ['BitField', dict(start_bit = 40, end_bit = 52, native_type='unsigned long long')]],
'SoftwareWsIndex' : [ 0x0, ['BitField', dict(start_bit = 52, end_bit = 63, native_type='unsigned long long')]],
'NoExecute' : [ 0x0, ['BitField', dict(start_bit = 63, end_bit = 64, native_type='unsigned long long')]],
} ],
'_WOW64_PROCESS' : [ 0x8, {
'Wow64' : [ 0x0, ['pointer64', ['void']]],
} ],
'_PEB_LDR_DATA' : [ 0x48, {
'Length' : [ 0x0, ['unsigned long']],
'Initialized' : [ 0x4, ['unsigned char']],
'SsHandle' : [ 0x8, ['pointer64', ['void']]],
'InLoadOrderModuleList' : [ 0x10, ['_LIST_ENTRY']],
'InMemoryOrderModuleList' : [ 0x20, ['_LIST_ENTRY']],
'InInitializationOrderModuleList' : [ 0x30, ['_LIST_ENTRY']],
'EntryInProgress' : [ 0x40, ['pointer64', ['void']]],
} ],
'_DBGKD_SWITCH_PARTITION' : [ 0x4, {
'Partition' : [ 0x0, ['unsigned long']],
} ],
'_DBGKD_GET_VERSION32' : [ 0x28, {
'MajorVersion' : [ 0x0, ['unsigned short']],
'MinorVersion' : [ 0x2, ['unsigned short']],
'ProtocolVersion' : [ 0x4, ['unsigned short']],
'Flags' : [ 0x6, ['unsigned short']],
'KernBase' : [ 0x8, ['unsigned long']],
'PsLoadedModuleList' : [ 0xc, ['unsigned long']],
'MachineType' : [ 0x10, ['unsigned short']],
'ThCallbackStack' : [ 0x12, ['unsigned short']],
'NextCallback' : [ 0x14, ['unsigned short']],
'FramePointer' : [ 0x16, ['unsigned short']],
'KiCallUserMode' : [ 0x18, ['unsigned long']],
'KeUserCallbackDispatcher' : [ 0x1c, ['unsigned long']],
'BreakpointWithStatus' : [ 0x20, ['unsigned long']],
'DebuggerDataList' : [ 0x24, ['unsigned long']],
} ],
'_MM_PAGED_POOL_INFO' : [ 0x40, {
'PagedPoolAllocationMap' : [ 0x0, ['pointer64', ['_RTL_BITMAP']]],
'EndOfPagedPoolBitmap' : [ 0x8, ['pointer64', ['_RTL_BITMAP']]],
'FirstPteForPagedPool' : [ 0x10, ['pointer64', ['_MMPTE']]],
'LastPteForPagedPool' : [ 0x18, ['pointer64', ['_MMPTE']]],
'NextPdeForPagedPoolExpansion' : [ 0x20, ['pointer64', ['_MMPTE']]],
'PagedPoolHint' : [ 0x28, ['unsigned long']],
'PagedPoolCommit' : [ 0x30, ['unsigned long long']],
'AllocatedPagedPool' : [ 0x38, ['unsigned long long']],
} ],
'_INTERLOCK_SEQ' : [ 0x8, {
'Depth' : [ 0x0, ['unsigned short']],
'FreeEntryOffset' : [ 0x2, ['unsigned short']],
'OffsetAndDepth' : [ 0x0, ['unsigned long']],
'Sequence' : [ 0x4, ['unsigned long']],
'Exchg' : [ 0x0, ['long long']],
} ],
'_VPB' : [ 0x60, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'Flags' : [ 0x4, ['unsigned short']],
'VolumeLabelLength' : [ 0x6, ['unsigned short']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'RealDevice' : [ 0x10, ['pointer64', ['_DEVICE_OBJECT']]],
'SerialNumber' : [ 0x18, ['unsigned long']],
'ReferenceCount' : [ 0x1c, ['unsigned long']],
'VolumeLabel' : [ 0x20, ['array', 32, ['unsigned short']]],
} ],
'_CACHE_DESCRIPTOR' : [ 0xc, {
'Level' : [ 0x0, ['unsigned char']],
'Associativity' : [ 0x1, ['unsigned char']],
'LineSize' : [ 0x2, ['unsigned short']],
'Size' : [ 0x4, ['unsigned long']],
'Type' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'CacheUnified', 1: 'CacheInstruction', 2: 'CacheData', 3: 'CacheTrace'})]],
} ],
'_MMSESSION' : [ 0x68, {
'SystemSpaceViewLock' : [ 0x0, ['_KGUARDED_MUTEX']],
'SystemSpaceViewLockPointer' : [ 0x38, ['pointer64', ['_KGUARDED_MUTEX']]],
'SystemSpaceViewStart' : [ 0x40, ['pointer64', ['unsigned char']]],
'SystemSpaceViewTable' : [ 0x48, ['pointer64', ['_MMVIEW']]],
'SystemSpaceHashSize' : [ 0x50, ['unsigned long']],
'SystemSpaceHashEntries' : [ 0x54, ['unsigned long']],
'SystemSpaceHashKey' : [ 0x58, ['unsigned long']],
'BitmapFailures' : [ 0x5c, ['unsigned long']],
'SystemSpaceBitMap' : [ 0x60, ['pointer64', ['_RTL_BITMAP']]],
} ],
'_GENERIC_MAPPING' : [ 0x10, {
'GenericRead' : [ 0x0, ['unsigned long']],
'GenericWrite' : [ 0x4, ['unsigned long']],
'GenericExecute' : [ 0x8, ['unsigned long']],
'GenericAll' : [ 0xc, ['unsigned long']],
} ],
'_DBGKD_RESTORE_BREAKPOINT' : [ 0x4, {
'BreakPointHandle' : [ 0x0, ['unsigned long']],
} ],
'_EXCEPTION_REGISTRATION_RECORD' : [ 0x10, {
'Next' : [ 0x0, ['pointer64', ['_EXCEPTION_REGISTRATION_RECORD']]],
'Handler' : [ 0x8, ['pointer64', ['void']]],
} ],
'_SEP_AUDIT_POLICY_OVERLAY' : [ 0x8, {
'PolicyBits' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 36, native_type='unsigned long long')]],
'SetBit' : [ 0x0, ['BitField', dict(start_bit = 36, end_bit = 37, native_type='unsigned long long')]],
} ],
'_POOL_TRACKER_BIG_PAGES' : [ 0x18, {
'Va' : [ 0x0, ['pointer64', ['void']]],
'Key' : [ 0x8, ['unsigned long']],
'NumberOfPages' : [ 0xc, ['unsigned long']],
'QuotaObject' : [ 0x10, ['pointer64', ['void']]],
} ],
'_PROCESS_WS_WATCH_INFORMATION' : [ 0x10, {
'FaultingPc' : [ 0x0, ['pointer64', ['void']]],
'FaultingVa' : [ 0x8, ['pointer64', ['void']]],
} ],
'_MMPTE_SUBSECTION' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Unused0' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 5, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Unused1' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 16, native_type='unsigned long long')]],
'SubsectionAddress' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 64, native_type='long long')]],
} ],
'_VI_DEADLOCK_NODE' : [ 0xd0, {
'Parent' : [ 0x0, ['pointer64', ['_VI_DEADLOCK_NODE']]],
'ChildrenList' : [ 0x8, ['_LIST_ENTRY']],
'SiblingsList' : [ 0x18, ['_LIST_ENTRY']],
'ResourceList' : [ 0x28, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x28, ['_LIST_ENTRY']],
'Root' : [ 0x38, ['pointer64', ['_VI_DEADLOCK_RESOURCE']]],
'ThreadEntry' : [ 0x40, ['pointer64', ['_VI_DEADLOCK_THREAD']]],
'Active' : [ 0x48, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'OnlyTryAcquireUsed' : [ 0x48, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'ReleasedOutOfOrder' : [ 0x48, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'SequenceNumber' : [ 0x48, ['BitField', dict(start_bit = 3, end_bit = 32, native_type='unsigned long')]],
'StackTrace' : [ 0x50, ['array', 8, ['pointer64', ['void']]]],
'ParentStackTrace' : [ 0x90, ['array', 8, ['pointer64', ['void']]]],
} ],
'_SECURITY_QUALITY_OF_SERVICE' : [ 0xc, {
'Length' : [ 0x0, ['unsigned long']],
'ImpersonationLevel' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'SecurityAnonymous', 1: 'SecurityIdentification', 2: 'SecurityImpersonation', 3: 'SecurityDelegation'})]],
'ContextTrackingMode' : [ 0x8, ['unsigned char']],
'EffectiveOnly' : [ 0x9, ['unsigned char']],
} ],
'_UNEXPECTED_INTERRUPT' : [ 0x10, {
'PushImmOp' : [ 0x0, ['unsigned char']],
'PushImm' : [ 0x1, ['unsigned long']],
'PushRbp' : [ 0x5, ['unsigned char']],
'JmpOp' : [ 0x6, ['unsigned char']],
'JmpOffset' : [ 0x7, ['long']],
} ],
'_CONTEXT' : [ 0x4d0, {
'P1Home' : [ 0x0, ['unsigned long long']],
'P2Home' : [ 0x8, ['unsigned long long']],
'P3Home' : [ 0x10, ['unsigned long long']],
'P4Home' : [ 0x18, ['unsigned long long']],
'P5Home' : [ 0x20, ['unsigned long long']],
'P6Home' : [ 0x28, ['unsigned long long']],
'ContextFlags' : [ 0x30, ['unsigned long']],
'MxCsr' : [ 0x34, ['unsigned long']],
'SegCs' : [ 0x38, ['unsigned short']],
'SegDs' : [ 0x3a, ['unsigned short']],
'SegEs' : [ 0x3c, ['unsigned short']],
'SegFs' : [ 0x3e, ['unsigned short']],
'SegGs' : [ 0x40, ['unsigned short']],
'SegSs' : [ 0x42, ['unsigned short']],
'EFlags' : [ 0x44, ['unsigned long']],
'Dr0' : [ 0x48, ['unsigned long long']],
'Dr1' : [ 0x50, ['unsigned long long']],
'Dr2' : [ 0x58, ['unsigned long long']],
'Dr3' : [ 0x60, ['unsigned long long']],
'Dr6' : [ 0x68, ['unsigned long long']],
'Dr7' : [ 0x70, ['unsigned long long']],
'Rax' : [ 0x78, ['unsigned long long']],
'Rcx' : [ 0x80, ['unsigned long long']],
'Rdx' : [ 0x88, ['unsigned long long']],
'Rbx' : [ 0x90, ['unsigned long long']],
'Rsp' : [ 0x98, ['unsigned long long']],
'Rbp' : [ 0xa0, ['unsigned long long']],
'Rsi' : [ 0xa8, ['unsigned long long']],
'Rdi' : [ 0xb0, ['unsigned long long']],
'R8' : [ 0xb8, ['unsigned long long']],
'R9' : [ 0xc0, ['unsigned long long']],
'R10' : [ 0xc8, ['unsigned long long']],
'R11' : [ 0xd0, ['unsigned long long']],
'R12' : [ 0xd8, ['unsigned long long']],
'R13' : [ 0xe0, ['unsigned long long']],
'R14' : [ 0xe8, ['unsigned long long']],
'R15' : [ 0xf0, ['unsigned long long']],
'Rip' : [ 0xf8, ['unsigned long long']],
'FltSave' : [ 0x100, ['_XMM_SAVE_AREA32']],
'Header' : [ 0x100, ['array', 2, ['_M128A']]],
'Legacy' : [ 0x120, ['array', 8, ['_M128A']]],
'Xmm0' : [ 0x1a0, ['_M128A']],
'Xmm1' : [ 0x1b0, ['_M128A']],
'Xmm2' : [ 0x1c0, ['_M128A']],
'Xmm3' : [ 0x1d0, ['_M128A']],
'Xmm4' : [ 0x1e0, ['_M128A']],
'Xmm5' : [ 0x1f0, ['_M128A']],
'Xmm6' : [ 0x200, ['_M128A']],
'Xmm7' : [ 0x210, ['_M128A']],
'Xmm8' : [ 0x220, ['_M128A']],
'Xmm9' : [ 0x230, ['_M128A']],
'Xmm10' : [ 0x240, ['_M128A']],
'Xmm11' : [ 0x250, ['_M128A']],
'Xmm12' : [ 0x260, ['_M128A']],
'Xmm13' : [ 0x270, ['_M128A']],
'Xmm14' : [ 0x280, ['_M128A']],
'Xmm15' : [ 0x290, ['_M128A']],
'VectorRegister' : [ 0x300, ['array', 26, ['_M128A']]],
'VectorControl' : [ 0x4a0, ['unsigned long long']],
'DebugControl' : [ 0x4a8, ['unsigned long long']],
'LastBranchToRip' : [ 0x4b0, ['unsigned long long']],
'LastBranchFromRip' : [ 0x4b8, ['unsigned long long']],
'LastExceptionToRip' : [ 0x4c0, ['unsigned long long']],
'LastExceptionFromRip' : [ 0x4c8, ['unsigned long long']],
} ],
'_MMPTE_HARDWARE_LARGEPAGE' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Write' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Owner' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'WriteThrough' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'CacheDisable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long long')]],
'Accessed' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long long')]],
'Dirty' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long long')]],
'LargePage' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long long')]],
'Global' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'CopyOnWrite' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'reserved0' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'PAT' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long long')]],
'reserved1' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 21, native_type='unsigned long long')]],
'PageFrameNumber' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 40, native_type='unsigned long long')]],
'reserved2' : [ 0x0, ['BitField', dict(start_bit = 40, end_bit = 64, native_type='unsigned long long')]],
} ],
'_DBGKD_QUERY_SPECIAL_CALLS' : [ 0x4, {
'NumberOfSpecialCalls' : [ 0x0, ['unsigned long']],
} ],
'CMP_OFFSET_ARRAY' : [ 0x18, {
'FileOffset' : [ 0x0, ['unsigned long']],
'DataBuffer' : [ 0x8, ['pointer64', ['void']]],
'DataLength' : [ 0x10, ['unsigned long']],
} ],
'_PCI_PDO_EXTENSION' : [ 0x120, {
'Next' : [ 0x0, ['pointer64', ['_PCI_PDO_EXTENSION']]],
'ExtensionType' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {1768116272: 'PciPdoExtensionType', 1768116273: 'PciFdoExtensionType', 1768116274: 'PciArb_Io', 1768116275: 'PciArb_Memory', 1768116276: 'PciArb_Interrupt', 1768116277: 'PciArb_BusNumber', 1768116278: 'PciTrans_Interrupt', 1768116279: 'PciInterface_BusHandler', 1768116280: 'PciInterface_IntRouteHandler', 1768116281: 'PciInterface_PciCb', 1768116282: 'PciInterface_LegacyDeviceDetection', 1768116283: 'PciInterface_PmeHandler', 1768116284: 'PciInterface_DevicePresent', 1768116285: 'PciInterface_NativeIde', 1768116286: 'PciInterface_Location', 1768116287: 'PciInterface_AgpTarget'})]],
'IrpDispatchTable' : [ 0x10, ['pointer64', ['_PCI_MJ_DISPATCH_TABLE']]],
'DeviceState' : [ 0x18, ['unsigned char']],
'TentativeNextState' : [ 0x19, ['unsigned char']],
'SecondaryExtLock' : [ 0x20, ['_KEVENT']],
'Slot' : [ 0x38, ['_PCI_SLOT_NUMBER']],
'PhysicalDeviceObject' : [ 0x40, ['pointer64', ['_DEVICE_OBJECT']]],
'ParentFdoExtension' : [ 0x48, ['pointer64', ['_PCI_FDO_EXTENSION']]],
'SecondaryExtension' : [ 0x50, ['_SINGLE_LIST_ENTRY']],
'BusInterfaceReferenceCount' : [ 0x58, ['unsigned long']],
'AgpInterfaceReferenceCount' : [ 0x5c, ['unsigned long']],
'VendorId' : [ 0x60, ['unsigned short']],
'DeviceId' : [ 0x62, ['unsigned short']],
'SubsystemVendorId' : [ 0x64, ['unsigned short']],
'SubsystemId' : [ 0x66, ['unsigned short']],
'RevisionId' : [ 0x68, ['unsigned char']],
'ProgIf' : [ 0x69, ['unsigned char']],
'SubClass' : [ 0x6a, ['unsigned char']],
'BaseClass' : [ 0x6b, ['unsigned char']],
'AdditionalResourceCount' : [ 0x6c, ['unsigned char']],
'AdjustedInterruptLine' : [ 0x6d, ['unsigned char']],
'InterruptPin' : [ 0x6e, ['unsigned char']],
'RawInterruptLine' : [ 0x6f, ['unsigned char']],
'CapabilitiesPtr' : [ 0x70, ['unsigned char']],
'SavedLatencyTimer' : [ 0x71, ['unsigned char']],
'SavedCacheLineSize' : [ 0x72, ['unsigned char']],
'HeaderType' : [ 0x73, ['unsigned char']],
'NotPresent' : [ 0x74, ['unsigned char']],
'ReportedMissing' : [ 0x75, ['unsigned char']],
'ExpectedWritebackFailure' : [ 0x76, ['unsigned char']],
'NoTouchPmeEnable' : [ 0x77, ['unsigned char']],
'LegacyDriver' : [ 0x78, ['unsigned char']],
'UpdateHardware' : [ 0x79, ['unsigned char']],
'MovedDevice' : [ 0x7a, ['unsigned char']],
'DisablePowerDown' : [ 0x7b, ['unsigned char']],
'NeedsHotPlugConfiguration' : [ 0x7c, ['unsigned char']],
'IDEInNativeMode' : [ 0x7d, ['unsigned char']],
'BIOSAllowsIDESwitchToNativeMode' : [ 0x7e, ['unsigned char']],
'IoSpaceUnderNativeIdeControl' : [ 0x7f, ['unsigned char']],
'OnDebugPath' : [ 0x80, ['unsigned char']],
'IoSpaceNotRequired' : [ 0x81, ['unsigned char']],
'PowerState' : [ 0x88, ['PCI_POWER_STATE']],
'Dependent' : [ 0xd8, ['PCI_HEADER_TYPE_DEPENDENT']],
'HackFlags' : [ 0xe0, ['unsigned long long']],
'Resources' : [ 0xe8, ['pointer64', ['PCI_FUNCTION_RESOURCES']]],
'BridgeFdoExtension' : [ 0xf0, ['pointer64', ['_PCI_FDO_EXTENSION']]],
'NextBridge' : [ 0xf8, ['pointer64', ['_PCI_PDO_EXTENSION']]],
'NextHashEntry' : [ 0x100, ['pointer64', ['_PCI_PDO_EXTENSION']]],
'Lock' : [ 0x108, ['_PCI_LOCK']],
'PowerCapabilities' : [ 0x118, ['_PCI_PMC']],
'TargetAgpCapabilityId' : [ 0x11a, ['unsigned char']],
'CommandEnables' : [ 0x11c, ['unsigned short']],
'InitialCommand' : [ 0x11e, ['unsigned short']],
} ],
'_HMAP_DIRECTORY' : [ 0x2000, {
'Directory' : [ 0x0, ['array', 1024, ['pointer64', ['_HMAP_TABLE']]]],
} ],
'_NT_TIB32' : [ 0x1c, {
'ExceptionList' : [ 0x0, ['unsigned long']],
'StackBase' : [ 0x4, ['unsigned long']],
'StackLimit' : [ 0x8, ['unsigned long']],
'SubSystemTib' : [ 0xc, ['unsigned long']],
'FiberData' : [ 0x10, ['unsigned long']],
'Version' : [ 0x10, ['unsigned long']],
'ArbitraryUserPointer' : [ 0x14, ['unsigned long']],
'Self' : [ 0x18, ['unsigned long']],
} ],
'_SECURITY_DESCRIPTOR' : [ 0x28, {
'Revision' : [ 0x0, ['unsigned char']],
'Sbz1' : [ 0x1, ['unsigned char']],
'Control' : [ 0x2, ['unsigned short']],
'Owner' : [ 0x8, ['pointer64', ['void']]],
'Group' : [ 0x10, ['pointer64', ['void']]],
'Sacl' : [ 0x18, ['pointer64', ['_ACL']]],
'Dacl' : [ 0x20, ['pointer64', ['_ACL']]],
} ],
'__unnamed_16a5' : [ 0x10, {
'UserData' : [ 0x0, ['pointer64', ['void']]],
'Owner' : [ 0x8, ['pointer64', ['void']]],
} ],
'__unnamed_16a7' : [ 0x10, {
'ListHead' : [ 0x0, ['_LIST_ENTRY']],
} ],
'_RTLP_RANGE_LIST_ENTRY' : [ 0x38, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
'Allocated' : [ 0x10, ['__unnamed_16a5']],
'Merged' : [ 0x10, ['__unnamed_16a7']],
'Attributes' : [ 0x20, ['unsigned char']],
'PublicFlags' : [ 0x21, ['unsigned char']],
'PrivateFlags' : [ 0x22, ['unsigned short']],
'ListEntry' : [ 0x28, ['_LIST_ENTRY']],
} ],
'_KAPC_STATE' : [ 0x30, {
'ApcListHead' : [ 0x0, ['array', 2, ['_LIST_ENTRY']]],
'Process' : [ 0x20, ['pointer64', ['_KPROCESS']]],
'KernelApcInProgress' : [ 0x28, ['unsigned char']],
'KernelApcPending' : [ 0x29, ['unsigned char']],
'UserApcPending' : [ 0x2a, ['unsigned char']],
} ],
'_HEAP_STOP_ON_VALUES' : [ 0x30, {
'AllocAddress' : [ 0x0, ['unsigned long long']],
'AllocTag' : [ 0x8, ['_HEAP_STOP_ON_TAG']],
'ReAllocAddress' : [ 0x10, ['unsigned long long']],
'ReAllocTag' : [ 0x18, ['_HEAP_STOP_ON_TAG']],
'FreeAddress' : [ 0x20, ['unsigned long long']],
'FreeTag' : [ 0x28, ['_HEAP_STOP_ON_TAG']],
} ],
'_DEVICE_RELATIONS' : [ 0x10, {
'Count' : [ 0x0, ['unsigned long']],
'Objects' : [ 0x8, ['array', 1, ['pointer64', ['_DEVICE_OBJECT']]]],
} ],
'_DEVICE_MAP' : [ 0x38, {
'DosDevicesDirectory' : [ 0x0, ['pointer64', ['_OBJECT_DIRECTORY']]],
'GlobalDosDevicesDirectory' : [ 0x8, ['pointer64', ['_OBJECT_DIRECTORY']]],
'ReferenceCount' : [ 0x10, ['unsigned long']],
'DriveMap' : [ 0x14, ['unsigned long']],
'DriveType' : [ 0x18, ['array', 32, ['unsigned char']]],
} ],
'_HEAP_PSEUDO_TAG_ENTRY' : [ 0x10, {
'Allocs' : [ 0x0, ['unsigned long']],
'Frees' : [ 0x4, ['unsigned long']],
'Size' : [ 0x8, ['unsigned long long']],
} ],
'_IO_RESOURCE_LIST' : [ 0x28, {
'Version' : [ 0x0, ['unsigned short']],
'Revision' : [ 0x2, ['unsigned short']],
'Count' : [ 0x4, ['unsigned long']],
'Descriptors' : [ 0x8, ['array', 1, ['_IO_RESOURCE_DESCRIPTOR']]],
} ],
'_MMBANKED_SECTION' : [ 0x38, {
'BasePhysicalPage' : [ 0x0, ['unsigned long long']],
'BasedPte' : [ 0x8, ['pointer64', ['_MMPTE']]],
'BankSize' : [ 0x10, ['unsigned long']],
'BankShift' : [ 0x14, ['unsigned long']],
'BankedRoutine' : [ 0x18, ['pointer64', ['void']]],
'Context' : [ 0x20, ['pointer64', ['void']]],
'CurrentMappedPte' : [ 0x28, ['pointer64', ['_MMPTE']]],
'BankTemplate' : [ 0x30, ['array', 1, ['_MMPTE']]],
} ],
'_RTL_CRITICAL_SECTION' : [ 0x28, {
'DebugInfo' : [ 0x0, ['pointer64', ['_RTL_CRITICAL_SECTION_DEBUG']]],
'LockCount' : [ 0x8, ['long']],
'RecursionCount' : [ 0xc, ['long']],
'OwningThread' : [ 0x10, ['pointer64', ['void']]],
'LockSemaphore' : [ 0x18, ['pointer64', ['void']]],
'SpinCount' : [ 0x20, ['unsigned long long']],
} ],
'_KTSS64' : [ 0x68, {
'Reserved0' : [ 0x0, ['unsigned long']],
'Rsp0' : [ 0x4, ['unsigned long long']],
'Rsp1' : [ 0xc, ['unsigned long long']],
'Rsp2' : [ 0x14, ['unsigned long long']],
'Ist' : [ 0x1c, ['array', 8, ['unsigned long long']]],
'Reserved1' : [ 0x5c, ['unsigned long long']],
'Reserved2' : [ 0x64, ['unsigned short']],
'IoMapBase' : [ 0x66, ['unsigned short']],
} ],
'__unnamed_16d2' : [ 0x5, {
'Acquired' : [ 0x0, ['unsigned char']],
'CacheLineSize' : [ 0x1, ['unsigned char']],
'LatencyTimer' : [ 0x2, ['unsigned char']],
'EnablePERR' : [ 0x3, ['unsigned char']],
'EnableSERR' : [ 0x4, ['unsigned char']],
} ],
'_PCI_FDO_EXTENSION' : [ 0x130, {
'List' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'ExtensionType' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {1768116272: 'PciPdoExtensionType', 1768116273: 'PciFdoExtensionType', 1768116274: 'PciArb_Io', 1768116275: 'PciArb_Memory', 1768116276: 'PciArb_Interrupt', 1768116277: 'PciArb_BusNumber', 1768116278: 'PciTrans_Interrupt', 1768116279: 'PciInterface_BusHandler', 1768116280: 'PciInterface_IntRouteHandler', 1768116281: 'PciInterface_PciCb', 1768116282: 'PciInterface_LegacyDeviceDetection', 1768116283: 'PciInterface_PmeHandler', 1768116284: 'PciInterface_DevicePresent', 1768116285: 'PciInterface_NativeIde', 1768116286: 'PciInterface_Location', 1768116287: 'PciInterface_AgpTarget'})]],
'IrpDispatchTable' : [ 0x10, ['pointer64', ['_PCI_MJ_DISPATCH_TABLE']]],
'DeviceState' : [ 0x18, ['unsigned char']],
'TentativeNextState' : [ 0x19, ['unsigned char']],
'SecondaryExtLock' : [ 0x20, ['_KEVENT']],
'PhysicalDeviceObject' : [ 0x38, ['pointer64', ['_DEVICE_OBJECT']]],
'FunctionalDeviceObject' : [ 0x40, ['pointer64', ['_DEVICE_OBJECT']]],
'AttachedDeviceObject' : [ 0x48, ['pointer64', ['_DEVICE_OBJECT']]],
'ChildListLock' : [ 0x50, ['_KEVENT']],
'ChildPdoList' : [ 0x68, ['pointer64', ['_PCI_PDO_EXTENSION']]],
'BusRootFdoExtension' : [ 0x70, ['pointer64', ['_PCI_FDO_EXTENSION']]],
'ParentFdoExtension' : [ 0x78, ['pointer64', ['_PCI_FDO_EXTENSION']]],
'ChildBridgePdoList' : [ 0x80, ['pointer64', ['_PCI_PDO_EXTENSION']]],
'PciBusInterface' : [ 0x88, ['pointer64', ['_PCI_BUS_INTERFACE_STANDARD']]],
'MaxSubordinateBus' : [ 0x90, ['unsigned char']],
'BusHandler' : [ 0x98, ['pointer64', ['_BUS_HANDLER']]],
'BaseBus' : [ 0xa0, ['unsigned char']],
'Fake' : [ 0xa1, ['unsigned char']],
'ChildDelete' : [ 0xa2, ['unsigned char']],
'Scanned' : [ 0xa3, ['unsigned char']],
'ArbitersInitialized' : [ 0xa4, ['unsigned char']],
'BrokenVideoHackApplied' : [ 0xa5, ['unsigned char']],
'Hibernated' : [ 0xa6, ['unsigned char']],
'PowerState' : [ 0xa8, ['PCI_POWER_STATE']],
'SecondaryExtension' : [ 0xf8, ['_SINGLE_LIST_ENTRY']],
'ChildWaitWakeCount' : [ 0x100, ['unsigned long']],
'PreservedConfig' : [ 0x108, ['pointer64', ['_PCI_COMMON_CONFIG']]],
'Lock' : [ 0x110, ['_PCI_LOCK']],
'HotPlugParameters' : [ 0x120, ['__unnamed_16d2']],
'BusHackFlags' : [ 0x128, ['unsigned long']],
} ],
'__unnamed_16d6' : [ 0xc, {
'Start' : [ 0x0, ['_LARGE_INTEGER']],
'Length' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_16d8' : [ 0x10, {
'Level' : [ 0x0, ['unsigned long']],
'Vector' : [ 0x4, ['unsigned long']],
'Affinity' : [ 0x8, ['unsigned long long']],
} ],
'__unnamed_16da' : [ 0xc, {
'Channel' : [ 0x0, ['unsigned long']],
'Port' : [ 0x4, ['unsigned long']],
'Reserved1' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_16dc' : [ 0xc, {
'Data' : [ 0x0, ['array', 3, ['unsigned long']]],
} ],
'__unnamed_16de' : [ 0xc, {
'Start' : [ 0x0, ['unsigned long']],
'Length' : [ 0x4, ['unsigned long']],
'Reserved' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_16e0' : [ 0xc, {
'DataSize' : [ 0x0, ['unsigned long']],
'Reserved1' : [ 0x4, ['unsigned long']],
'Reserved2' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_16e2' : [ 0x10, {
'Generic' : [ 0x0, ['__unnamed_16d6']],
'Port' : [ 0x0, ['__unnamed_16d6']],
'Interrupt' : [ 0x0, ['__unnamed_16d8']],
'Memory' : [ 0x0, ['__unnamed_16d6']],
'Dma' : [ 0x0, ['__unnamed_16da']],
'DevicePrivate' : [ 0x0, ['__unnamed_16dc']],
'BusNumber' : [ 0x0, ['__unnamed_16de']],
'DeviceSpecificData' : [ 0x0, ['__unnamed_16e0']],
} ],
'_CM_PARTIAL_RESOURCE_DESCRIPTOR' : [ 0x14, {
'Type' : [ 0x0, ['unsigned char']],
'ShareDisposition' : [ 0x1, ['unsigned char']],
'Flags' : [ 0x2, ['unsigned short']],
'u' : [ 0x4, ['__unnamed_16e2']],
} ],
'_SYSPTES_HEADER' : [ 0x18, {
'ListHead' : [ 0x0, ['_LIST_ENTRY']],
'Count' : [ 0x10, ['unsigned long long']],
} ],
'_WAIT_CONTEXT_BLOCK' : [ 0x48, {
'WaitQueueEntry' : [ 0x0, ['_KDEVICE_QUEUE_ENTRY']],
'DeviceRoutine' : [ 0x18, ['pointer64', ['void']]],
'DeviceContext' : [ 0x20, ['pointer64', ['void']]],
'NumberOfMapRegisters' : [ 0x28, ['unsigned long']],
'DeviceObject' : [ 0x30, ['pointer64', ['void']]],
'CurrentIrp' : [ 0x38, ['pointer64', ['void']]],
'BufferChainingDpc' : [ 0x40, ['pointer64', ['_KDPC']]],
} ],
'_REQUEST_MAILBOX' : [ 0x40, {
'RequestSummary' : [ 0x0, ['long long']],
'RequestPacket' : [ 0x8, ['_KREQUEST_PACKET']],
'Virtual' : [ 0x8, ['array', 7, ['pointer64', ['void']]]],
} ],
'_CM_KEY_CONTROL_BLOCK' : [ 0xb0, {
'RefCount' : [ 0x0, ['unsigned long']],
'ExtFlags' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long')]],
'PrivateAlloc' : [ 0x4, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'Delete' : [ 0x4, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'DelayedCloseIndex' : [ 0x4, ['BitField', dict(start_bit = 10, end_bit = 22, native_type='unsigned long')]],
'TotalLevels' : [ 0x4, ['BitField', dict(start_bit = 22, end_bit = 32, native_type='unsigned long')]],
'KeyHash' : [ 0x8, ['_CM_KEY_HASH']],
'ConvKey' : [ 0x8, ['unsigned long']],
'NextHash' : [ 0x10, ['pointer64', ['_CM_KEY_HASH']]],
'KeyHive' : [ 0x18, ['pointer64', ['_HHIVE']]],
'KeyCell' : [ 0x20, ['unsigned long']],
'ParentKcb' : [ 0x28, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'NameBlock' : [ 0x30, ['pointer64', ['_CM_NAME_CONTROL_BLOCK']]],
'CachedSecurity' : [ 0x38, ['pointer64', ['_CM_KEY_SECURITY_CACHE']]],
'ValueCache' : [ 0x40, ['_CACHED_CHILD_LIST']],
'IndexHint' : [ 0x50, ['pointer64', ['_CM_INDEX_HINT_BLOCK']]],
'HashKey' : [ 0x50, ['unsigned long']],
'SubKeyCount' : [ 0x50, ['unsigned long']],
'KeyBodyListHead' : [ 0x58, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x58, ['_LIST_ENTRY']],
'KeyBodyArray' : [ 0x68, ['array', 4, ['pointer64', ['_CM_KEY_BODY']]]],
'DelayCloseEntry' : [ 0x88, ['pointer64', ['void']]],
'KcbLastWriteTime' : [ 0x90, ['_LARGE_INTEGER']],
'KcbMaxNameLen' : [ 0x98, ['unsigned short']],
'KcbMaxValueNameLen' : [ 0x9a, ['unsigned short']],
'KcbMaxValueDataLen' : [ 0x9c, ['unsigned long']],
'KcbUserFlags' : [ 0xa0, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned long')]],
'KcbVirtControlFlags' : [ 0xa0, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned long')]],
'KcbDebug' : [ 0xa0, ['BitField', dict(start_bit = 8, end_bit = 16, native_type='unsigned long')]],
'Flags' : [ 0xa0, ['BitField', dict(start_bit = 16, end_bit = 32, native_type='unsigned long')]],
'RealKeyName' : [ 0xa8, ['pointer64', ['unsigned char']]],
} ],
'_M128A' : [ 0x10, {
'Low' : [ 0x0, ['unsigned long long']],
'High' : [ 0x8, ['long long']],
} ],
'_PCI_BUS_INTERFACE_STANDARD' : [ 0x40, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'Context' : [ 0x8, ['pointer64', ['void']]],
'InterfaceReference' : [ 0x10, ['pointer64', ['void']]],
'InterfaceDereference' : [ 0x18, ['pointer64', ['void']]],
'ReadConfig' : [ 0x20, ['pointer64', ['void']]],
'WriteConfig' : [ 0x28, ['pointer64', ['void']]],
'PinToLine' : [ 0x30, ['pointer64', ['void']]],
'LineToPin' : [ 0x38, ['pointer64', ['void']]],
} ],
'_WORK_QUEUE_ITEM' : [ 0x20, {
'List' : [ 0x0, ['_LIST_ENTRY']],
'WorkerRoutine' : [ 0x10, ['pointer64', ['void']]],
'Parameter' : [ 0x18, ['pointer64', ['void']]],
} ],
'_PI_RESOURCE_ARBITER_ENTRY' : [ 0x70, {
'DeviceArbiterList' : [ 0x0, ['_LIST_ENTRY']],
'ResourceType' : [ 0x10, ['unsigned char']],
'ArbiterInterface' : [ 0x18, ['pointer64', ['_ARBITER_INTERFACE']]],
'Level' : [ 0x20, ['unsigned long']],
'ResourceList' : [ 0x28, ['_LIST_ENTRY']],
'BestResourceList' : [ 0x38, ['_LIST_ENTRY']],
'BestConfig' : [ 0x48, ['_LIST_ENTRY']],
'ActiveArbiterList' : [ 0x58, ['_LIST_ENTRY']],
'State' : [ 0x68, ['unsigned char']],
'ResourcesChanged' : [ 0x69, ['unsigned char']],
} ],
'_SEP_AUDIT_POLICY_CATEGORIES' : [ 0x8, {
'System' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned long')]],
'Logon' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 8, native_type='unsigned long')]],
'ObjectAccess' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 12, native_type='unsigned long')]],
'PrivilegeUse' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 16, native_type='unsigned long')]],
'DetailedTracking' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 20, native_type='unsigned long')]],
'PolicyChange' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 24, native_type='unsigned long')]],
'AccountManagement' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 28, native_type='unsigned long')]],
'DirectoryServiceAccess' : [ 0x0, ['BitField', dict(start_bit = 28, end_bit = 32, native_type='unsigned long')]],
'AccountLogon' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 4, native_type='unsigned long')]],
} ],
'_CM_KEY_HASH' : [ 0x20, {
'ConvKey' : [ 0x0, ['unsigned long']],
'NextHash' : [ 0x8, ['pointer64', ['_CM_KEY_HASH']]],
'KeyHive' : [ 0x10, ['pointer64', ['_HHIVE']]],
'KeyCell' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_1726' : [ 0x8, {
'MasterIrp' : [ 0x0, ['pointer64', ['_IRP']]],
'IrpCount' : [ 0x0, ['long']],
'SystemBuffer' : [ 0x0, ['pointer64', ['void']]],
} ],
'__unnamed_172b' : [ 0x10, {
'UserApcRoutine' : [ 0x0, ['pointer64', ['void']]],
'UserApcContext' : [ 0x8, ['pointer64', ['void']]],
} ],
'__unnamed_172d' : [ 0x10, {
'AsynchronousParameters' : [ 0x0, ['__unnamed_172b']],
'AllocationSize' : [ 0x0, ['_LARGE_INTEGER']],
} ],
'__unnamed_1735' : [ 0x50, {
'DeviceQueueEntry' : [ 0x0, ['_KDEVICE_QUEUE_ENTRY']],
'DriverContext' : [ 0x0, ['array', 4, ['pointer64', ['void']]]],
'Thread' : [ 0x20, ['pointer64', ['_ETHREAD']]],
'AuxiliaryBuffer' : [ 0x28, ['pointer64', ['unsigned char']]],
'ListEntry' : [ 0x30, ['_LIST_ENTRY']],
'CurrentStackLocation' : [ 0x40, ['pointer64', ['_IO_STACK_LOCATION']]],
'PacketType' : [ 0x40, ['unsigned long']],
'OriginalFileObject' : [ 0x48, ['pointer64', ['_FILE_OBJECT']]],
} ],
'__unnamed_1737' : [ 0x58, {
'Overlay' : [ 0x0, ['__unnamed_1735']],
'Apc' : [ 0x0, ['_KAPC']],
'CompletionKey' : [ 0x0, ['pointer64', ['void']]],
} ],
'_IRP' : [ 0xd0, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['unsigned short']],
'MdlAddress' : [ 0x8, ['pointer64', ['_MDL']]],
'Flags' : [ 0x10, ['unsigned long']],
'AssociatedIrp' : [ 0x18, ['__unnamed_1726']],
'ThreadListEntry' : [ 0x20, ['_LIST_ENTRY']],
'IoStatus' : [ 0x30, ['_IO_STATUS_BLOCK']],
'RequestorMode' : [ 0x40, ['unsigned char']],
'PendingReturned' : [ 0x41, ['unsigned char']],
'StackCount' : [ 0x42, ['unsigned char']],
'CurrentLocation' : [ 0x43, ['unsigned char']],
'Cancel' : [ 0x44, ['unsigned char']],
'CancelIrql' : [ 0x45, ['unsigned char']],
'ApcEnvironment' : [ 0x46, ['unsigned char']],
'AllocationFlags' : [ 0x47, ['unsigned char']],
'UserIosb' : [ 0x48, ['pointer64', ['_IO_STATUS_BLOCK']]],
'UserEvent' : [ 0x50, ['pointer64', ['_KEVENT']]],
'Overlay' : [ 0x58, ['__unnamed_172d']],
'CancelRoutine' : [ 0x68, ['pointer64', ['void']]],
'UserBuffer' : [ 0x70, ['pointer64', ['void']]],
'Tail' : [ 0x78, ['__unnamed_1737']],
} ],
'_PCI_LOCK' : [ 0x10, {
'Atom' : [ 0x0, ['unsigned long long']],
'OldIrql' : [ 0x8, ['unsigned char']],
} ],
'_CM_KEY_SECURITY_CACHE_ENTRY' : [ 0x10, {
'Cell' : [ 0x0, ['unsigned long']],
'CachedSecurity' : [ 0x8, ['pointer64', ['_CM_KEY_SECURITY_CACHE']]],
} ],
'_GDI_TEB_BATCH32' : [ 0x4e0, {
'Offset' : [ 0x0, ['unsigned long']],
'HDC' : [ 0x4, ['unsigned long']],
'Buffer' : [ 0x8, ['array', 310, ['unsigned long']]],
} ],
'__unnamed_1744' : [ 0x4, {
'PhysicalAddress' : [ 0x0, ['unsigned long']],
'VirtualSize' : [ 0x0, ['unsigned long']],
} ],
'_IMAGE_SECTION_HEADER' : [ 0x28, {
'Name' : [ 0x0, ['array', 8, ['unsigned char']]],
'Misc' : [ 0x8, ['__unnamed_1744']],
'VirtualAddress' : [ 0xc, ['unsigned long']],
'SizeOfRawData' : [ 0x10, ['unsigned long']],
'PointerToRawData' : [ 0x14, ['unsigned long']],
'PointerToRelocations' : [ 0x18, ['unsigned long']],
'PointerToLinenumbers' : [ 0x1c, ['unsigned long']],
'NumberOfRelocations' : [ 0x20, ['unsigned short']],
'NumberOfLinenumbers' : [ 0x22, ['unsigned short']],
'Characteristics' : [ 0x24, ['unsigned long']],
} ],
'__unnamed_174a' : [ 0x4, {
'Level' : [ 0x0, ['unsigned long']],
} ],
'_POP_ACTION_TRIGGER' : [ 0x10, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PolicyDeviceSystemButton', 1: 'PolicyDeviceThermalZone', 2: 'PolicyDeviceBattery', 3: 'PolicyInitiatePowerActionAPI', 4: 'PolicySetPowerStateAPI', 5: 'PolicyImmediateDozeS4', 6: 'PolicySystemIdle'})]],
'Flags' : [ 0x4, ['unsigned char']],
'Spare' : [ 0x5, ['array', 3, ['unsigned char']]],
'Battery' : [ 0x8, ['__unnamed_174a']],
'Wait' : [ 0x8, ['pointer64', ['_POP_TRIGGER_WAIT']]],
} ],
'_ETIMER' : [ 0x108, {
'KeTimer' : [ 0x0, ['_KTIMER']],
'TimerApc' : [ 0x40, ['_KAPC']],
'TimerDpc' : [ 0x98, ['_KDPC']],
'ActiveTimerListEntry' : [ 0xd8, ['_LIST_ENTRY']],
'Lock' : [ 0xe8, ['unsigned long long']],
'Period' : [ 0xf0, ['long']],
'ApcAssociated' : [ 0xf4, ['unsigned char']],
'WakeTimer' : [ 0xf5, ['unsigned char']],
'WakeTimerListEntry' : [ 0xf8, ['_LIST_ENTRY']],
} ],
'_DBGKD_BREAKPOINTEX' : [ 0x8, {
'BreakPointCount' : [ 0x0, ['unsigned long']],
'ContinueStatus' : [ 0x4, ['long']],
} ],
'_IMAGE_OPTIONAL_HEADER64' : [ 0xf0, {
'Magic' : [ 0x0, ['unsigned short']],
'MajorLinkerVersion' : [ 0x2, ['unsigned char']],
'MinorLinkerVersion' : [ 0x3, ['unsigned char']],
'SizeOfCode' : [ 0x4, ['unsigned long']],
'SizeOfInitializedData' : [ 0x8, ['unsigned long']],
'SizeOfUninitializedData' : [ 0xc, ['unsigned long']],
'AddressOfEntryPoint' : [ 0x10, ['unsigned long']],
'BaseOfCode' : [ 0x14, ['unsigned long']],
'ImageBase' : [ 0x18, ['unsigned long long']],
'SectionAlignment' : [ 0x20, ['unsigned long']],
'FileAlignment' : [ 0x24, ['unsigned long']],
'MajorOperatingSystemVersion' : [ 0x28, ['unsigned short']],
'MinorOperatingSystemVersion' : [ 0x2a, ['unsigned short']],
'MajorImageVersion' : [ 0x2c, ['unsigned short']],
'MinorImageVersion' : [ 0x2e, ['unsigned short']],
'MajorSubsystemVersion' : [ 0x30, ['unsigned short']],
'MinorSubsystemVersion' : [ 0x32, ['unsigned short']],
'Win32VersionValue' : [ 0x34, ['unsigned long']],
'SizeOfImage' : [ 0x38, ['unsigned long']],
'SizeOfHeaders' : [ 0x3c, ['unsigned long']],
'CheckSum' : [ 0x40, ['unsigned long']],
'Subsystem' : [ 0x44, ['unsigned short']],
'DllCharacteristics' : [ 0x46, ['unsigned short']],
'SizeOfStackReserve' : [ 0x48, ['unsigned long long']],
'SizeOfStackCommit' : [ 0x50, ['unsigned long long']],
'SizeOfHeapReserve' : [ 0x58, ['unsigned long long']],
'SizeOfHeapCommit' : [ 0x60, ['unsigned long long']],
'LoaderFlags' : [ 0x68, ['unsigned long']],
'NumberOfRvaAndSizes' : [ 0x6c, ['unsigned long']],
'DataDirectory' : [ 0x70, ['array', 16, ['_IMAGE_DATA_DIRECTORY']]],
} ],
'_CM_CELL_REMAP_BLOCK' : [ 0x8, {
'OldCell' : [ 0x0, ['unsigned long']],
'NewCell' : [ 0x4, ['unsigned long']],
} ],
'_PCI_PMC' : [ 0x2, {
'Version' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 3, native_type='unsigned char')]],
'PMEClock' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'Rsvd1' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'DeviceSpecificInitialization' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'Rsvd2' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 8, native_type='unsigned char')]],
'Support' : [ 0x1, ['_PM_SUPPORT']],
} ],
'_DBGKD_CONTINUE' : [ 0x4, {
'ContinueStatus' : [ 0x0, ['long']],
} ],
'__unnamed_1764' : [ 0x8, {
'VirtualAddress' : [ 0x0, ['pointer64', ['void']]],
'Long' : [ 0x0, ['unsigned long long']],
'e1' : [ 0x0, ['_MMWSLENTRY']],
} ],
'_MMWSLE' : [ 0x8, {
'u1' : [ 0x0, ['__unnamed_1764']],
} ],
'_EXCEPTION_POINTERS' : [ 0x10, {
'ExceptionRecord' : [ 0x0, ['pointer64', ['_EXCEPTION_RECORD']]],
'ContextRecord' : [ 0x8, ['pointer64', ['_CONTEXT']]],
} ],
'__unnamed_176c' : [ 0x8, {
'Balance' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 2, native_type='long long')]],
'Parent' : [ 0x0, ['pointer64', ['_MMADDRESS_NODE']]],
} ],
'_MMADDRESS_NODE' : [ 0x28, {
'u1' : [ 0x0, ['__unnamed_176c']],
'LeftChild' : [ 0x8, ['pointer64', ['_MMADDRESS_NODE']]],
'RightChild' : [ 0x10, ['pointer64', ['_MMADDRESS_NODE']]],
'StartingVpn' : [ 0x18, ['unsigned long long']],
'EndingVpn' : [ 0x20, ['unsigned long long']],
} ],
'_RTL_USER_PROCESS_PARAMETERS' : [ 0x3f0, {
'MaximumLength' : [ 0x0, ['unsigned long']],
'Length' : [ 0x4, ['unsigned long']],
'Flags' : [ 0x8, ['unsigned long']],
'DebugFlags' : [ 0xc, ['unsigned long']],
'ConsoleHandle' : [ 0x10, ['pointer64', ['void']]],
'ConsoleFlags' : [ 0x18, ['unsigned long']],
'StandardInput' : [ 0x20, ['pointer64', ['void']]],
'StandardOutput' : [ 0x28, ['pointer64', ['void']]],
'StandardError' : [ 0x30, ['pointer64', ['void']]],
'CurrentDirectory' : [ 0x38, ['_CURDIR']],
'DllPath' : [ 0x50, ['_UNICODE_STRING']],
'ImagePathName' : [ 0x60, ['_UNICODE_STRING']],
'CommandLine' : [ 0x70, ['_UNICODE_STRING']],
'Environment' : [ 0x80, ['pointer64', ['void']]],
'StartingX' : [ 0x88, ['unsigned long']],
'StartingY' : [ 0x8c, ['unsigned long']],
'CountX' : [ 0x90, ['unsigned long']],
'CountY' : [ 0x94, ['unsigned long']],
'CountCharsX' : [ 0x98, ['unsigned long']],
'CountCharsY' : [ 0x9c, ['unsigned long']],
'FillAttribute' : [ 0xa0, ['unsigned long']],
'WindowFlags' : [ 0xa4, ['unsigned long']],
'ShowWindowFlags' : [ 0xa8, ['unsigned long']],
'WindowTitle' : [ 0xb0, ['_UNICODE_STRING']],
'DesktopInfo' : [ 0xc0, ['_UNICODE_STRING']],
'ShellInfo' : [ 0xd0, ['_UNICODE_STRING']],
'RuntimeData' : [ 0xe0, ['_UNICODE_STRING']],
'CurrentDirectores' : [ 0xf0, ['array', 32, ['_RTL_DRIVE_LETTER_CURDIR']]],
} ],
'_CACHE_MANAGER_CALLBACKS' : [ 0x20, {
'AcquireForLazyWrite' : [ 0x0, ['pointer64', ['void']]],
'ReleaseFromLazyWrite' : [ 0x8, ['pointer64', ['void']]],
'AcquireForReadAhead' : [ 0x10, ['pointer64', ['void']]],
'ReleaseFromReadAhead' : [ 0x18, ['pointer64', ['void']]],
} ],
'_KSPECIAL_REGISTERS' : [ 0xd8, {
'Cr0' : [ 0x0, ['unsigned long long']],
'Cr2' : [ 0x8, ['unsigned long long']],
'Cr3' : [ 0x10, ['unsigned long long']],
'Cr4' : [ 0x18, ['unsigned long long']],
'KernelDr0' : [ 0x20, ['unsigned long long']],
'KernelDr1' : [ 0x28, ['unsigned long long']],
'KernelDr2' : [ 0x30, ['unsigned long long']],
'KernelDr3' : [ 0x38, ['unsigned long long']],
'KernelDr6' : [ 0x40, ['unsigned long long']],
'KernelDr7' : [ 0x48, ['unsigned long long']],
'Gdtr' : [ 0x50, ['_KDESCRIPTOR']],
'Idtr' : [ 0x60, ['_KDESCRIPTOR']],
'Tr' : [ 0x70, ['unsigned short']],
'Ldtr' : [ 0x72, ['unsigned short']],
'MxCsr' : [ 0x74, ['unsigned long']],
'DebugControl' : [ 0x78, ['unsigned long long']],
'LastBranchToRip' : [ 0x80, ['unsigned long long']],
'LastBranchFromRip' : [ 0x88, ['unsigned long long']],
'LastExceptionToRip' : [ 0x90, ['unsigned long long']],
'LastExceptionFromRip' : [ 0x98, ['unsigned long long']],
'Cr8' : [ 0xa0, ['unsigned long long']],
'MsrGsBase' : [ 0xa8, ['unsigned long long']],
'MsrGsSwap' : [ 0xb0, ['unsigned long long']],
'MsrStar' : [ 0xb8, ['unsigned long long']],
'MsrLStar' : [ 0xc0, ['unsigned long long']],
'MsrCStar' : [ 0xc8, ['unsigned long long']],
'MsrSyscallMask' : [ 0xd0, ['unsigned long long']],
} ],
'_CELL_DATA' : [ 0x50, {
'u' : [ 0x0, ['_u']],
} ],
'_SE_AUDIT_PROCESS_CREATION_INFO' : [ 0x8, {
'ImageFileName' : [ 0x0, ['pointer64', ['_OBJECT_NAME_INFORMATION']]],
} ],
'_HEAP_ENTRY_EXTRA' : [ 0x10, {
'AllocatorBackTraceIndex' : [ 0x0, ['unsigned short']],
'TagIndex' : [ 0x2, ['unsigned short']],
'Settable' : [ 0x8, ['unsigned long long']],
'ZeroInit' : [ 0x0, ['unsigned long long']],
'ZeroInit1' : [ 0x8, ['unsigned long long']],
} ],
'_VI_DEADLOCK_RESOURCE' : [ 0xf8, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'VfDeadlockUnknown', 1: 'VfDeadlockMutex', 2: 'VfDeadlockMutexAbandoned', 3: 'VfDeadlockFastMutex', 4: 'VfDeadlockFastMutexUnsafe', 5: 'VfDeadlockSpinLock', 6: 'VfDeadlockQueuedSpinLock', 7: 'VfDeadlockTypeMaximum'})]],
'NodeCount' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'RecursionCount' : [ 0x4, ['BitField', dict(start_bit = 16, end_bit = 32, native_type='unsigned long')]],
'ResourceAddress' : [ 0x8, ['pointer64', ['void']]],
'ThreadOwner' : [ 0x10, ['pointer64', ['_VI_DEADLOCK_THREAD']]],
'ResourceList' : [ 0x18, ['_LIST_ENTRY']],
'HashChainList' : [ 0x28, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x28, ['_LIST_ENTRY']],
'StackTrace' : [ 0x38, ['array', 8, ['pointer64', ['void']]]],
'LastAcquireTrace' : [ 0x78, ['array', 8, ['pointer64', ['void']]]],
'LastReleaseTrace' : [ 0xb8, ['array', 8, ['pointer64', ['void']]]],
} ],
'_OBJECT_HANDLE_COUNT_ENTRY' : [ 0x10, {
'Process' : [ 0x0, ['pointer64', ['_EPROCESS']]],
'HandleCount' : [ 0x8, ['unsigned long']],
} ],
'_CLIENT_ID' : [ 0x10, {
'UniqueProcess' : [ 0x0, ['pointer64', ['void']]],
'UniqueThread' : [ 0x8, ['pointer64', ['void']]],
} ],
'_PEB_FREE_BLOCK' : [ 0x10, {
'Next' : [ 0x0, ['pointer64', ['_PEB_FREE_BLOCK']]],
'Size' : [ 0x8, ['unsigned long']],
} ],
'_PO_DEVICE_NOTIFY' : [ 0x48, {
'Link' : [ 0x0, ['_LIST_ENTRY']],
'TargetDevice' : [ 0x10, ['pointer64', ['_DEVICE_OBJECT']]],
'WakeNeeded' : [ 0x18, ['unsigned char']],
'OrderLevel' : [ 0x19, ['unsigned char']],
'DeviceObject' : [ 0x20, ['pointer64', ['_DEVICE_OBJECT']]],
'Node' : [ 0x28, ['pointer64', ['void']]],
'DeviceName' : [ 0x30, ['pointer64', ['unsigned short']]],
'DriverName' : [ 0x38, ['pointer64', ['unsigned short']]],
'ChildCount' : [ 0x40, ['unsigned long']],
'ActiveChild' : [ 0x44, ['unsigned long']],
} ],
'_MMPFNLIST' : [ 0x20, {
'Total' : [ 0x0, ['unsigned long long']],
'ListName' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'ZeroedPageList', 1: 'FreePageList', 2: 'StandbyPageList', 3: 'ModifiedPageList', 4: 'ModifiedNoWritePageList', 5: 'BadPageList', 6: 'ActiveAndValid', 7: 'TransitionPage'})]],
'Flink' : [ 0x10, ['unsigned long long']],
'Blink' : [ 0x18, ['unsigned long long']],
} ],
'__unnamed_1795' : [ 0x4, {
'Spare' : [ 0x0, ['array', 4, ['unsigned char']]],
} ],
'__unnamed_1797' : [ 0x4, {
'PrimaryBus' : [ 0x0, ['unsigned char']],
'SecondaryBus' : [ 0x1, ['unsigned char']],
'SubordinateBus' : [ 0x2, ['unsigned char']],
'SubtractiveDecode' : [ 0x3, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'IsaBitSet' : [ 0x3, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'VgaBitSet' : [ 0x3, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'WeChangedBusNumbers' : [ 0x3, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'IsaBitRequired' : [ 0x3, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
} ],
'PCI_HEADER_TYPE_DEPENDENT' : [ 0x4, {
'type0' : [ 0x0, ['__unnamed_1795']],
'type1' : [ 0x0, ['__unnamed_1797']],
'type2' : [ 0x0, ['__unnamed_1797']],
} ],
'_DBGKD_GET_SET_BUS_DATA' : [ 0x14, {
'BusDataType' : [ 0x0, ['unsigned long']],
'BusNumber' : [ 0x4, ['unsigned long']],
'SlotNumber' : [ 0x8, ['unsigned long']],
'Offset' : [ 0xc, ['unsigned long']],
'Length' : [ 0x10, ['unsigned long']],
} ],
'_KINTERRUPT' : [ 0x80, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'InterruptListEntry' : [ 0x8, ['_LIST_ENTRY']],
'ServiceRoutine' : [ 0x18, ['pointer64', ['void']]],
'ServiceContext' : [ 0x20, ['pointer64', ['void']]],
'SpinLock' : [ 0x28, ['unsigned long long']],
'TickCount' : [ 0x30, ['unsigned long']],
'ActualLock' : [ 0x38, ['pointer64', ['unsigned long long']]],
'DispatchAddress' : [ 0x40, ['pointer64', ['void']]],
'Vector' : [ 0x48, ['unsigned long']],
'Irql' : [ 0x4c, ['unsigned char']],
'SynchronizeIrql' : [ 0x4d, ['unsigned char']],
'FloatingSave' : [ 0x4e, ['unsigned char']],
'Connected' : [ 0x4f, ['unsigned char']],
'Number' : [ 0x50, ['unsigned char']],
'ShareVector' : [ 0x51, ['unsigned char']],
'Mode' : [ 0x54, ['Enumeration', dict(target = 'long', choices = {0: 'LevelSensitive', 1: 'Latched'})]],
'ServiceCount' : [ 0x58, ['unsigned long']],
'DispatchCount' : [ 0x5c, ['unsigned long']],
'TrapFrame' : [ 0x60, ['pointer64', ['_KTRAP_FRAME']]],
'Reserved' : [ 0x68, ['pointer64', ['void']]],
'DispatchCode' : [ 0x70, ['array', 4, ['unsigned long']]],
} ],
'_SECURITY_CLIENT_CONTEXT' : [ 0x48, {
'SecurityQos' : [ 0x0, ['_SECURITY_QUALITY_OF_SERVICE']],
'ClientToken' : [ 0x10, ['pointer64', ['void']]],
'DirectlyAccessClientToken' : [ 0x18, ['unsigned char']],
'DirectAccessEffectiveOnly' : [ 0x19, ['unsigned char']],
'ServerIsRemote' : [ 0x1a, ['unsigned char']],
'ClientTokenControl' : [ 0x1c, ['_TOKEN_CONTROL']],
} ],
'_BITMAP_RANGE' : [ 0x30, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'BasePage' : [ 0x10, ['long long']],
'FirstDirtyPage' : [ 0x18, ['unsigned long']],
'LastDirtyPage' : [ 0x1c, ['unsigned long']],
'DirtyPages' : [ 0x20, ['unsigned long']],
'Bitmap' : [ 0x28, ['pointer64', ['unsigned long']]],
} ],
'_PCI_ARBITER_INSTANCE' : [ 0x190, {
'Header' : [ 0x0, ['PCI_SECONDARY_EXTENSION']],
'Interface' : [ 0x18, ['pointer64', ['_PCI_INTERFACE']]],
'BusFdoExtension' : [ 0x20, ['pointer64', ['_PCI_FDO_EXTENSION']]],
'InstanceName' : [ 0x28, ['array', 24, ['unsigned short']]],
'CommonInstance' : [ 0x58, ['_ARBITER_INSTANCE']],
} ],
'_NT_TIB64' : [ 0x38, {
'ExceptionList' : [ 0x0, ['unsigned long long']],
'StackBase' : [ 0x8, ['unsigned long long']],
'StackLimit' : [ 0x10, ['unsigned long long']],
'SubSystemTib' : [ 0x18, ['unsigned long long']],
'FiberData' : [ 0x20, ['unsigned long long']],
'Version' : [ 0x20, ['unsigned long']],
'ArbitraryUserPointer' : [ 0x28, ['unsigned long long']],
'Self' : [ 0x30, ['unsigned long long']],
} ],
'_HANDLE_TRACE_DB_ENTRY' : [ 0xa0, {
'ClientId' : [ 0x0, ['_CLIENT_ID']],
'Handle' : [ 0x10, ['pointer64', ['void']]],
'Type' : [ 0x18, ['unsigned long']],
'StackTrace' : [ 0x20, ['array', 16, ['pointer64', ['void']]]],
} ],
'_BUS_EXTENSION_LIST' : [ 0x10, {
'Next' : [ 0x0, ['pointer64', ['void']]],
'BusExtension' : [ 0x8, ['pointer64', ['_PI_BUS_EXTENSION']]],
} ],
'_PCI_MJ_DISPATCH_TABLE' : [ 0x40, {
'PnpIrpMaximumMinorFunction' : [ 0x0, ['unsigned long']],
'PnpIrpDispatchTable' : [ 0x8, ['pointer64', ['_PCI_MN_DISPATCH_TABLE']]],
'PowerIrpMaximumMinorFunction' : [ 0x10, ['unsigned long']],
'PowerIrpDispatchTable' : [ 0x18, ['pointer64', ['_PCI_MN_DISPATCH_TABLE']]],
'SystemControlIrpDispatchStyle' : [ 0x20, ['Enumeration', dict(target = 'long', choices = {0: 'IRP_COMPLETE', 1: 'IRP_DOWNWARD', 2: 'IRP_UPWARD', 3: 'IRP_DISPATCH'})]],
'SystemControlIrpDispatchFunction' : [ 0x28, ['pointer64', ['void']]],
'OtherIrpDispatchStyle' : [ 0x30, ['Enumeration', dict(target = 'long', choices = {0: 'IRP_COMPLETE', 1: 'IRP_DOWNWARD', 2: 'IRP_UPWARD', 3: 'IRP_DISPATCH'})]],
'OtherIrpDispatchFunction' : [ 0x38, ['pointer64', ['void']]],
} ],
'_POP_TRIGGER_WAIT' : [ 0x38, {
'Event' : [ 0x0, ['_KEVENT']],
'Status' : [ 0x18, ['long']],
'Link' : [ 0x20, ['_LIST_ENTRY']],
'Trigger' : [ 0x30, ['pointer64', ['_POP_ACTION_TRIGGER']]],
} ],
'_IO_TIMER' : [ 0x30, {
'Type' : [ 0x0, ['short']],
'TimerFlag' : [ 0x2, ['short']],
'TimerList' : [ 0x8, ['_LIST_ENTRY']],
'TimerRoutine' : [ 0x18, ['pointer64', ['void']]],
'Context' : [ 0x20, ['pointer64', ['void']]],
'DeviceObject' : [ 0x28, ['pointer64', ['_DEVICE_OBJECT']]],
} ],
'_MMWSLENTRY' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'LockedInWs' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'LockedInMemory' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 8, native_type='unsigned long long')]],
'Hashed' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long long')]],
'Direct' : [ 0x0, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long long')]],
'Age' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 12, native_type='unsigned long long')]],
'VirtualPageNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 64, native_type='unsigned long long')]],
} ],
'__unnamed_17da' : [ 0x4, {
'BaseMiddle' : [ 0x0, ['unsigned char']],
'Flags1' : [ 0x1, ['unsigned char']],
'Flags2' : [ 0x2, ['unsigned char']],
'BaseHigh' : [ 0x3, ['unsigned char']],
} ],
'__unnamed_17de' : [ 0x4, {
'BaseMiddle' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 8, native_type='unsigned long')]],
'Type' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 13, native_type='unsigned long')]],
'Dpl' : [ 0x0, ['BitField', dict(start_bit = 13, end_bit = 15, native_type='unsigned long')]],
'Present' : [ 0x0, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'LimitHigh' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 20, native_type='unsigned long')]],
'System' : [ 0x0, ['BitField', dict(start_bit = 20, end_bit = 21, native_type='unsigned long')]],
'LongMode' : [ 0x0, ['BitField', dict(start_bit = 21, end_bit = 22, native_type='unsigned long')]],
'DefaultBig' : [ 0x0, ['BitField', dict(start_bit = 22, end_bit = 23, native_type='unsigned long')]],
'Granularity' : [ 0x0, ['BitField', dict(start_bit = 23, end_bit = 24, native_type='unsigned long')]],
'BaseHigh' : [ 0x0, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
} ],
'_KGDTENTRY64' : [ 0x10, {
'LimitLow' : [ 0x0, ['unsigned short']],
'BaseLow' : [ 0x2, ['unsigned short']],
'Bytes' : [ 0x4, ['__unnamed_17da']],
'Bits' : [ 0x4, ['__unnamed_17de']],
'BaseUpper' : [ 0x8, ['unsigned long']],
'MustBeZero' : [ 0xc, ['unsigned long']],
'Alignment' : [ 0x0, ['unsigned long long']],
} ],
'_OBJECT_DIRECTORY' : [ 0x140, {
'HashBuckets' : [ 0x0, ['array', 37, ['pointer64', ['_OBJECT_DIRECTORY_ENTRY']]]],
'Lock' : [ 0x128, ['_EX_PUSH_LOCK']],
'DeviceMap' : [ 0x130, ['pointer64', ['_DEVICE_MAP']]],
'SessionId' : [ 0x138, ['unsigned long']],
} ],
'_WMI_CLIENT_CONTEXT' : [ 0x4, {
'ProcessorNumber' : [ 0x0, ['unsigned char']],
'Alignment' : [ 0x1, ['unsigned char']],
'LoggerId' : [ 0x2, ['unsigned short']],
} ],
'_HEAP_LOOKASIDE' : [ 0x40, {
'ListHead' : [ 0x0, ['_SLIST_HEADER']],
'Depth' : [ 0x10, ['unsigned short']],
'MaximumDepth' : [ 0x12, ['unsigned short']],
'TotalAllocates' : [ 0x14, ['unsigned long']],
'AllocateMisses' : [ 0x18, ['unsigned long']],
'TotalFrees' : [ 0x1c, ['unsigned long']],
'FreeMisses' : [ 0x20, ['unsigned long']],
'LastTotalAllocates' : [ 0x24, ['unsigned long']],
'LastAllocateMisses' : [ 0x28, ['unsigned long']],
'Counters' : [ 0x2c, ['array', 2, ['unsigned long']]],
} ],
'_CLIENT_ID64' : [ 0x10, {
'UniqueProcess' : [ 0x0, ['unsigned long long']],
'UniqueThread' : [ 0x8, ['unsigned long long']],
} ],
'_KDPC_DATA' : [ 0x20, {
'DpcListHead' : [ 0x0, ['_LIST_ENTRY']],
'DpcLock' : [ 0x10, ['unsigned long long']],
'DpcQueueDepth' : [ 0x18, ['long']],
'DpcCount' : [ 0x1c, ['unsigned long']],
} ],
'_ARBITER_INTERFACE' : [ 0x30, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'Context' : [ 0x8, ['pointer64', ['void']]],
'InterfaceReference' : [ 0x10, ['pointer64', ['void']]],
'InterfaceDereference' : [ 0x18, ['pointer64', ['void']]],
'ArbiterHandler' : [ 0x20, ['pointer64', ['void']]],
'Flags' : [ 0x28, ['unsigned long']],
} ],
'_AMD64_DBGKD_CONTROL_SET' : [ 0x1c, {
'TraceFlag' : [ 0x0, ['unsigned long']],
'Dr7' : [ 0x4, ['unsigned long long']],
'CurrentSymbolStart' : [ 0xc, ['unsigned long long']],
'CurrentSymbolEnd' : [ 0x14, ['unsigned long long']],
} ],
'_CALL_PERFORMANCE_DATA' : [ 0x408, {
'SpinLock' : [ 0x0, ['unsigned long long']],
'HashTable' : [ 0x8, ['array', 64, ['_LIST_ENTRY']]],
} ],
'_MMWSL' : [ 0x80, {
'FirstFree' : [ 0x0, ['unsigned long']],
'FirstDynamic' : [ 0x4, ['unsigned long']],
'LastEntry' : [ 0x8, ['unsigned long']],
'NextSlot' : [ 0xc, ['unsigned long']],
'Wsle' : [ 0x10, ['pointer64', ['_MMWSLE']]],
'LastInitializedWsle' : [ 0x18, ['unsigned long']],
'NonDirectCount' : [ 0x1c, ['unsigned long']],
'HashTable' : [ 0x20, ['pointer64', ['_MMWSLE_HASH']]],
'HashTableSize' : [ 0x28, ['unsigned long']],
'NumberOfCommittedPageTables' : [ 0x2c, ['unsigned long']],
'HashTableStart' : [ 0x30, ['pointer64', ['void']]],
'HighestPermittedHashAddress' : [ 0x38, ['pointer64', ['void']]],
'NumberOfImageWaiters' : [ 0x40, ['unsigned long']],
'VadBitMapHint' : [ 0x44, ['unsigned long']],
'HighestUserAddress' : [ 0x48, ['pointer64', ['void']]],
'MaximumUserPageTablePages' : [ 0x50, ['unsigned long']],
'MaximumUserPageDirectoryPages' : [ 0x54, ['unsigned long']],
'CommittedPageTables' : [ 0x58, ['pointer64', ['unsigned long']]],
'NumberOfCommittedPageDirectories' : [ 0x60, ['unsigned long']],
'CommittedPageDirectories' : [ 0x68, ['pointer64', ['unsigned long']]],
'NumberOfCommittedPageDirectoryParents' : [ 0x70, ['unsigned long']],
'CommittedPageDirectoryParents' : [ 0x78, ['array', 1, ['unsigned long long']]],
} ],
'_ACTIVATION_CONTEXT_STACK' : [ 0x28, {
'ActiveFrame' : [ 0x0, ['pointer64', ['_RTL_ACTIVATION_CONTEXT_STACK_FRAME']]],
'FrameListCache' : [ 0x8, ['_LIST_ENTRY']],
'Flags' : [ 0x18, ['unsigned long']],
'NextCookieSequenceNumber' : [ 0x1c, ['unsigned long']],
'StackId' : [ 0x20, ['unsigned long']],
} ],
'_RTL_DRIVE_LETTER_CURDIR' : [ 0x18, {
'Flags' : [ 0x0, ['unsigned short']],
'Length' : [ 0x2, ['unsigned short']],
'TimeStamp' : [ 0x4, ['unsigned long']],
'DosPath' : [ 0x8, ['_STRING']],
} ],
'PCI_FUNCTION_RESOURCES' : [ 0x170, {
'Limit' : [ 0x0, ['array', 7, ['_IO_RESOURCE_DESCRIPTOR']]],
'Current' : [ 0xe0, ['array', 7, ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
} ],
'_WNODE_HEADER' : [ 0x30, {
'BufferSize' : [ 0x0, ['unsigned long']],
'ProviderId' : [ 0x4, ['unsigned long']],
'HistoricalContext' : [ 0x8, ['unsigned long long']],
'Version' : [ 0x8, ['unsigned long']],
'Linkage' : [ 0xc, ['unsigned long']],
'CountLost' : [ 0x10, ['unsigned long']],
'KernelHandle' : [ 0x10, ['pointer64', ['void']]],
'TimeStamp' : [ 0x10, ['_LARGE_INTEGER']],
'Guid' : [ 0x18, ['_GUID']],
'ClientContext' : [ 0x28, ['unsigned long']],
'Flags' : [ 0x2c, ['unsigned long']],
} ],
'__unnamed_1811' : [ 0x8, {
'ImageCommitment' : [ 0x0, ['unsigned long long']],
'CreatingProcess' : [ 0x0, ['pointer64', ['_EPROCESS']]],
} ],
'__unnamed_1815' : [ 0x8, {
'ImageInformation' : [ 0x0, ['pointer64', ['_SECTION_IMAGE_INFORMATION']]],
'FirstMappedVa' : [ 0x0, ['pointer64', ['void']]],
} ],
'_SEGMENT' : [ 0x68, {
'ControlArea' : [ 0x0, ['pointer64', ['_CONTROL_AREA']]],
'TotalNumberOfPtes' : [ 0x8, ['unsigned long']],
'NonExtendedPtes' : [ 0xc, ['unsigned long']],
'Spare0' : [ 0x10, ['unsigned long']],
'SizeOfSegment' : [ 0x18, ['unsigned long long']],
'SegmentPteTemplate' : [ 0x20, ['_MMPTE']],
'NumberOfCommittedPages' : [ 0x28, ['unsigned long long']],
'ExtendInfo' : [ 0x30, ['pointer64', ['_MMEXTEND_INFO']]],
'SegmentFlags' : [ 0x38, ['_SEGMENT_FLAGS']],
'BasedAddress' : [ 0x40, ['pointer64', ['void']]],
'u1' : [ 0x48, ['__unnamed_1811']],
'u2' : [ 0x50, ['__unnamed_1815']],
'PrototypePte' : [ 0x58, ['pointer64', ['_MMPTE']]],
'ThePtes' : [ 0x60, ['array', 1, ['_MMPTE']]],
} ],
'_PCI_COMMON_EXTENSION' : [ 0x38, {
'Next' : [ 0x0, ['pointer64', ['void']]],
'ExtensionType' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {1768116272: 'PciPdoExtensionType', 1768116273: 'PciFdoExtensionType', 1768116274: 'PciArb_Io', 1768116275: 'PciArb_Memory', 1768116276: 'PciArb_Interrupt', 1768116277: 'PciArb_BusNumber', 1768116278: 'PciTrans_Interrupt', 1768116279: 'PciInterface_BusHandler', 1768116280: 'PciInterface_IntRouteHandler', 1768116281: 'PciInterface_PciCb', 1768116282: 'PciInterface_LegacyDeviceDetection', 1768116283: 'PciInterface_PmeHandler', 1768116284: 'PciInterface_DevicePresent', 1768116285: 'PciInterface_NativeIde', 1768116286: 'PciInterface_Location', 1768116287: 'PciInterface_AgpTarget'})]],
'IrpDispatchTable' : [ 0x10, ['pointer64', ['_PCI_MJ_DISPATCH_TABLE']]],
'DeviceState' : [ 0x18, ['unsigned char']],
'TentativeNextState' : [ 0x19, ['unsigned char']],
'SecondaryExtLock' : [ 0x20, ['_KEVENT']],
} ],
'_MI_VERIFIER_DRIVER_ENTRY' : [ 0xa0, {
'Links' : [ 0x0, ['_LIST_ENTRY']],
'Loads' : [ 0x10, ['unsigned long']],
'Unloads' : [ 0x14, ['unsigned long']],
'BaseName' : [ 0x18, ['_UNICODE_STRING']],
'StartAddress' : [ 0x28, ['pointer64', ['void']]],
'EndAddress' : [ 0x30, ['pointer64', ['void']]],
'Flags' : [ 0x38, ['unsigned long']],
'Signature' : [ 0x40, ['unsigned long long']],
'PoolPageHeaders' : [ 0x50, ['_SLIST_HEADER']],
'PoolTrackers' : [ 0x60, ['_SLIST_HEADER']],
'CurrentPagedPoolAllocations' : [ 0x70, ['unsigned long']],
'CurrentNonPagedPoolAllocations' : [ 0x74, ['unsigned long']],
'PeakPagedPoolAllocations' : [ 0x78, ['unsigned long']],
'PeakNonPagedPoolAllocations' : [ 0x7c, ['unsigned long']],
'PagedBytes' : [ 0x80, ['unsigned long long']],
'NonPagedBytes' : [ 0x88, ['unsigned long long']],
'PeakPagedBytes' : [ 0x90, ['unsigned long long']],
'PeakNonPagedBytes' : [ 0x98, ['unsigned long long']],
} ],
'_PRIVATE_CACHE_MAP' : [ 0x60, {
'NodeTypeCode' : [ 0x0, ['short']],
'Flags' : [ 0x0, ['_PRIVATE_CACHE_MAP_FLAGS']],
'UlongFlags' : [ 0x0, ['unsigned long']],
'ReadAheadMask' : [ 0x4, ['unsigned long']],
'FileObject' : [ 0x8, ['pointer64', ['_FILE_OBJECT']]],
'FileOffset1' : [ 0x10, ['_LARGE_INTEGER']],
'BeyondLastByte1' : [ 0x18, ['_LARGE_INTEGER']],
'FileOffset2' : [ 0x20, ['_LARGE_INTEGER']],
'BeyondLastByte2' : [ 0x28, ['_LARGE_INTEGER']],
'ReadAheadOffset' : [ 0x30, ['array', 2, ['_LARGE_INTEGER']]],
'ReadAheadLength' : [ 0x40, ['array', 2, ['unsigned long']]],
'ReadAheadSpinLock' : [ 0x48, ['unsigned long long']],
'PrivateLinks' : [ 0x50, ['_LIST_ENTRY']],
} ],
'_RTL_HANDLE_TABLE' : [ 0x30, {
'MaximumNumberOfHandles' : [ 0x0, ['unsigned long']],
'SizeOfHandleTableEntry' : [ 0x4, ['unsigned long']],
'Reserved' : [ 0x8, ['array', 2, ['unsigned long']]],
'FreeHandles' : [ 0x10, ['pointer64', ['_RTL_HANDLE_TABLE_ENTRY']]],
'CommittedHandles' : [ 0x18, ['pointer64', ['_RTL_HANDLE_TABLE_ENTRY']]],
'UnCommittedHandles' : [ 0x20, ['pointer64', ['_RTL_HANDLE_TABLE_ENTRY']]],
'MaxReservedHandles' : [ 0x28, ['pointer64', ['_RTL_HANDLE_TABLE_ENTRY']]],
} ],
'_POP_IDLE_HANDLER' : [ 0x28, {
'Latency' : [ 0x0, ['unsigned long']],
'TimeCheck' : [ 0x4, ['unsigned long']],
'DemoteLimit' : [ 0x8, ['unsigned long']],
'PromoteLimit' : [ 0xc, ['unsigned long']],
'PromoteCount' : [ 0x10, ['unsigned long']],
'Demote' : [ 0x14, ['unsigned char']],
'Promote' : [ 0x15, ['unsigned char']],
'PromotePercent' : [ 0x16, ['unsigned char']],
'DemotePercent' : [ 0x17, ['unsigned char']],
'State' : [ 0x18, ['unsigned char']],
'Spare' : [ 0x19, ['array', 3, ['unsigned char']]],
'IdleFunction' : [ 0x20, ['pointer64', ['void']]],
} ],
'SYSTEM_POWER_CAPABILITIES' : [ 0x4c, {
'PowerButtonPresent' : [ 0x0, ['unsigned char']],
'SleepButtonPresent' : [ 0x1, ['unsigned char']],
'LidPresent' : [ 0x2, ['unsigned char']],
'SystemS1' : [ 0x3, ['unsigned char']],
'SystemS2' : [ 0x4, ['unsigned char']],
'SystemS3' : [ 0x5, ['unsigned char']],
'SystemS4' : [ 0x6, ['unsigned char']],
'SystemS5' : [ 0x7, ['unsigned char']],
'HiberFilePresent' : [ 0x8, ['unsigned char']],
'FullWake' : [ 0x9, ['unsigned char']],
'VideoDimPresent' : [ 0xa, ['unsigned char']],
'ApmPresent' : [ 0xb, ['unsigned char']],
'UpsPresent' : [ 0xc, ['unsigned char']],
'ThermalControl' : [ 0xd, ['unsigned char']],
'ProcessorThrottle' : [ 0xe, ['unsigned char']],
'ProcessorMinThrottle' : [ 0xf, ['unsigned char']],
'ProcessorMaxThrottle' : [ 0x10, ['unsigned char']],
'spare2' : [ 0x11, ['array', 4, ['unsigned char']]],
'DiskSpinDown' : [ 0x15, ['unsigned char']],
'spare3' : [ 0x16, ['array', 8, ['unsigned char']]],
'SystemBatteriesPresent' : [ 0x1e, ['unsigned char']],
'BatteriesAreShortTerm' : [ 0x1f, ['unsigned char']],
'BatteryScale' : [ 0x20, ['array', 3, ['BATTERY_REPORTING_SCALE']]],
'AcOnLineWake' : [ 0x38, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'SoftLidWake' : [ 0x3c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'RtcWake' : [ 0x40, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'MinDeviceWakeState' : [ 0x44, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DefaultLowLatencyWake' : [ 0x48, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
} ],
'_DEVOBJ_EXTENSION' : [ 0x50, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['unsigned short']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'PowerFlags' : [ 0x10, ['unsigned long']],
'Dope' : [ 0x18, ['pointer64', ['_DEVICE_OBJECT_POWER_EXTENSION']]],
'ExtensionFlags' : [ 0x20, ['unsigned long']],
'DeviceNode' : [ 0x28, ['pointer64', ['void']]],
'AttachedTo' : [ 0x30, ['pointer64', ['_DEVICE_OBJECT']]],
'StartIoCount' : [ 0x38, ['long']],
'StartIoKey' : [ 0x3c, ['long']],
'StartIoFlags' : [ 0x40, ['unsigned long']],
'Vpb' : [ 0x48, ['pointer64', ['_VPB']]],
} ],
'_DBGKD_GET_VERSION64' : [ 0x28, {
'MajorVersion' : [ 0x0, ['unsigned short']],
'MinorVersion' : [ 0x2, ['unsigned short']],
'ProtocolVersion' : [ 0x4, ['unsigned char']],
'KdSecondaryVersion' : [ 0x5, ['unsigned char']],
'Flags' : [ 0x6, ['unsigned short']],
'MachineType' : [ 0x8, ['unsigned short']],
'MaxPacketType' : [ 0xa, ['unsigned char']],
'MaxStateChange' : [ 0xb, ['unsigned char']],
'MaxManipulate' : [ 0xc, ['unsigned char']],
'Simulation' : [ 0xd, ['unsigned char']],
'Unused' : [ 0xe, ['array', 1, ['unsigned short']]],
'KernBase' : [ 0x10, ['unsigned long long']],
'PsLoadedModuleList' : [ 0x18, ['unsigned long long']],
'DebuggerDataList' : [ 0x20, ['unsigned long long']],
} ],
'_STRING32' : [ 0x8, {
'Length' : [ 0x0, ['unsigned short']],
'MaximumLength' : [ 0x2, ['unsigned short']],
'Buffer' : [ 0x4, ['unsigned long']],
} ],
'_MMVIEW' : [ 0x10, {
'Entry' : [ 0x0, ['unsigned long long']],
'ControlArea' : [ 0x8, ['pointer64', ['_CONTROL_AREA']]],
} ],
'_KSYSTEM_TIME' : [ 0xc, {
'LowPart' : [ 0x0, ['unsigned long']],
'High1Time' : [ 0x4, ['long']],
'High2Time' : [ 0x8, ['long']],
} ],
'PCI_SECONDARY_EXTENSION' : [ 0x18, {
'List' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'ExtensionType' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {1768116272: 'PciPdoExtensionType', 1768116273: 'PciFdoExtensionType', 1768116274: 'PciArb_Io', 1768116275: 'PciArb_Memory', 1768116276: 'PciArb_Interrupt', 1768116277: 'PciArb_BusNumber', 1768116278: 'PciTrans_Interrupt', 1768116279: 'PciInterface_BusHandler', 1768116280: 'PciInterface_IntRouteHandler', 1768116281: 'PciInterface_PciCb', 1768116282: 'PciInterface_LegacyDeviceDetection', 1768116283: 'PciInterface_PmeHandler', 1768116284: 'PciInterface_DevicePresent', 1768116285: 'PciInterface_NativeIde', 1768116286: 'PciInterface_Location', 1768116287: 'PciInterface_AgpTarget'})]],
'Destructor' : [ 0x10, ['pointer64', ['void']]],
} ],
'__unnamed_1842' : [ 0x30, {
'type0' : [ 0x0, ['_PCI_HEADER_TYPE_0']],
'type1' : [ 0x0, ['_PCI_HEADER_TYPE_1']],
'type2' : [ 0x0, ['_PCI_HEADER_TYPE_2']],
} ],
'_PCI_COMMON_CONFIG' : [ 0x100, {
'VendorID' : [ 0x0, ['unsigned short']],
'DeviceID' : [ 0x2, ['unsigned short']],
'Command' : [ 0x4, ['unsigned short']],
'Status' : [ 0x6, ['unsigned short']],
'RevisionID' : [ 0x8, ['unsigned char']],
'ProgIf' : [ 0x9, ['unsigned char']],
'SubClass' : [ 0xa, ['unsigned char']],
'BaseClass' : [ 0xb, ['unsigned char']],
'CacheLineSize' : [ 0xc, ['unsigned char']],
'LatencyTimer' : [ 0xd, ['unsigned char']],
'HeaderType' : [ 0xe, ['unsigned char']],
'BIST' : [ 0xf, ['unsigned char']],
'u' : [ 0x10, ['__unnamed_1842']],
'DeviceSpecific' : [ 0x40, ['array', 192, ['unsigned char']]],
} ],
'_HEAP_FREE_ENTRY_EXTRA' : [ 0x4, {
'TagIndex' : [ 0x0, ['unsigned short']],
'FreeBackTraceIndex' : [ 0x2, ['unsigned short']],
} ],
'_SECTION_IMAGE_INFORMATION' : [ 0x40, {
'TransferAddress' : [ 0x0, ['pointer64', ['void']]],
'ZeroBits' : [ 0x8, ['unsigned long']],
'MaximumStackSize' : [ 0x10, ['unsigned long long']],
'CommittedStackSize' : [ 0x18, ['unsigned long long']],
'SubSystemType' : [ 0x20, ['unsigned long']],
'SubSystemMinorVersion' : [ 0x24, ['unsigned short']],
'SubSystemMajorVersion' : [ 0x26, ['unsigned short']],
'SubSystemVersion' : [ 0x24, ['unsigned long']],
'GpValue' : [ 0x28, ['unsigned long']],
'ImageCharacteristics' : [ 0x2c, ['unsigned short']],
'DllCharacteristics' : [ 0x2e, ['unsigned short']],
'Machine' : [ 0x30, ['unsigned short']],
'ImageContainsCode' : [ 0x32, ['unsigned char']],
'Spare1' : [ 0x33, ['unsigned char']],
'LoaderFlags' : [ 0x34, ['unsigned long']],
'ImageFileSize' : [ 0x38, ['unsigned long']],
'Reserved' : [ 0x3c, ['array', 1, ['unsigned long']]],
} ],
'_POOL_TRACKER_TABLE' : [ 0x28, {
'Key' : [ 0x0, ['unsigned long']],
'NonPagedAllocs' : [ 0x4, ['unsigned long']],
'NonPagedFrees' : [ 0x8, ['unsigned long']],
'NonPagedBytes' : [ 0x10, ['unsigned long long']],
'PagedAllocs' : [ 0x18, ['unsigned long']],
'PagedFrees' : [ 0x1c, ['unsigned long']],
'PagedBytes' : [ 0x20, ['unsigned long long']],
} ],
'_KNODE' : [ 0x40, {
'DeadStackList' : [ 0x0, ['_SLIST_HEADER']],
'PfnDereferenceSListHead' : [ 0x10, ['_SLIST_HEADER']],
'Alignment' : [ 0x10, ['unsigned long long']],
'ProcessorMask' : [ 0x18, ['unsigned long long']],
'Color' : [ 0x20, ['unsigned char']],
'Seed' : [ 0x21, ['unsigned char']],
'NodeNumber' : [ 0x22, ['unsigned char']],
'Flags' : [ 0x23, ['_flags']],
'MmShiftedColor' : [ 0x24, ['unsigned long']],
'FreeCount' : [ 0x28, ['array', 2, ['unsigned long long']]],
'PfnDeferredList' : [ 0x38, ['pointer64', ['_SLIST_ENTRY']]],
} ],
'_PHYSICAL_MEMORY_DESCRIPTOR' : [ 0x20, {
'NumberOfRuns' : [ 0x0, ['unsigned long']],
'NumberOfPages' : [ 0x8, ['unsigned long long']],
'Run' : [ 0x10, ['array', 1, ['_PHYSICAL_MEMORY_RUN']]],
} ],
'_SEGMENT_FLAGS' : [ 0x8, {
'TotalNumberOfPtes4132' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 10, native_type='unsigned long long')]],
'ExtraSharedWowSubsections' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'LargePages' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'Spare' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 64, native_type='unsigned long long')]],
} ],
'_PI_BUS_EXTENSION' : [ 0x70, {
'Flags' : [ 0x0, ['unsigned long']],
'NumberCSNs' : [ 0x4, ['unsigned char']],
'ReadDataPort' : [ 0x8, ['pointer64', ['unsigned char']]],
'DataPortMapped' : [ 0x10, ['unsigned char']],
'AddressPort' : [ 0x18, ['pointer64', ['unsigned char']]],
'AddrPortMapped' : [ 0x20, ['unsigned char']],
'CommandPort' : [ 0x28, ['pointer64', ['unsigned char']]],
'CmdPortMapped' : [ 0x30, ['unsigned char']],
'NextSlotNumber' : [ 0x34, ['unsigned long']],
'DeviceList' : [ 0x38, ['_SINGLE_LIST_ENTRY']],
'CardList' : [ 0x40, ['_SINGLE_LIST_ENTRY']],
'PhysicalBusDevice' : [ 0x48, ['pointer64', ['_DEVICE_OBJECT']]],
'FunctionalBusDevice' : [ 0x50, ['pointer64', ['_DEVICE_OBJECT']]],
'AttachedDevice' : [ 0x58, ['pointer64', ['_DEVICE_OBJECT']]],
'BusNumber' : [ 0x60, ['unsigned long']],
'SystemPowerState' : [ 0x64, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DevicePowerState' : [ 0x68, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
} ],
'_CLIENT_ID32' : [ 0x8, {
'UniqueProcess' : [ 0x0, ['unsigned long']],
'UniqueThread' : [ 0x4, ['unsigned long']],
} ],
'_VI_DEADLOCK_THREAD' : [ 0x30, {
'Thread' : [ 0x0, ['pointer64', ['_KTHREAD']]],
'CurrentSpinNode' : [ 0x8, ['pointer64', ['_VI_DEADLOCK_NODE']]],
'CurrentOtherNode' : [ 0x10, ['pointer64', ['_VI_DEADLOCK_NODE']]],
'ListEntry' : [ 0x18, ['_LIST_ENTRY']],
'FreeListEntry' : [ 0x18, ['_LIST_ENTRY']],
'NodeCount' : [ 0x28, ['unsigned long']],
'PagingCount' : [ 0x2c, ['unsigned long']],
} ],
'_MMEXTEND_INFO' : [ 0x10, {
'CommittedSize' : [ 0x0, ['unsigned long long']],
'ReferenceCount' : [ 0x8, ['unsigned long']],
} ],
'_GDI_TEB_BATCH64' : [ 0x4e8, {
'Offset' : [ 0x0, ['unsigned long']],
'HDC' : [ 0x8, ['unsigned long long']],
'Buffer' : [ 0x10, ['array', 310, ['unsigned long']]],
} ],
'_IMAGE_DEBUG_DIRECTORY' : [ 0x1c, {
'Characteristics' : [ 0x0, ['unsigned long']],
'TimeDateStamp' : [ 0x4, ['unsigned long']],
'MajorVersion' : [ 0x8, ['unsigned short']],
'MinorVersion' : [ 0xa, ['unsigned short']],
'Type' : [ 0xc, ['unsigned long']],
'SizeOfData' : [ 0x10, ['unsigned long']],
'AddressOfRawData' : [ 0x14, ['unsigned long']],
'PointerToRawData' : [ 0x18, ['unsigned long']],
} ],
'_PCI_INTERFACE' : [ 0x28, {
'InterfaceType' : [ 0x0, ['pointer64', ['_GUID']]],
'MinSize' : [ 0x8, ['unsigned short']],
'MinVersion' : [ 0xa, ['unsigned short']],
'MaxVersion' : [ 0xc, ['unsigned short']],
'Flags' : [ 0xe, ['unsigned short']],
'ReferenceCount' : [ 0x10, ['long']],
'Signature' : [ 0x14, ['Enumeration', dict(target = 'long', choices = {1768116272: 'PciPdoExtensionType', 1768116273: 'PciFdoExtensionType', 1768116274: 'PciArb_Io', 1768116275: 'PciArb_Memory', 1768116276: 'PciArb_Interrupt', 1768116277: 'PciArb_BusNumber', 1768116278: 'PciTrans_Interrupt', 1768116279: 'PciInterface_BusHandler', 1768116280: 'PciInterface_IntRouteHandler', 1768116281: 'PciInterface_PciCb', 1768116282: 'PciInterface_LegacyDeviceDetection', 1768116283: 'PciInterface_PmeHandler', 1768116284: 'PciInterface_DevicePresent', 1768116285: 'PciInterface_NativeIde', 1768116286: 'PciInterface_Location', 1768116287: 'PciInterface_AgpTarget'})]],
'Constructor' : [ 0x18, ['pointer64', ['void']]],
'Initializer' : [ 0x20, ['pointer64', ['void']]],
} ],
'_POP_POWER_ACTION' : [ 0x50, {
'Updates' : [ 0x0, ['unsigned char']],
'State' : [ 0x1, ['unsigned char']],
'Shutdown' : [ 0x2, ['unsigned char']],
'Action' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'PowerActionNone', 1: 'PowerActionReserved', 2: 'PowerActionSleep', 3: 'PowerActionHibernate', 4: 'PowerActionShutdown', 5: 'PowerActionShutdownReset', 6: 'PowerActionShutdownOff', 7: 'PowerActionWarmEject'})]],
'LightestState' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'Flags' : [ 0xc, ['unsigned long']],
'Status' : [ 0x10, ['long']],
'IrpMinor' : [ 0x14, ['unsigned char']],
'SystemState' : [ 0x18, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'NextSystemState' : [ 0x1c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'ShutdownBugCode' : [ 0x20, ['pointer64', ['_POP_SHUTDOWN_BUG_CHECK']]],
'DevState' : [ 0x28, ['pointer64', ['_POP_DEVICE_SYS_STATE']]],
'HiberContext' : [ 0x30, ['pointer64', ['_POP_HIBER_CONTEXT']]],
'LastWakeState' : [ 0x38, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'WakeTime' : [ 0x40, ['unsigned long long']],
'SleepTime' : [ 0x48, ['unsigned long long']],
} ],
'_LPCP_MESSAGE' : [ 0x50, {
'Entry' : [ 0x0, ['_LIST_ENTRY']],
'FreeEntry' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Reserved0' : [ 0x8, ['unsigned long']],
'SenderPort' : [ 0x10, ['pointer64', ['void']]],
'RepliedToThread' : [ 0x18, ['pointer64', ['_ETHREAD']]],
'PortContext' : [ 0x20, ['pointer64', ['void']]],
'Request' : [ 0x28, ['_PORT_MESSAGE']],
} ],
'_MMVAD_SHORT' : [ 0x30, {
'u1' : [ 0x0, ['__unnamed_1180']],
'LeftChild' : [ 0x8, ['pointer64', ['_MMVAD']]],
'RightChild' : [ 0x10, ['pointer64', ['_MMVAD']]],
'StartingVpn' : [ 0x18, ['unsigned long long']],
'EndingVpn' : [ 0x20, ['unsigned long long']],
'u' : [ 0x28, ['__unnamed_1183']],
} ],
'__unnamed_188b' : [ 0x2c, {
'InitialPrivilegeSet' : [ 0x0, ['_INITIAL_PRIVILEGE_SET']],
'PrivilegeSet' : [ 0x0, ['_PRIVILEGE_SET']],
} ],
'_ACCESS_STATE' : [ 0xa0, {
'OperationID' : [ 0x0, ['_LUID']],
'SecurityEvaluated' : [ 0x8, ['unsigned char']],
'GenerateAudit' : [ 0x9, ['unsigned char']],
'GenerateOnClose' : [ 0xa, ['unsigned char']],
'PrivilegesAllocated' : [ 0xb, ['unsigned char']],
'Flags' : [ 0xc, ['unsigned long']],
'RemainingDesiredAccess' : [ 0x10, ['unsigned long']],
'PreviouslyGrantedAccess' : [ 0x14, ['unsigned long']],
'OriginalDesiredAccess' : [ 0x18, ['unsigned long']],
'SubjectSecurityContext' : [ 0x20, ['_SECURITY_SUBJECT_CONTEXT']],
'SecurityDescriptor' : [ 0x40, ['pointer64', ['void']]],
'AuxData' : [ 0x48, ['pointer64', ['void']]],
'Privileges' : [ 0x50, ['__unnamed_188b']],
'AuditPrivileges' : [ 0x7c, ['unsigned char']],
'ObjectName' : [ 0x80, ['_UNICODE_STRING']],
'ObjectTypeName' : [ 0x90, ['_UNICODE_STRING']],
} ],
'_PNP_DEVICE_EVENT_ENTRY' : [ 0x88, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'Argument' : [ 0x10, ['unsigned long']],
'CallerEvent' : [ 0x18, ['pointer64', ['_KEVENT']]],
'Callback' : [ 0x20, ['pointer64', ['void']]],
'Context' : [ 0x28, ['pointer64', ['void']]],
'VetoType' : [ 0x30, ['pointer64', ['Enumeration', dict(target = 'long', choices = {0: 'PNP_VetoTypeUnknown', 1: 'PNP_VetoLegacyDevice', 2: 'PNP_VetoPendingClose', 3: 'PNP_VetoWindowsApp', 4: 'PNP_VetoWindowsService', 5: 'PNP_VetoOutstandingOpen', 6: 'PNP_VetoDevice', 7: 'PNP_VetoDriver', 8: 'PNP_VetoIllegalDeviceRequest', 9: 'PNP_VetoInsufficientPower', 10: 'PNP_VetoNonDisableable', 11: 'PNP_VetoLegacyDriver', 12: 'PNP_VetoInsufficientRights'})]]],
'VetoName' : [ 0x38, ['pointer64', ['_UNICODE_STRING']]],
'Data' : [ 0x40, ['_PLUGPLAY_EVENT_BLOCK']],
} ],
'_PRIVATE_CACHE_MAP_FLAGS' : [ 0x4, {
'DontUse' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'ReadAheadActive' : [ 0x0, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'ReadAheadEnabled' : [ 0x0, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'Available' : [ 0x0, ['BitField', dict(start_bit = 18, end_bit = 32, native_type='unsigned long')]],
} ],
'_PNP_DEVICE_EVENT_LIST' : [ 0x88, {
'Status' : [ 0x0, ['long']],
'EventQueueMutex' : [ 0x8, ['_KMUTANT']],
'Lock' : [ 0x40, ['_KGUARDED_MUTEX']],
'List' : [ 0x78, ['_LIST_ENTRY']],
} ],
'_MMPTE_TRANSITION' : [ 0x8, {
'Valid' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long long')]],
'Write' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long long')]],
'Owner' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long long')]],
'WriteThrough' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long long')]],
'CacheDisable' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long long')]],
'Protection' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 10, native_type='unsigned long long')]],
'Prototype' : [ 0x0, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long long')]],
'Transition' : [ 0x0, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long long')]],
'PageFrameNumber' : [ 0x0, ['BitField', dict(start_bit = 12, end_bit = 40, native_type='unsigned long long')]],
'Unused' : [ 0x0, ['BitField', dict(start_bit = 40, end_bit = 64, native_type='unsigned long long')]],
} ],
'_KREQUEST_PACKET' : [ 0x20, {
'CurrentPacket' : [ 0x0, ['array', 3, ['pointer64', ['void']]]],
'WorkerRoutine' : [ 0x18, ['pointer64', ['void']]],
} ],
'_STRING' : [ 0x10, {
'Length' : [ 0x0, ['unsigned short']],
'MaximumLength' : [ 0x2, ['unsigned short']],
'Buffer' : [ 0x8, ['pointer64', ['unsigned char']]],
} ],
'_flags' : [ 0x1, {
'Removable' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'Fill' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 8, native_type='unsigned char')]],
} ],
'_CM_KEY_SECURITY_CACHE' : [ 0x38, {
'Cell' : [ 0x0, ['unsigned long']],
'ConvKey' : [ 0x4, ['unsigned long']],
'List' : [ 0x8, ['_LIST_ENTRY']],
'DescriptorLength' : [ 0x18, ['unsigned long']],
'RealRefCount' : [ 0x1c, ['unsigned long']],
'Descriptor' : [ 0x20, ['_SECURITY_DESCRIPTOR_RELATIVE']],
} ],
'_PROCESSOR_POWER_POLICY_INFO' : [ 0x14, {
'TimeCheck' : [ 0x0, ['unsigned long']],
'DemoteLimit' : [ 0x4, ['unsigned long']],
'PromoteLimit' : [ 0x8, ['unsigned long']],
'DemotePercent' : [ 0xc, ['unsigned char']],
'PromotePercent' : [ 0xd, ['unsigned char']],
'Spare' : [ 0xe, ['array', 2, ['unsigned char']]],
'AllowDemotion' : [ 0x10, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'AllowPromotion' : [ 0x10, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'Reserved' : [ 0x10, ['BitField', dict(start_bit = 2, end_bit = 32, native_type='unsigned long')]],
} ],
'_ARBITER_INSTANCE' : [ 0x138, {
'Signature' : [ 0x0, ['unsigned long']],
'MutexEvent' : [ 0x8, ['pointer64', ['_KEVENT']]],
'Name' : [ 0x10, ['pointer64', ['unsigned short']]],
'ResourceType' : [ 0x18, ['long']],
'Allocation' : [ 0x20, ['pointer64', ['_RTL_RANGE_LIST']]],
'PossibleAllocation' : [ 0x28, ['pointer64', ['_RTL_RANGE_LIST']]],
'OrderingList' : [ 0x30, ['_ARBITER_ORDERING_LIST']],
'ReservedList' : [ 0x40, ['_ARBITER_ORDERING_LIST']],
'ReferenceCount' : [ 0x50, ['long']],
'Interface' : [ 0x58, ['pointer64', ['_ARBITER_INTERFACE']]],
'AllocationStackMaxSize' : [ 0x60, ['unsigned long']],
'AllocationStack' : [ 0x68, ['pointer64', ['_ARBITER_ALLOCATION_STATE']]],
'UnpackRequirement' : [ 0x70, ['pointer64', ['void']]],
'PackResource' : [ 0x78, ['pointer64', ['void']]],
'UnpackResource' : [ 0x80, ['pointer64', ['void']]],
'ScoreRequirement' : [ 0x88, ['pointer64', ['void']]],
'TestAllocation' : [ 0x90, ['pointer64', ['void']]],
'RetestAllocation' : [ 0x98, ['pointer64', ['void']]],
'CommitAllocation' : [ 0xa0, ['pointer64', ['void']]],
'RollbackAllocation' : [ 0xa8, ['pointer64', ['void']]],
'BootAllocation' : [ 0xb0, ['pointer64', ['void']]],
'QueryArbitrate' : [ 0xb8, ['pointer64', ['void']]],
'QueryConflict' : [ 0xc0, ['pointer64', ['void']]],
'AddReserved' : [ 0xc8, ['pointer64', ['void']]],
'StartArbiter' : [ 0xd0, ['pointer64', ['void']]],
'PreprocessEntry' : [ 0xd8, ['pointer64', ['void']]],
'AllocateEntry' : [ 0xe0, ['pointer64', ['void']]],
'GetNextAllocationRange' : [ 0xe8, ['pointer64', ['void']]],
'FindSuitableRange' : [ 0xf0, ['pointer64', ['void']]],
'AddAllocation' : [ 0xf8, ['pointer64', ['void']]],
'BacktrackAllocation' : [ 0x100, ['pointer64', ['void']]],
'OverrideConflict' : [ 0x108, ['pointer64', ['void']]],
'TransactionInProgress' : [ 0x110, ['unsigned char']],
'Extension' : [ 0x118, ['pointer64', ['void']]],
'BusDeviceObject' : [ 0x120, ['pointer64', ['_DEVICE_OBJECT']]],
'ConflictCallbackContext' : [ 0x128, ['pointer64', ['void']]],
'ConflictCallback' : [ 0x130, ['pointer64', ['void']]],
} ],
'_BUS_HANDLER' : [ 0xb8, {
'Version' : [ 0x0, ['unsigned long']],
'InterfaceType' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'ConfigurationType' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'Cmos', 1: 'EisaConfiguration', 2: 'Pos', 3: 'CbusConfiguration', 4: 'PCIConfiguration', 5: 'VMEConfiguration', 6: 'NuBusConfiguration', 7: 'PCMCIAConfiguration', 8: 'MPIConfiguration', 9: 'MPSAConfiguration', 10: 'PNPISAConfiguration', 11: 'SgiInternalConfiguration', 12: 'MaximumBusDataType', -1: 'ConfigurationSpaceUndefined'})]],
'BusNumber' : [ 0xc, ['unsigned long']],
'DeviceObject' : [ 0x10, ['pointer64', ['_DEVICE_OBJECT']]],
'ParentHandler' : [ 0x18, ['pointer64', ['_BUS_HANDLER']]],
'BusData' : [ 0x20, ['pointer64', ['void']]],
'DeviceControlExtensionSize' : [ 0x28, ['unsigned long']],
'BusAddresses' : [ 0x30, ['pointer64', ['_SUPPORTED_RANGES']]],
'Reserved' : [ 0x38, ['array', 4, ['unsigned long']]],
'GetBusData' : [ 0x48, ['pointer64', ['void']]],
'SetBusData' : [ 0x50, ['pointer64', ['void']]],
'AdjustResourceList' : [ 0x58, ['pointer64', ['void']]],
'AssignSlotResources' : [ 0x60, ['pointer64', ['void']]],
'GetInterruptVector' : [ 0x68, ['pointer64', ['void']]],
'TranslateBusAddress' : [ 0x70, ['pointer64', ['void']]],
'Spare1' : [ 0x78, ['pointer64', ['void']]],
'Spare2' : [ 0x80, ['pointer64', ['void']]],
'Spare3' : [ 0x88, ['pointer64', ['void']]],
'Spare4' : [ 0x90, ['pointer64', ['void']]],
'Spare5' : [ 0x98, ['pointer64', ['void']]],
'Spare6' : [ 0xa0, ['pointer64', ['void']]],
'Spare7' : [ 0xa8, ['pointer64', ['void']]],
'Spare8' : [ 0xb0, ['pointer64', ['void']]],
} ],
'SYSTEM_POWER_LEVEL' : [ 0x18, {
'Enable' : [ 0x0, ['unsigned char']],
'Spare' : [ 0x1, ['array', 3, ['unsigned char']]],
'BatteryLevel' : [ 0x4, ['unsigned long']],
'PowerPolicy' : [ 0x8, ['POWER_ACTION_POLICY']],
'MinSystemState' : [ 0x14, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
} ],
'_PCI_MN_DISPATCH_TABLE' : [ 0x10, {
'DispatchStyle' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'IRP_COMPLETE', 1: 'IRP_DOWNWARD', 2: 'IRP_UPWARD', 3: 'IRP_DISPATCH'})]],
'DispatchFunction' : [ 0x8, ['pointer64', ['void']]],
} ],
'_POP_DEVICE_SYS_STATE' : [ 0xba8, {
'IrpMinor' : [ 0x0, ['unsigned char']],
'SystemState' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'Event' : [ 0x8, ['_KEVENT']],
'SpinLock' : [ 0x20, ['unsigned long long']],
'Thread' : [ 0x28, ['pointer64', ['_KTHREAD']]],
'GetNewDeviceList' : [ 0x30, ['unsigned char']],
'Order' : [ 0x38, ['_PO_DEVICE_NOTIFY_ORDER']],
'Status' : [ 0x448, ['long']],
'FailedDevice' : [ 0x450, ['pointer64', ['_DEVICE_OBJECT']]],
'Waking' : [ 0x458, ['unsigned char']],
'Cancelled' : [ 0x459, ['unsigned char']],
'IgnoreErrors' : [ 0x45a, ['unsigned char']],
'IgnoreNotImplemented' : [ 0x45b, ['unsigned char']],
'WaitAny' : [ 0x45c, ['unsigned char']],
'WaitAll' : [ 0x45d, ['unsigned char']],
'PresentIrpQueue' : [ 0x460, ['_LIST_ENTRY']],
'Head' : [ 0x470, ['_POP_DEVICE_POWER_IRP']],
'PowerIrpState' : [ 0x4c8, ['array', 20, ['_POP_DEVICE_POWER_IRP']]],
} ],
'_OBJECT_DUMP_CONTROL' : [ 0x10, {
'Stream' : [ 0x0, ['pointer64', ['void']]],
'Detail' : [ 0x8, ['unsigned long']],
} ],
'_SECURITY_SUBJECT_CONTEXT' : [ 0x20, {
'ClientToken' : [ 0x0, ['pointer64', ['void']]],
'ImpersonationLevel' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'SecurityAnonymous', 1: 'SecurityIdentification', 2: 'SecurityImpersonation', 3: 'SecurityDelegation'})]],
'PrimaryToken' : [ 0x10, ['pointer64', ['void']]],
'ProcessAuditId' : [ 0x18, ['pointer64', ['void']]],
} ],
'_HEAP_STOP_ON_TAG' : [ 0x4, {
'HeapAndTagIndex' : [ 0x0, ['unsigned long']],
'TagIndex' : [ 0x0, ['unsigned short']],
'HeapIndex' : [ 0x2, ['unsigned short']],
} ],
'_MMWSLE_HASH' : [ 0x10, {
'Key' : [ 0x0, ['pointer64', ['void']]],
'Index' : [ 0x8, ['unsigned long']],
} ],
'_CM_NAME_CONTROL_BLOCK' : [ 0x20, {
'Compressed' : [ 0x0, ['unsigned char']],
'RefCount' : [ 0x2, ['unsigned short']],
'NameHash' : [ 0x8, ['_CM_NAME_HASH']],
'ConvKey' : [ 0x8, ['unsigned long']],
'NextHash' : [ 0x10, ['pointer64', ['_CM_KEY_HASH']]],
'NameLength' : [ 0x18, ['unsigned short']],
'Name' : [ 0x1a, ['array', 1, ['unsigned short']]],
} ],
'_CM_KEY_BODY' : [ 0x30, {
'Type' : [ 0x0, ['unsigned long']],
'KeyControlBlock' : [ 0x8, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'NotifyBlock' : [ 0x10, ['pointer64', ['_CM_NOTIFY_BLOCK']]],
'ProcessID' : [ 0x18, ['pointer64', ['void']]],
'KeyBodyList' : [ 0x20, ['_LIST_ENTRY']],
} ],
'_HANDLE_TABLE_ENTRY' : [ 0x10, {
'Object' : [ 0x0, ['pointer64', ['void']]],
'ObAttributes' : [ 0x0, ['unsigned long']],
'InfoTable' : [ 0x0, ['pointer64', ['_HANDLE_TABLE_ENTRY_INFO']]],
'Value' : [ 0x0, ['unsigned long long']],
'GrantedAccess' : [ 0x8, ['unsigned long']],
'GrantedAccessIndex' : [ 0x8, ['unsigned short']],
'CreatorBackTraceIndex' : [ 0xa, ['unsigned short']],
'NextFreeTableEntry' : [ 0x8, ['long']],
} ],
'_HEAP_USERDATA_HEADER' : [ 0x20, {
'SFreeListEntry' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'SubSegment' : [ 0x0, ['pointer64', ['_HEAP_SUBSEGMENT']]],
'HeapHandle' : [ 0x8, ['pointer64', ['void']]],
'SizeIndex' : [ 0x10, ['unsigned long long']],
'Signature' : [ 0x18, ['unsigned long long']],
} ],
'_LPCP_PORT_OBJECT' : [ 0x100, {
'ConnectionPort' : [ 0x0, ['pointer64', ['_LPCP_PORT_OBJECT']]],
'ConnectedPort' : [ 0x8, ['pointer64', ['_LPCP_PORT_OBJECT']]],
'MsgQueue' : [ 0x10, ['_LPCP_PORT_QUEUE']],
'Creator' : [ 0x30, ['_CLIENT_ID']],
'ClientSectionBase' : [ 0x40, ['pointer64', ['void']]],
'ServerSectionBase' : [ 0x48, ['pointer64', ['void']]],
'PortContext' : [ 0x50, ['pointer64', ['void']]],
'ClientThread' : [ 0x58, ['pointer64', ['_ETHREAD']]],
'SecurityQos' : [ 0x60, ['_SECURITY_QUALITY_OF_SERVICE']],
'StaticSecurity' : [ 0x70, ['_SECURITY_CLIENT_CONTEXT']],
'LpcReplyChainHead' : [ 0xb8, ['_LIST_ENTRY']],
'LpcDataInfoChainHead' : [ 0xc8, ['_LIST_ENTRY']],
'ServerProcess' : [ 0xd8, ['pointer64', ['_EPROCESS']]],
'MappingProcess' : [ 0xd8, ['pointer64', ['_EPROCESS']]],
'MaxMessageLength' : [ 0xe0, ['unsigned short']],
'MaxConnectionInfoLength' : [ 0xe2, ['unsigned short']],
'Flags' : [ 0xe4, ['unsigned long']],
'WaitEvent' : [ 0xe8, ['_KEVENT']],
} ],
'PCI_POWER_STATE' : [ 0x50, {
'CurrentSystemState' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'CurrentDeviceState' : [ 0x4, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
'SystemWakeLevel' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DeviceWakeLevel' : [ 0xc, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
'SystemStateMapping' : [ 0x10, ['array', -28, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]]],
'WaitWakeIrp' : [ 0x30, ['pointer64', ['_IRP']]],
'SavedCancelRoutine' : [ 0x38, ['pointer64', ['void']]],
'Paging' : [ 0x40, ['long']],
'Hibernate' : [ 0x44, ['long']],
'CrashDump' : [ 0x48, ['long']],
} ],
'_STRING64' : [ 0x10, {
'Length' : [ 0x0, ['unsigned short']],
'MaximumLength' : [ 0x2, ['unsigned short']],
'Buffer' : [ 0x8, ['unsigned long long']],
} ],
'_POOL_HACKER' : [ 0x30, {
'Header' : [ 0x0, ['_POOL_HEADER']],
'Contents' : [ 0x10, ['array', 8, ['unsigned long']]],
} ],
'_CM_INDEX_HINT_BLOCK' : [ 0x8, {
'Count' : [ 0x0, ['unsigned long']],
'HashKey' : [ 0x4, ['array', 1, ['unsigned long']]],
} ],
'_TOKEN_CONTROL' : [ 0x28, {
'TokenId' : [ 0x0, ['_LUID']],
'AuthenticationId' : [ 0x8, ['_LUID']],
'ModifiedId' : [ 0x10, ['_LUID']],
'TokenSource' : [ 0x18, ['_TOKEN_SOURCE']],
} ],
'__unnamed_1930' : [ 0x20, {
'SecurityContext' : [ 0x0, ['pointer64', ['_IO_SECURITY_CONTEXT']]],
'Options' : [ 0x8, ['unsigned long']],
'FileAttributes' : [ 0x10, ['unsigned short']],
'ShareAccess' : [ 0x12, ['unsigned short']],
'EaLength' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_1934' : [ 0x20, {
'SecurityContext' : [ 0x0, ['pointer64', ['_IO_SECURITY_CONTEXT']]],
'Options' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0x10, ['unsigned short']],
'ShareAccess' : [ 0x12, ['unsigned short']],
'Parameters' : [ 0x18, ['pointer64', ['_NAMED_PIPE_CREATE_PARAMETERS']]],
} ],
'__unnamed_1938' : [ 0x20, {
'SecurityContext' : [ 0x0, ['pointer64', ['_IO_SECURITY_CONTEXT']]],
'Options' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0x10, ['unsigned short']],
'ShareAccess' : [ 0x12, ['unsigned short']],
'Parameters' : [ 0x18, ['pointer64', ['_MAILSLOT_CREATE_PARAMETERS']]],
} ],
'__unnamed_193a' : [ 0x18, {
'Length' : [ 0x0, ['unsigned long']],
'Key' : [ 0x8, ['unsigned long']],
'ByteOffset' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_193e' : [ 0x20, {
'Length' : [ 0x0, ['unsigned long']],
'FileName' : [ 0x8, ['pointer64', ['_UNICODE_STRING']]],
'FileInformationClass' : [ 0x10, ['Enumeration', dict(target = 'long', choices = {1: 'FileDirectoryInformation', 2: 'FileFullDirectoryInformation', 3: 'FileBothDirectoryInformation', 4: 'FileBasicInformation', 5: 'FileStandardInformation', 6: 'FileInternalInformation', 7: 'FileEaInformation', 8: 'FileAccessInformation', 9: 'FileNameInformation', 10: 'FileRenameInformation', 11: 'FileLinkInformation', 12: 'FileNamesInformation', 13: 'FileDispositionInformation', 14: 'FilePositionInformation', 15: 'FileFullEaInformation', 16: 'FileModeInformation', 17: 'FileAlignmentInformation', 18: 'FileAllInformation', 19: 'FileAllocationInformation', 20: 'FileEndOfFileInformation', 21: 'FileAlternateNameInformation', 22: 'FileStreamInformation', 23: 'FilePipeInformation', 24: 'FilePipeLocalInformation', 25: 'FilePipeRemoteInformation', 26: 'FileMailslotQueryInformation', 27: 'FileMailslotSetInformation', 28: 'FileCompressionInformation', 29: 'FileObjectIdInformation', 30: 'FileCompletionInformation', 31: 'FileMoveClusterInformation', 32: 'FileQuotaInformation', 33: 'FileReparsePointInformation', 34: 'FileNetworkOpenInformation', 35: 'FileAttributeTagInformation', 36: 'FileTrackingInformation', 37: 'FileIdBothDirectoryInformation', 38: 'FileIdFullDirectoryInformation', 39: 'FileValidDataLengthInformation', 40: 'FileShortNameInformation', 41: 'FileIoCompletionNotificationInformation', 42: 'FileMaximumInformation'})]],
'FileIndex' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_1940' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'CompletionFilter' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1942' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'FileInformationClass' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {1: 'FileDirectoryInformation', 2: 'FileFullDirectoryInformation', 3: 'FileBothDirectoryInformation', 4: 'FileBasicInformation', 5: 'FileStandardInformation', 6: 'FileInternalInformation', 7: 'FileEaInformation', 8: 'FileAccessInformation', 9: 'FileNameInformation', 10: 'FileRenameInformation', 11: 'FileLinkInformation', 12: 'FileNamesInformation', 13: 'FileDispositionInformation', 14: 'FilePositionInformation', 15: 'FileFullEaInformation', 16: 'FileModeInformation', 17: 'FileAlignmentInformation', 18: 'FileAllInformation', 19: 'FileAllocationInformation', 20: 'FileEndOfFileInformation', 21: 'FileAlternateNameInformation', 22: 'FileStreamInformation', 23: 'FilePipeInformation', 24: 'FilePipeLocalInformation', 25: 'FilePipeRemoteInformation', 26: 'FileMailslotQueryInformation', 27: 'FileMailslotSetInformation', 28: 'FileCompressionInformation', 29: 'FileObjectIdInformation', 30: 'FileCompletionInformation', 31: 'FileMoveClusterInformation', 32: 'FileQuotaInformation', 33: 'FileReparsePointInformation', 34: 'FileNetworkOpenInformation', 35: 'FileAttributeTagInformation', 36: 'FileTrackingInformation', 37: 'FileIdBothDirectoryInformation', 38: 'FileIdFullDirectoryInformation', 39: 'FileValidDataLengthInformation', 40: 'FileShortNameInformation', 41: 'FileIoCompletionNotificationInformation', 42: 'FileMaximumInformation'})]],
} ],
'__unnamed_1944' : [ 0x20, {
'Length' : [ 0x0, ['unsigned long']],
'FileInformationClass' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {1: 'FileDirectoryInformation', 2: 'FileFullDirectoryInformation', 3: 'FileBothDirectoryInformation', 4: 'FileBasicInformation', 5: 'FileStandardInformation', 6: 'FileInternalInformation', 7: 'FileEaInformation', 8: 'FileAccessInformation', 9: 'FileNameInformation', 10: 'FileRenameInformation', 11: 'FileLinkInformation', 12: 'FileNamesInformation', 13: 'FileDispositionInformation', 14: 'FilePositionInformation', 15: 'FileFullEaInformation', 16: 'FileModeInformation', 17: 'FileAlignmentInformation', 18: 'FileAllInformation', 19: 'FileAllocationInformation', 20: 'FileEndOfFileInformation', 21: 'FileAlternateNameInformation', 22: 'FileStreamInformation', 23: 'FilePipeInformation', 24: 'FilePipeLocalInformation', 25: 'FilePipeRemoteInformation', 26: 'FileMailslotQueryInformation', 27: 'FileMailslotSetInformation', 28: 'FileCompressionInformation', 29: 'FileObjectIdInformation', 30: 'FileCompletionInformation', 31: 'FileMoveClusterInformation', 32: 'FileQuotaInformation', 33: 'FileReparsePointInformation', 34: 'FileNetworkOpenInformation', 35: 'FileAttributeTagInformation', 36: 'FileTrackingInformation', 37: 'FileIdBothDirectoryInformation', 38: 'FileIdFullDirectoryInformation', 39: 'FileValidDataLengthInformation', 40: 'FileShortNameInformation', 41: 'FileIoCompletionNotificationInformation', 42: 'FileMaximumInformation'})]],
'FileObject' : [ 0x10, ['pointer64', ['_FILE_OBJECT']]],
'ReplaceIfExists' : [ 0x18, ['unsigned char']],
'AdvanceOnly' : [ 0x19, ['unsigned char']],
'ClusterCount' : [ 0x18, ['unsigned long']],
'DeleteHandle' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_1946' : [ 0x20, {
'Length' : [ 0x0, ['unsigned long']],
'EaList' : [ 0x8, ['pointer64', ['void']]],
'EaListLength' : [ 0x10, ['unsigned long']],
'EaIndex' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_1948' : [ 0x4, {
'Length' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_194c' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'FsInformationClass' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {1: 'FileFsVolumeInformation', 2: 'FileFsLabelInformation', 3: 'FileFsSizeInformation', 4: 'FileFsDeviceInformation', 5: 'FileFsAttributeInformation', 6: 'FileFsControlInformation', 7: 'FileFsFullSizeInformation', 8: 'FileFsObjectIdInformation', 9: 'FileFsDriverPathInformation', 10: 'FileFsMaximumInformation'})]],
} ],
'__unnamed_194e' : [ 0x20, {
'OutputBufferLength' : [ 0x0, ['unsigned long']],
'InputBufferLength' : [ 0x8, ['unsigned long']],
'FsControlCode' : [ 0x10, ['unsigned long']],
'Type3InputBuffer' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_1950' : [ 0x18, {
'Length' : [ 0x0, ['pointer64', ['_LARGE_INTEGER']]],
'Key' : [ 0x8, ['unsigned long']],
'ByteOffset' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_1952' : [ 0x20, {
'OutputBufferLength' : [ 0x0, ['unsigned long']],
'InputBufferLength' : [ 0x8, ['unsigned long']],
'IoControlCode' : [ 0x10, ['unsigned long']],
'Type3InputBuffer' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_1954' : [ 0x10, {
'SecurityInformation' : [ 0x0, ['unsigned long']],
'Length' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_1956' : [ 0x10, {
'SecurityInformation' : [ 0x0, ['unsigned long']],
'SecurityDescriptor' : [ 0x8, ['pointer64', ['void']]],
} ],
'__unnamed_1958' : [ 0x10, {
'Vpb' : [ 0x0, ['pointer64', ['_VPB']]],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
} ],
'__unnamed_195c' : [ 0x8, {
'Srb' : [ 0x0, ['pointer64', ['_SCSI_REQUEST_BLOCK']]],
} ],
'__unnamed_1960' : [ 0x20, {
'Length' : [ 0x0, ['unsigned long']],
'StartSid' : [ 0x8, ['pointer64', ['void']]],
'SidList' : [ 0x10, ['pointer64', ['_FILE_GET_QUOTA_INFORMATION']]],
'SidListLength' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_1964' : [ 0x4, {
'Type' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'BusRelations', 1: 'EjectionRelations', 2: 'PowerRelations', 3: 'RemovalRelations', 4: 'TargetDeviceRelation', 5: 'SingleBusRelations'})]],
} ],
'__unnamed_1966' : [ 0x20, {
'InterfaceType' : [ 0x0, ['pointer64', ['_GUID']]],
'Size' : [ 0x8, ['unsigned short']],
'Version' : [ 0xa, ['unsigned short']],
'Interface' : [ 0x10, ['pointer64', ['_INTERFACE']]],
'InterfaceSpecificData' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_196a' : [ 0x8, {
'Capabilities' : [ 0x0, ['pointer64', ['_DEVICE_CAPABILITIES']]],
} ],
'__unnamed_196c' : [ 0x8, {
'IoResourceRequirementList' : [ 0x0, ['pointer64', ['_IO_RESOURCE_REQUIREMENTS_LIST']]],
} ],
'__unnamed_196e' : [ 0x20, {
'WhichSpace' : [ 0x0, ['unsigned long']],
'Buffer' : [ 0x8, ['pointer64', ['void']]],
'Offset' : [ 0x10, ['unsigned long']],
'Length' : [ 0x18, ['unsigned long']],
} ],
'__unnamed_1970' : [ 0x1, {
'Lock' : [ 0x0, ['unsigned char']],
} ],
'__unnamed_1974' : [ 0x4, {
'IdType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'BusQueryDeviceID', 1: 'BusQueryHardwareIDs', 2: 'BusQueryCompatibleIDs', 3: 'BusQueryInstanceID', 4: 'BusQueryDeviceSerialNumber'})]],
} ],
'__unnamed_1978' : [ 0x10, {
'DeviceTextType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'DeviceTextDescription', 1: 'DeviceTextLocationInformation'})]],
'LocaleId' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_197c' : [ 0x10, {
'InPath' : [ 0x0, ['unsigned char']],
'Reserved' : [ 0x1, ['array', 3, ['unsigned char']]],
'Type' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'DeviceUsageTypeUndefined', 1: 'DeviceUsageTypePaging', 2: 'DeviceUsageTypeHibernation', 3: 'DeviceUsageTypeDumpFile'})]],
} ],
'__unnamed_197e' : [ 0x4, {
'PowerState' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
} ],
'__unnamed_1982' : [ 0x8, {
'PowerSequence' : [ 0x0, ['pointer64', ['_POWER_SEQUENCE']]],
} ],
'__unnamed_1986' : [ 0x20, {
'SystemContext' : [ 0x0, ['unsigned long']],
'Type' : [ 0x8, ['Enumeration', dict(target = 'long', choices = {0: 'SystemPowerState', 1: 'DevicePowerState'})]],
'State' : [ 0x10, ['_POWER_STATE']],
'ShutdownType' : [ 0x18, ['Enumeration', dict(target = 'long', choices = {0: 'PowerActionNone', 1: 'PowerActionReserved', 2: 'PowerActionSleep', 3: 'PowerActionHibernate', 4: 'PowerActionShutdown', 5: 'PowerActionShutdownReset', 6: 'PowerActionShutdownOff', 7: 'PowerActionWarmEject'})]],
} ],
'__unnamed_1988' : [ 0x10, {
'AllocatedResources' : [ 0x0, ['pointer64', ['_CM_RESOURCE_LIST']]],
'AllocatedResourcesTranslated' : [ 0x8, ['pointer64', ['_CM_RESOURCE_LIST']]],
} ],
'__unnamed_198a' : [ 0x20, {
'ProviderId' : [ 0x0, ['unsigned long long']],
'DataPath' : [ 0x8, ['pointer64', ['void']]],
'BufferSize' : [ 0x10, ['unsigned long']],
'Buffer' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_198c' : [ 0x20, {
'Argument1' : [ 0x0, ['pointer64', ['void']]],
'Argument2' : [ 0x8, ['pointer64', ['void']]],
'Argument3' : [ 0x10, ['pointer64', ['void']]],
'Argument4' : [ 0x18, ['pointer64', ['void']]],
} ],
'__unnamed_198e' : [ 0x20, {
'Create' : [ 0x0, ['__unnamed_1930']],
'CreatePipe' : [ 0x0, ['__unnamed_1934']],
'CreateMailslot' : [ 0x0, ['__unnamed_1938']],
'Read' : [ 0x0, ['__unnamed_193a']],
'Write' : [ 0x0, ['__unnamed_193a']],
'QueryDirectory' : [ 0x0, ['__unnamed_193e']],
'NotifyDirectory' : [ 0x0, ['__unnamed_1940']],
'QueryFile' : [ 0x0, ['__unnamed_1942']],
'SetFile' : [ 0x0, ['__unnamed_1944']],
'QueryEa' : [ 0x0, ['__unnamed_1946']],
'SetEa' : [ 0x0, ['__unnamed_1948']],
'QueryVolume' : [ 0x0, ['__unnamed_194c']],
'SetVolume' : [ 0x0, ['__unnamed_194c']],
'FileSystemControl' : [ 0x0, ['__unnamed_194e']],
'LockControl' : [ 0x0, ['__unnamed_1950']],
'DeviceIoControl' : [ 0x0, ['__unnamed_1952']],
'QuerySecurity' : [ 0x0, ['__unnamed_1954']],
'SetSecurity' : [ 0x0, ['__unnamed_1956']],
'MountVolume' : [ 0x0, ['__unnamed_1958']],
'VerifyVolume' : [ 0x0, ['__unnamed_1958']],
'Scsi' : [ 0x0, ['__unnamed_195c']],
'QueryQuota' : [ 0x0, ['__unnamed_1960']],
'SetQuota' : [ 0x0, ['__unnamed_1948']],
'QueryDeviceRelations' : [ 0x0, ['__unnamed_1964']],
'QueryInterface' : [ 0x0, ['__unnamed_1966']],
'DeviceCapabilities' : [ 0x0, ['__unnamed_196a']],
'FilterResourceRequirements' : [ 0x0, ['__unnamed_196c']],
'ReadWriteConfig' : [ 0x0, ['__unnamed_196e']],
'SetLock' : [ 0x0, ['__unnamed_1970']],
'QueryId' : [ 0x0, ['__unnamed_1974']],
'QueryDeviceText' : [ 0x0, ['__unnamed_1978']],
'UsageNotification' : [ 0x0, ['__unnamed_197c']],
'WaitWake' : [ 0x0, ['__unnamed_197e']],
'PowerSequence' : [ 0x0, ['__unnamed_1982']],
'Power' : [ 0x0, ['__unnamed_1986']],
'StartDevice' : [ 0x0, ['__unnamed_1988']],
'WMI' : [ 0x0, ['__unnamed_198a']],
'Others' : [ 0x0, ['__unnamed_198c']],
} ],
'_IO_STACK_LOCATION' : [ 0x48, {
'MajorFunction' : [ 0x0, ['unsigned char']],
'MinorFunction' : [ 0x1, ['unsigned char']],
'Flags' : [ 0x2, ['unsigned char']],
'Control' : [ 0x3, ['unsigned char']],
'Parameters' : [ 0x8, ['__unnamed_198e']],
'DeviceObject' : [ 0x28, ['pointer64', ['_DEVICE_OBJECT']]],
'FileObject' : [ 0x30, ['pointer64', ['_FILE_OBJECT']]],
'CompletionRoutine' : [ 0x38, ['pointer64', ['void']]],
'Context' : [ 0x40, ['pointer64', ['void']]],
} ],
'__unnamed_1995' : [ 0x18, {
'Length' : [ 0x0, ['unsigned long']],
'Alignment' : [ 0x4, ['unsigned long']],
'MinimumAddress' : [ 0x8, ['_LARGE_INTEGER']],
'MaximumAddress' : [ 0x10, ['_LARGE_INTEGER']],
} ],
'__unnamed_1997' : [ 0x8, {
'MinimumVector' : [ 0x0, ['unsigned long']],
'MaximumVector' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_1999' : [ 0x8, {
'MinimumChannel' : [ 0x0, ['unsigned long']],
'MaximumChannel' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_199b' : [ 0x10, {
'Length' : [ 0x0, ['unsigned long']],
'MinBusNumber' : [ 0x4, ['unsigned long']],
'MaxBusNumber' : [ 0x8, ['unsigned long']],
'Reserved' : [ 0xc, ['unsigned long']],
} ],
'__unnamed_199d' : [ 0xc, {
'Priority' : [ 0x0, ['unsigned long']],
'Reserved1' : [ 0x4, ['unsigned long']],
'Reserved2' : [ 0x8, ['unsigned long']],
} ],
'__unnamed_199f' : [ 0x18, {
'Port' : [ 0x0, ['__unnamed_1995']],
'Memory' : [ 0x0, ['__unnamed_1995']],
'Interrupt' : [ 0x0, ['__unnamed_1997']],
'Dma' : [ 0x0, ['__unnamed_1999']],
'Generic' : [ 0x0, ['__unnamed_1995']],
'DevicePrivate' : [ 0x0, ['__unnamed_16dc']],
'BusNumber' : [ 0x0, ['__unnamed_199b']],
'ConfigData' : [ 0x0, ['__unnamed_199d']],
} ],
'_IO_RESOURCE_DESCRIPTOR' : [ 0x20, {
'Option' : [ 0x0, ['unsigned char']],
'Type' : [ 0x1, ['unsigned char']],
'ShareDisposition' : [ 0x2, ['unsigned char']],
'Spare1' : [ 0x3, ['unsigned char']],
'Flags' : [ 0x4, ['unsigned short']],
'Spare2' : [ 0x6, ['unsigned short']],
'u' : [ 0x8, ['__unnamed_199f']],
} ],
'_MI_VERIFIER_POOL_HEADER' : [ 0x8, {
'VerifierPoolEntry' : [ 0x0, ['pointer64', ['_VI_POOL_ENTRY']]],
} ],
'__unnamed_19a8' : [ 0x4, {
'DataLength' : [ 0x0, ['short']],
'TotalLength' : [ 0x2, ['short']],
} ],
'__unnamed_19aa' : [ 0x4, {
's1' : [ 0x0, ['__unnamed_19a8']],
'Length' : [ 0x0, ['unsigned long']],
} ],
'__unnamed_19ac' : [ 0x4, {
'Type' : [ 0x0, ['short']],
'DataInfoOffset' : [ 0x2, ['short']],
} ],
'__unnamed_19ae' : [ 0x4, {
's2' : [ 0x0, ['__unnamed_19ac']],
'ZeroInit' : [ 0x0, ['unsigned long']],
} ],
'_PORT_MESSAGE' : [ 0x28, {
'u1' : [ 0x0, ['__unnamed_19aa']],
'u2' : [ 0x4, ['__unnamed_19ae']],
'ClientId' : [ 0x8, ['_CLIENT_ID']],
'DoNotUseThisField' : [ 0x8, ['double']],
'MessageId' : [ 0x18, ['unsigned long']],
'ClientViewSize' : [ 0x20, ['unsigned long long']],
'CallbackId' : [ 0x20, ['unsigned long']],
} ],
'_DBGKD_ANY_CONTROL_SET' : [ 0x1c, {
'X86ControlSet' : [ 0x0, ['_X86_DBGKD_CONTROL_SET']],
'AlphaControlSet' : [ 0x0, ['unsigned long']],
'IA64ControlSet' : [ 0x0, ['_IA64_DBGKD_CONTROL_SET']],
'Amd64ControlSet' : [ 0x0, ['_AMD64_DBGKD_CONTROL_SET']],
} ],
'_ARBITER_ORDERING_LIST' : [ 0x10, {
'Count' : [ 0x0, ['unsigned short']],
'Maximum' : [ 0x2, ['unsigned short']],
'Orderings' : [ 0x8, ['pointer64', ['_ARBITER_ORDERING']]],
} ],
'_HBASE_BLOCK' : [ 0x1000, {
'Signature' : [ 0x0, ['unsigned long']],
'Sequence1' : [ 0x4, ['unsigned long']],
'Sequence2' : [ 0x8, ['unsigned long']],
'TimeStamp' : [ 0xc, ['_LARGE_INTEGER']],
'Major' : [ 0x14, ['unsigned long']],
'Minor' : [ 0x18, ['unsigned long']],
'Type' : [ 0x1c, ['unsigned long']],
'Format' : [ 0x20, ['unsigned long']],
'RootCell' : [ 0x24, ['unsigned long']],
'Length' : [ 0x28, ['unsigned long']],
'Cluster' : [ 0x2c, ['unsigned long']],
'FileName' : [ 0x30, ['array', 64, ['unsigned char']]],
'Reserved1' : [ 0x70, ['array', 99, ['unsigned long']]],
'CheckSum' : [ 0x1fc, ['unsigned long']],
'Reserved2' : [ 0x200, ['array', 894, ['unsigned long']]],
'BootType' : [ 0xff8, ['unsigned long']],
'BootRecover' : [ 0xffc, ['unsigned long']],
} ],
'_DUAL' : [ 0x278, {
'Length' : [ 0x0, ['unsigned long']],
'Map' : [ 0x8, ['pointer64', ['_HMAP_DIRECTORY']]],
'SmallDir' : [ 0x10, ['pointer64', ['_HMAP_TABLE']]],
'Guard' : [ 0x18, ['unsigned long']],
'FreeDisplay' : [ 0x20, ['array', 24, ['_FREE_DISPLAY']]],
'FreeSummary' : [ 0x260, ['unsigned long']],
'FreeBins' : [ 0x268, ['_LIST_ENTRY']],
} ],
'_VI_POOL_ENTRY' : [ 0x20, {
'PageHeader' : [ 0x0, ['_VI_POOL_PAGE_HEADER']],
'InUse' : [ 0x0, ['_VI_POOL_ENTRY_INUSE']],
'NextFree' : [ 0x0, ['pointer64', ['_SLIST_ENTRY']]],
} ],
'_LPCP_PORT_QUEUE' : [ 0x20, {
'NonPagedPortQueue' : [ 0x0, ['pointer64', ['_LPCP_NONPAGED_PORT_QUEUE']]],
'Semaphore' : [ 0x8, ['pointer64', ['_KSEMAPHORE']]],
'ReceiveHead' : [ 0x10, ['_LIST_ENTRY']],
} ],
'_INITIAL_PRIVILEGE_SET' : [ 0x2c, {
'PrivilegeCount' : [ 0x0, ['unsigned long']],
'Control' : [ 0x4, ['unsigned long']],
'Privilege' : [ 0x8, ['array', 3, ['_LUID_AND_ATTRIBUTES']]],
} ],
'_POP_HIBER_CONTEXT' : [ 0x150, {
'WriteToFile' : [ 0x0, ['unsigned char']],
'ReserveLoaderMemory' : [ 0x1, ['unsigned char']],
'ReserveFreeMemory' : [ 0x2, ['unsigned char']],
'VerifyOnWake' : [ 0x3, ['unsigned char']],
'Reset' : [ 0x4, ['unsigned char']],
'HiberFlags' : [ 0x5, ['unsigned char']],
'LinkFile' : [ 0x6, ['unsigned char']],
'LinkFileHandle' : [ 0x8, ['pointer64', ['void']]],
'Lock' : [ 0x10, ['unsigned long long']],
'MapFrozen' : [ 0x18, ['unsigned char']],
'MemoryMap' : [ 0x20, ['_RTL_BITMAP']],
'ClonedRanges' : [ 0x30, ['_LIST_ENTRY']],
'ClonedRangeCount' : [ 0x40, ['unsigned long']],
'NextCloneRange' : [ 0x48, ['pointer64', ['_LIST_ENTRY']]],
'NextPreserve' : [ 0x50, ['unsigned long long']],
'LoaderMdl' : [ 0x58, ['pointer64', ['_MDL']]],
'Clones' : [ 0x60, ['pointer64', ['_MDL']]],
'NextClone' : [ 0x68, ['pointer64', ['unsigned char']]],
'NoClones' : [ 0x70, ['unsigned long long']],
'Spares' : [ 0x78, ['pointer64', ['_MDL']]],
'PagesOut' : [ 0x80, ['unsigned long long']],
'IoPage' : [ 0x88, ['pointer64', ['void']]],
'CurrentMcb' : [ 0x90, ['pointer64', ['void']]],
'DumpStack' : [ 0x98, ['pointer64', ['_DUMP_STACK_CONTEXT']]],
'WakeState' : [ 0xa0, ['pointer64', ['_KPROCESSOR_STATE']]],
'NoRanges' : [ 0xa8, ['unsigned long']],
'HiberVa' : [ 0xb0, ['unsigned long long']],
'HiberPte' : [ 0xb8, ['_LARGE_INTEGER']],
'Status' : [ 0xc0, ['long']],
'MemoryImage' : [ 0xc8, ['pointer64', ['PO_MEMORY_IMAGE']]],
'TableHead' : [ 0xd0, ['pointer64', ['_PO_MEMORY_RANGE_ARRAY']]],
'CompressionWorkspace' : [ 0xd8, ['pointer64', ['unsigned char']]],
'CompressedWriteBuffer' : [ 0xe0, ['pointer64', ['unsigned char']]],
'PerformanceStats' : [ 0xe8, ['pointer64', ['unsigned long']]],
'CompressionBlock' : [ 0xf0, ['pointer64', ['void']]],
'DmaIO' : [ 0xf8, ['pointer64', ['void']]],
'TemporaryHeap' : [ 0x100, ['pointer64', ['void']]],
'PerfInfo' : [ 0x108, ['_PO_HIBER_PERF']],
} ],
'_FILE_GET_QUOTA_INFORMATION' : [ 0x14, {
'NextEntryOffset' : [ 0x0, ['unsigned long']],
'SidLength' : [ 0x4, ['unsigned long']],
'Sid' : [ 0x8, ['_SID']],
} ],
'_MMADDRESS_LIST' : [ 0x10, {
'StartVpn' : [ 0x0, ['unsigned long long']],
'EndVpn' : [ 0x8, ['unsigned long long']],
} ],
'_OBJECT_NAME_INFORMATION' : [ 0x10, {
'Name' : [ 0x0, ['_UNICODE_STRING']],
} ],
'_KDESCRIPTOR' : [ 0x10, {
'Pad' : [ 0x0, ['array', 3, ['unsigned short']]],
'Limit' : [ 0x6, ['unsigned short']],
'Base' : [ 0x8, ['pointer64', ['void']]],
} ],
'_DUMP_STACK_CONTEXT' : [ 0x110, {
'Init' : [ 0x0, ['_DUMP_INITIALIZATION_CONTEXT']],
'PartitionOffset' : [ 0xa0, ['_LARGE_INTEGER']],
'DumpPointers' : [ 0xa8, ['pointer64', ['void']]],
'PointersLength' : [ 0xb0, ['unsigned long']],
'ModulePrefix' : [ 0xb8, ['pointer64', ['unsigned short']]],
'DriverList' : [ 0xc0, ['_LIST_ENTRY']],
'InitMsg' : [ 0xd0, ['_STRING']],
'ProgMsg' : [ 0xe0, ['_STRING']],
'DoneMsg' : [ 0xf0, ['_STRING']],
'FileObject' : [ 0x100, ['pointer64', ['void']]],
'UsageType' : [ 0x108, ['Enumeration', dict(target = 'long', choices = {0: 'DeviceUsageTypeUndefined', 1: 'DeviceUsageTypePaging', 2: 'DeviceUsageTypeHibernation', 3: 'DeviceUsageTypeDumpFile'})]],
} ],
'_POP_SHUTDOWN_BUG_CHECK' : [ 0x28, {
'Code' : [ 0x0, ['unsigned long']],
'Parameter1' : [ 0x8, ['unsigned long long']],
'Parameter2' : [ 0x10, ['unsigned long long']],
'Parameter3' : [ 0x18, ['unsigned long long']],
'Parameter4' : [ 0x20, ['unsigned long long']],
} ],
'__unnamed_19e9' : [ 0x4, {
'DeviceNumber' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 5, native_type='unsigned long')]],
'FunctionNumber' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 8, native_type='unsigned long')]],
'Reserved' : [ 0x0, ['BitField', dict(start_bit = 8, end_bit = 32, native_type='unsigned long')]],
} ],
'__unnamed_19eb' : [ 0x4, {
'bits' : [ 0x0, ['__unnamed_19e9']],
'AsULONG' : [ 0x0, ['unsigned long']],
} ],
'_PCI_SLOT_NUMBER' : [ 0x4, {
'u' : [ 0x0, ['__unnamed_19eb']],
} ],
'_CM_NOTIFY_BLOCK' : [ 0x58, {
'HiveList' : [ 0x0, ['_LIST_ENTRY']],
'PostList' : [ 0x10, ['_LIST_ENTRY']],
'KeyControlBlock' : [ 0x20, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
'KeyBody' : [ 0x28, ['pointer64', ['_CM_KEY_BODY']]],
'Filter' : [ 0x30, ['BitField', dict(start_bit = 0, end_bit = 30, native_type='unsigned long')]],
'WatchTree' : [ 0x30, ['BitField', dict(start_bit = 30, end_bit = 31, native_type='unsigned long')]],
'NotifyPending' : [ 0x30, ['BitField', dict(start_bit = 31, end_bit = 32, native_type='unsigned long')]],
'SubjectContext' : [ 0x38, ['_SECURITY_SUBJECT_CONTEXT']],
} ],
'_SID' : [ 0xc, {
'Revision' : [ 0x0, ['unsigned char']],
'SubAuthorityCount' : [ 0x1, ['unsigned char']],
'IdentifierAuthority' : [ 0x2, ['_SID_IDENTIFIER_AUTHORITY']],
'SubAuthority' : [ 0x8, ['array', 1, ['unsigned long']]],
} ],
'_RTL_HANDLE_TABLE_ENTRY' : [ 0x8, {
'Flags' : [ 0x0, ['unsigned long']],
'NextFree' : [ 0x0, ['pointer64', ['_RTL_HANDLE_TABLE_ENTRY']]],
} ],
'_VI_POOL_ENTRY_INUSE' : [ 0x20, {
'VirtualAddress' : [ 0x0, ['pointer64', ['void']]],
'CallingAddress' : [ 0x8, ['pointer64', ['void']]],
'NumberOfBytes' : [ 0x10, ['unsigned long long']],
'Tag' : [ 0x18, ['unsigned long long']],
} ],
'_INTERFACE' : [ 0x20, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'Context' : [ 0x8, ['pointer64', ['void']]],
'InterfaceReference' : [ 0x10, ['pointer64', ['void']]],
'InterfaceDereference' : [ 0x18, ['pointer64', ['void']]],
} ],
'_SUPPORTED_RANGES' : [ 0xc0, {
'Version' : [ 0x0, ['unsigned short']],
'Sorted' : [ 0x2, ['unsigned char']],
'Reserved' : [ 0x3, ['unsigned char']],
'NoIO' : [ 0x4, ['unsigned long']],
'IO' : [ 0x8, ['_SUPPORTED_RANGE']],
'NoMemory' : [ 0x30, ['unsigned long']],
'Memory' : [ 0x38, ['_SUPPORTED_RANGE']],
'NoPrefetchMemory' : [ 0x60, ['unsigned long']],
'PrefetchMemory' : [ 0x68, ['_SUPPORTED_RANGE']],
'NoDma' : [ 0x90, ['unsigned long']],
'Dma' : [ 0x98, ['_SUPPORTED_RANGE']],
} ],
'_DRIVER_OBJECT' : [ 0x150, {
'Type' : [ 0x0, ['short']],
'Size' : [ 0x2, ['short']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'Flags' : [ 0x10, ['unsigned long']],
'DriverStart' : [ 0x18, ['pointer64', ['void']]],
'DriverSize' : [ 0x20, ['unsigned long']],
'DriverSection' : [ 0x28, ['pointer64', ['void']]],
'DriverExtension' : [ 0x30, ['pointer64', ['_DRIVER_EXTENSION']]],
'DriverName' : [ 0x38, ['_UNICODE_STRING']],
'HardwareDatabase' : [ 0x48, ['pointer64', ['_UNICODE_STRING']]],
'FastIoDispatch' : [ 0x50, ['pointer64', ['_FAST_IO_DISPATCH']]],
'DriverInit' : [ 0x58, ['pointer64', ['void']]],
'DriverStartIo' : [ 0x60, ['pointer64', ['void']]],
'DriverUnload' : [ 0x68, ['pointer64', ['void']]],
'MajorFunction' : [ 0x70, ['array', 28, ['pointer64', ['void']]]],
} ],
'_SID_IDENTIFIER_AUTHORITY' : [ 0x6, {
'Value' : [ 0x0, ['array', 6, ['unsigned char']]],
} ],
'_SECURITY_DESCRIPTOR_RELATIVE' : [ 0x14, {
'Revision' : [ 0x0, ['unsigned char']],
'Sbz1' : [ 0x1, ['unsigned char']],
'Control' : [ 0x2, ['unsigned short']],
'Owner' : [ 0x4, ['unsigned long']],
'Group' : [ 0x8, ['unsigned long']],
'Sacl' : [ 0xc, ['unsigned long']],
'Dacl' : [ 0x10, ['unsigned long']],
} ],
'_DRIVER_EXTENSION' : [ 0x38, {
'DriverObject' : [ 0x0, ['pointer64', ['_DRIVER_OBJECT']]],
'AddDevice' : [ 0x8, ['pointer64', ['void']]],
'Count' : [ 0x10, ['unsigned long']],
'ServiceKeyName' : [ 0x18, ['_UNICODE_STRING']],
'ClientDriverExtension' : [ 0x28, ['pointer64', ['_IO_CLIENT_EXTENSION']]],
'FsFilterCallbacks' : [ 0x30, ['pointer64', ['_FS_FILTER_CALLBACKS']]],
} ],
'_PM_SUPPORT' : [ 0x1, {
'Rsvd2' : [ 0x0, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned char')]],
'D1' : [ 0x0, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned char')]],
'D2' : [ 0x0, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned char')]],
'PMED0' : [ 0x0, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned char')]],
'PMED1' : [ 0x0, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned char')]],
'PMED2' : [ 0x0, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned char')]],
'PMED3Hot' : [ 0x0, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned char')]],
'PMED3Cold' : [ 0x0, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned char')]],
} ],
'__unnamed_1a1a' : [ 0x18, {
'ArbitrationList' : [ 0x0, ['pointer64', ['_LIST_ENTRY']]],
'AllocateFromCount' : [ 0x8, ['unsigned long']],
'AllocateFrom' : [ 0x10, ['pointer64', ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
} ],
'__unnamed_1a1c' : [ 0x8, {
'ArbitrationList' : [ 0x0, ['pointer64', ['_LIST_ENTRY']]],
} ],
'__unnamed_1a20' : [ 0x8, {
'AllocatedResources' : [ 0x0, ['pointer64', ['pointer64', ['_CM_PARTIAL_RESOURCE_LIST']]]],
} ],
'__unnamed_1a22' : [ 0x20, {
'PhysicalDeviceObject' : [ 0x0, ['pointer64', ['_DEVICE_OBJECT']]],
'ConflictingResource' : [ 0x8, ['pointer64', ['_IO_RESOURCE_DESCRIPTOR']]],
'ConflictCount' : [ 0x10, ['pointer64', ['unsigned long']]],
'Conflicts' : [ 0x18, ['pointer64', ['pointer64', ['_ARBITER_CONFLICT_INFO']]]],
} ],
'__unnamed_1a24' : [ 0x8, {
'ReserveDevice' : [ 0x0, ['pointer64', ['_DEVICE_OBJECT']]],
} ],
'__unnamed_1a26' : [ 0x20, {
'TestAllocation' : [ 0x0, ['__unnamed_1a1a']],
'RetestAllocation' : [ 0x0, ['__unnamed_1a1a']],
'BootAllocation' : [ 0x0, ['__unnamed_1a1c']],
'QueryAllocatedResources' : [ 0x0, ['__unnamed_1a20']],
'QueryConflict' : [ 0x0, ['__unnamed_1a22']],
'QueryArbitrate' : [ 0x0, ['__unnamed_1a1c']],
'AddReserved' : [ 0x0, ['__unnamed_1a24']],
} ],
'_ARBITER_PARAMETERS' : [ 0x20, {
'Parameters' : [ 0x0, ['__unnamed_1a26']],
} ],
'POWER_ACTION_POLICY' : [ 0xc, {
'Action' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PowerActionNone', 1: 'PowerActionReserved', 2: 'PowerActionSleep', 3: 'PowerActionHibernate', 4: 'PowerActionShutdown', 5: 'PowerActionShutdownReset', 6: 'PowerActionShutdownOff', 7: 'PowerActionWarmEject'})]],
'Flags' : [ 0x4, ['unsigned long']],
'EventCode' : [ 0x8, ['unsigned long']],
} ],
'_HANDLE_TABLE_ENTRY_INFO' : [ 0x4, {
'AuditMask' : [ 0x0, ['unsigned long']],
} ],
'_POWER_SEQUENCE' : [ 0xc, {
'SequenceD1' : [ 0x0, ['unsigned long']],
'SequenceD2' : [ 0x4, ['unsigned long']],
'SequenceD3' : [ 0x8, ['unsigned long']],
} ],
'_IMAGE_DATA_DIRECTORY' : [ 0x8, {
'VirtualAddress' : [ 0x0, ['unsigned long']],
'Size' : [ 0x4, ['unsigned long']],
} ],
'PO_MEMORY_IMAGE' : [ 0xc0, {
'Signature' : [ 0x0, ['unsigned long']],
'Version' : [ 0x4, ['unsigned long']],
'CheckSum' : [ 0x8, ['unsigned long']],
'LengthSelf' : [ 0xc, ['unsigned long']],
'PageSelf' : [ 0x10, ['unsigned long long']],
'PageSize' : [ 0x18, ['unsigned long']],
'ImageType' : [ 0x1c, ['unsigned long']],
'SystemTime' : [ 0x20, ['_LARGE_INTEGER']],
'InterruptTime' : [ 0x28, ['unsigned long long']],
'FeatureFlags' : [ 0x30, ['unsigned long']],
'HiberFlags' : [ 0x34, ['unsigned char']],
'spare' : [ 0x35, ['array', 3, ['unsigned char']]],
'NoHiberPtes' : [ 0x38, ['unsigned long']],
'HiberVa' : [ 0x40, ['unsigned long long']],
'HiberPte' : [ 0x48, ['_LARGE_INTEGER']],
'NoFreePages' : [ 0x50, ['unsigned long']],
'FreeMapCheck' : [ 0x54, ['unsigned long']],
'WakeCheck' : [ 0x58, ['unsigned long']],
'TotalPages' : [ 0x60, ['unsigned long long']],
'FirstTablePage' : [ 0x68, ['unsigned long long']],
'LastFilePage' : [ 0x70, ['unsigned long long']],
'PerfInfo' : [ 0x78, ['_PO_HIBER_PERF']],
} ],
'BATTERY_REPORTING_SCALE' : [ 0x8, {
'Granularity' : [ 0x0, ['unsigned long']],
'Capacity' : [ 0x4, ['unsigned long']],
} ],
'_CURDIR' : [ 0x18, {
'DosPath' : [ 0x0, ['_UNICODE_STRING']],
'Handle' : [ 0x10, ['pointer64', ['void']]],
} ],
'_PO_HIBER_PERF' : [ 0x48, {
'IoTicks' : [ 0x0, ['unsigned long long']],
'InitTicks' : [ 0x8, ['unsigned long long']],
'CopyTicks' : [ 0x10, ['unsigned long long']],
'StartCount' : [ 0x18, ['unsigned long long']],
'ElapsedTime' : [ 0x20, ['unsigned long']],
'IoTime' : [ 0x24, ['unsigned long']],
'CopyTime' : [ 0x28, ['unsigned long']],
'InitTime' : [ 0x2c, ['unsigned long']],
'PagesWritten' : [ 0x30, ['unsigned long']],
'PagesProcessed' : [ 0x34, ['unsigned long']],
'BytesCopied' : [ 0x38, ['unsigned long']],
'DumpCount' : [ 0x3c, ['unsigned long']],
'FileRuns' : [ 0x40, ['unsigned long']],
} ],
'_FREE_DISPLAY' : [ 0x18, {
'RealVectorSize' : [ 0x0, ['unsigned long']],
'Display' : [ 0x8, ['_RTL_BITMAP']],
} ],
'_KDEVICE_QUEUE_ENTRY' : [ 0x18, {
'DeviceListEntry' : [ 0x0, ['_LIST_ENTRY']],
'SortKey' : [ 0x10, ['unsigned long']],
'Inserted' : [ 0x14, ['unsigned char']],
} ],
'_DEVICE_CAPABILITIES' : [ 0x40, {
'Size' : [ 0x0, ['unsigned short']],
'Version' : [ 0x2, ['unsigned short']],
'DeviceD1' : [ 0x4, ['BitField', dict(start_bit = 0, end_bit = 1, native_type='unsigned long')]],
'DeviceD2' : [ 0x4, ['BitField', dict(start_bit = 1, end_bit = 2, native_type='unsigned long')]],
'LockSupported' : [ 0x4, ['BitField', dict(start_bit = 2, end_bit = 3, native_type='unsigned long')]],
'EjectSupported' : [ 0x4, ['BitField', dict(start_bit = 3, end_bit = 4, native_type='unsigned long')]],
'Removable' : [ 0x4, ['BitField', dict(start_bit = 4, end_bit = 5, native_type='unsigned long')]],
'DockDevice' : [ 0x4, ['BitField', dict(start_bit = 5, end_bit = 6, native_type='unsigned long')]],
'UniqueID' : [ 0x4, ['BitField', dict(start_bit = 6, end_bit = 7, native_type='unsigned long')]],
'SilentInstall' : [ 0x4, ['BitField', dict(start_bit = 7, end_bit = 8, native_type='unsigned long')]],
'RawDeviceOK' : [ 0x4, ['BitField', dict(start_bit = 8, end_bit = 9, native_type='unsigned long')]],
'SurpriseRemovalOK' : [ 0x4, ['BitField', dict(start_bit = 9, end_bit = 10, native_type='unsigned long')]],
'WakeFromD0' : [ 0x4, ['BitField', dict(start_bit = 10, end_bit = 11, native_type='unsigned long')]],
'WakeFromD1' : [ 0x4, ['BitField', dict(start_bit = 11, end_bit = 12, native_type='unsigned long')]],
'WakeFromD2' : [ 0x4, ['BitField', dict(start_bit = 12, end_bit = 13, native_type='unsigned long')]],
'WakeFromD3' : [ 0x4, ['BitField', dict(start_bit = 13, end_bit = 14, native_type='unsigned long')]],
'HardwareDisabled' : [ 0x4, ['BitField', dict(start_bit = 14, end_bit = 15, native_type='unsigned long')]],
'NonDynamic' : [ 0x4, ['BitField', dict(start_bit = 15, end_bit = 16, native_type='unsigned long')]],
'WarmEjectSupported' : [ 0x4, ['BitField', dict(start_bit = 16, end_bit = 17, native_type='unsigned long')]],
'NoDisplayInUI' : [ 0x4, ['BitField', dict(start_bit = 17, end_bit = 18, native_type='unsigned long')]],
'Reserved' : [ 0x4, ['BitField', dict(start_bit = 18, end_bit = 32, native_type='unsigned long')]],
'Address' : [ 0x8, ['unsigned long']],
'UINumber' : [ 0xc, ['unsigned long']],
'DeviceState' : [ 0x10, ['array', -28, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]]],
'SystemWake' : [ 0x2c, ['Enumeration', dict(target = 'long', choices = {0: 'PowerSystemUnspecified', 1: 'PowerSystemWorking', 2: 'PowerSystemSleeping1', 3: 'PowerSystemSleeping2', 4: 'PowerSystemSleeping3', 5: 'PowerSystemHibernate', 6: 'PowerSystemShutdown', 7: 'PowerSystemMaximum'})]],
'DeviceWake' : [ 0x30, ['Enumeration', dict(target = 'long', choices = {0: 'PowerDeviceUnspecified', 1: 'PowerDeviceD0', 2: 'PowerDeviceD1', 3: 'PowerDeviceD2', 4: 'PowerDeviceD3', 5: 'PowerDeviceMaximum'})]],
'D1Latency' : [ 0x34, ['unsigned long']],
'D2Latency' : [ 0x38, ['unsigned long']],
'D3Latency' : [ 0x3c, ['unsigned long']],
} ],
'_VI_POOL_PAGE_HEADER' : [ 0x18, {
'NextPage' : [ 0x0, ['pointer64', ['_SLIST_ENTRY']]],
'VerifierEntry' : [ 0x8, ['pointer64', ['void']]],
'Signature' : [ 0x10, ['unsigned long long']],
} ],
'_RTL_RANGE_LIST' : [ 0x20, {
'ListHead' : [ 0x0, ['_LIST_ENTRY']],
'Flags' : [ 0x10, ['unsigned long']],
'Count' : [ 0x14, ['unsigned long']],
'Stamp' : [ 0x18, ['unsigned long']],
} ],
'_RTL_CRITICAL_SECTION_DEBUG' : [ 0x30, {
'Type' : [ 0x0, ['unsigned short']],
'CreatorBackTraceIndex' : [ 0x2, ['unsigned short']],
'CriticalSection' : [ 0x8, ['pointer64', ['_RTL_CRITICAL_SECTION']]],
'ProcessLocksList' : [ 0x10, ['_LIST_ENTRY']],
'EntryCount' : [ 0x20, ['unsigned long']],
'ContentionCount' : [ 0x24, ['unsigned long']],
'Spare' : [ 0x28, ['array', 2, ['unsigned long']]],
} ],
'__unnamed_1a48' : [ 0x14, {
'ClassGuid' : [ 0x0, ['_GUID']],
'SymbolicLinkName' : [ 0x10, ['array', 1, ['unsigned short']]],
} ],
'__unnamed_1a4a' : [ 0x2, {
'DeviceIds' : [ 0x0, ['array', 1, ['unsigned short']]],
} ],
'__unnamed_1a4c' : [ 0x2, {
'DeviceId' : [ 0x0, ['array', 1, ['unsigned short']]],
} ],
'__unnamed_1a4e' : [ 0x10, {
'NotificationStructure' : [ 0x0, ['pointer64', ['void']]],
'DeviceIds' : [ 0x8, ['array', 1, ['unsigned short']]],
} ],
'__unnamed_1a50' : [ 0x8, {
'Notification' : [ 0x0, ['pointer64', ['void']]],
} ],
'__unnamed_1a52' : [ 0x8, {
'NotificationCode' : [ 0x0, ['unsigned long']],
'NotificationData' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_1a54' : [ 0x8, {
'VetoType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'PNP_VetoTypeUnknown', 1: 'PNP_VetoLegacyDevice', 2: 'PNP_VetoPendingClose', 3: 'PNP_VetoWindowsApp', 4: 'PNP_VetoWindowsService', 5: 'PNP_VetoOutstandingOpen', 6: 'PNP_VetoDevice', 7: 'PNP_VetoDriver', 8: 'PNP_VetoIllegalDeviceRequest', 9: 'PNP_VetoInsufficientPower', 10: 'PNP_VetoNonDisableable', 11: 'PNP_VetoLegacyDriver', 12: 'PNP_VetoInsufficientRights'})]],
'DeviceIdVetoNameBuffer' : [ 0x4, ['array', 1, ['unsigned short']]],
} ],
'__unnamed_1a56' : [ 0x10, {
'BlockedDriverGuid' : [ 0x0, ['_GUID']],
} ],
'__unnamed_1a58' : [ 0x2, {
'ParentId' : [ 0x0, ['array', 1, ['unsigned short']]],
} ],
'__unnamed_1a5a' : [ 0x18, {
'DeviceClass' : [ 0x0, ['__unnamed_1a48']],
'TargetDevice' : [ 0x0, ['__unnamed_1a4a']],
'InstallDevice' : [ 0x0, ['__unnamed_1a4c']],
'CustomNotification' : [ 0x0, ['__unnamed_1a4e']],
'ProfileNotification' : [ 0x0, ['__unnamed_1a50']],
'PowerNotification' : [ 0x0, ['__unnamed_1a52']],
'VetoNotification' : [ 0x0, ['__unnamed_1a54']],
'BlockedDriverNotification' : [ 0x0, ['__unnamed_1a56']],
'InvalidIDNotification' : [ 0x0, ['__unnamed_1a58']],
} ],
'_PLUGPLAY_EVENT_BLOCK' : [ 0x48, {
'EventGuid' : [ 0x0, ['_GUID']],
'EventCategory' : [ 0x10, ['Enumeration', dict(target = 'long', choices = {0: 'HardwareProfileChangeEvent', 1: 'TargetDeviceChangeEvent', 2: 'DeviceClassChangeEvent', 3: 'CustomDeviceEvent', 4: 'DeviceInstallEvent', 5: 'DeviceArrivalEvent', 6: 'PowerEvent', 7: 'VetoEvent', 8: 'BlockedDriverEvent', 9: 'InvalidIDEvent', 10: 'MaxPlugEventCategory'})]],
'Result' : [ 0x18, ['pointer64', ['unsigned long']]],
'Flags' : [ 0x20, ['unsigned long']],
'TotalSize' : [ 0x24, ['unsigned long']],
'DeviceObject' : [ 0x28, ['pointer64', ['void']]],
'u' : [ 0x30, ['__unnamed_1a5a']],
} ],
'_CACHED_CHILD_LIST' : [ 0x10, {
'Count' : [ 0x0, ['unsigned long']],
'ValueList' : [ 0x8, ['unsigned long long']],
'RealKcb' : [ 0x8, ['pointer64', ['_CM_KEY_CONTROL_BLOCK']]],
} ],
'_PO_MEMORY_RANGE_ARRAY' : [ 0x20, {
'Range' : [ 0x0, ['_PO_MEMORY_RANGE_ARRAY_RANGE']],
'Link' : [ 0x0, ['_PO_MEMORY_RANGE_ARRAY_LINK']],
} ],
'__unnamed_1a71' : [ 0x8, {
'Signature' : [ 0x0, ['unsigned long']],
'CheckSum' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_1a73' : [ 0x10, {
'DiskId' : [ 0x0, ['_GUID']],
} ],
'__unnamed_1a75' : [ 0x10, {
'Mbr' : [ 0x0, ['__unnamed_1a71']],
'Gpt' : [ 0x0, ['__unnamed_1a73']],
} ],
'_DUMP_INITIALIZATION_CONTEXT' : [ 0xa0, {
'Length' : [ 0x0, ['unsigned long']],
'Reserved' : [ 0x4, ['unsigned long']],
'MemoryBlock' : [ 0x8, ['pointer64', ['void']]],
'CommonBuffer' : [ 0x10, ['array', 2, ['pointer64', ['void']]]],
'PhysicalAddress' : [ 0x20, ['array', 2, ['_LARGE_INTEGER']]],
'StallRoutine' : [ 0x30, ['pointer64', ['void']]],
'OpenRoutine' : [ 0x38, ['pointer64', ['void']]],
'WriteRoutine' : [ 0x40, ['pointer64', ['void']]],
'FinishRoutine' : [ 0x48, ['pointer64', ['void']]],
'AdapterObject' : [ 0x50, ['pointer64', ['_ADAPTER_OBJECT']]],
'MappedRegisterBase' : [ 0x58, ['pointer64', ['void']]],
'PortConfiguration' : [ 0x60, ['pointer64', ['void']]],
'CrashDump' : [ 0x68, ['unsigned char']],
'MaximumTransferSize' : [ 0x6c, ['unsigned long']],
'CommonBufferSize' : [ 0x70, ['unsigned long']],
'TargetAddress' : [ 0x78, ['pointer64', ['void']]],
'WritePendingRoutine' : [ 0x80, ['pointer64', ['void']]],
'PartitionStyle' : [ 0x88, ['unsigned long']],
'DiskInfo' : [ 0x8c, ['__unnamed_1a75']],
} ],
'_IO_CLIENT_EXTENSION' : [ 0x10, {
'NextExtension' : [ 0x0, ['pointer64', ['_IO_CLIENT_EXTENSION']]],
'ClientIdentificationAddress' : [ 0x8, ['pointer64', ['void']]],
} ],
'_CM_NAME_HASH' : [ 0x18, {
'ConvKey' : [ 0x0, ['unsigned long']],
'NextHash' : [ 0x8, ['pointer64', ['_CM_NAME_HASH']]],
'NameLength' : [ 0x10, ['unsigned short']],
'Name' : [ 0x12, ['array', 1, ['unsigned short']]],
} ],
'_ARBITER_ALLOCATION_STATE' : [ 0x50, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
'CurrentMinimum' : [ 0x10, ['unsigned long long']],
'CurrentMaximum' : [ 0x18, ['unsigned long long']],
'Entry' : [ 0x20, ['pointer64', ['_ARBITER_LIST_ENTRY']]],
'CurrentAlternative' : [ 0x28, ['pointer64', ['_ARBITER_ALTERNATIVE']]],
'AlternativeCount' : [ 0x30, ['unsigned long']],
'Alternatives' : [ 0x38, ['pointer64', ['_ARBITER_ALTERNATIVE']]],
'Flags' : [ 0x40, ['unsigned short']],
'RangeAttributes' : [ 0x42, ['unsigned char']],
'RangeAvailableAttributes' : [ 0x43, ['unsigned char']],
'WorkSpace' : [ 0x48, ['unsigned long long']],
} ],
'_PCI_HEADER_TYPE_0' : [ 0x30, {
'BaseAddresses' : [ 0x0, ['array', 6, ['unsigned long']]],
'CIS' : [ 0x18, ['unsigned long']],
'SubVendorID' : [ 0x1c, ['unsigned short']],
'SubSystemID' : [ 0x1e, ['unsigned short']],
'ROMBaseAddress' : [ 0x20, ['unsigned long']],
'CapabilitiesPtr' : [ 0x24, ['unsigned char']],
'Reserved1' : [ 0x25, ['array', 3, ['unsigned char']]],
'Reserved2' : [ 0x28, ['unsigned long']],
'InterruptLine' : [ 0x2c, ['unsigned char']],
'InterruptPin' : [ 0x2d, ['unsigned char']],
'MinimumGrant' : [ 0x2e, ['unsigned char']],
'MaximumLatency' : [ 0x2f, ['unsigned char']],
} ],
'_PO_DEVICE_NOTIFY_ORDER' : [ 0x410, {
'DevNodeSequence' : [ 0x0, ['unsigned long']],
'WarmEjectPdoPointer' : [ 0x8, ['pointer64', ['pointer64', ['_DEVICE_OBJECT']]]],
'OrderLevel' : [ 0x10, ['array', 8, ['_PO_NOTIFY_ORDER_LEVEL']]],
} ],
'_FS_FILTER_CALLBACKS' : [ 0x68, {
'SizeOfFsFilterCallbacks' : [ 0x0, ['unsigned long']],
'Reserved' : [ 0x4, ['unsigned long']],
'PreAcquireForSectionSynchronization' : [ 0x8, ['pointer64', ['void']]],
'PostAcquireForSectionSynchronization' : [ 0x10, ['pointer64', ['void']]],
'PreReleaseForSectionSynchronization' : [ 0x18, ['pointer64', ['void']]],
'PostReleaseForSectionSynchronization' : [ 0x20, ['pointer64', ['void']]],
'PreAcquireForCcFlush' : [ 0x28, ['pointer64', ['void']]],
'PostAcquireForCcFlush' : [ 0x30, ['pointer64', ['void']]],
'PreReleaseForCcFlush' : [ 0x38, ['pointer64', ['void']]],
'PostReleaseForCcFlush' : [ 0x40, ['pointer64', ['void']]],
'PreAcquireForModifiedPageWriter' : [ 0x48, ['pointer64', ['void']]],
'PostAcquireForModifiedPageWriter' : [ 0x50, ['pointer64', ['void']]],
'PreReleaseForModifiedPageWriter' : [ 0x58, ['pointer64', ['void']]],
'PostReleaseForModifiedPageWriter' : [ 0x60, ['pointer64', ['void']]],
} ],
'_IA64_DBGKD_CONTROL_SET' : [ 0x14, {
'Continue' : [ 0x0, ['unsigned long']],
'CurrentSymbolStart' : [ 0x4, ['unsigned long long']],
'CurrentSymbolEnd' : [ 0xc, ['unsigned long long']],
} ],
'_PO_MEMORY_RANGE_ARRAY_RANGE' : [ 0x20, {
'PageNo' : [ 0x0, ['unsigned long long']],
'StartPage' : [ 0x8, ['unsigned long long']],
'EndPage' : [ 0x10, ['unsigned long long']],
'CheckSum' : [ 0x18, ['unsigned long']],
} ],
'_u' : [ 0x50, {
'KeyNode' : [ 0x0, ['_CM_KEY_NODE']],
'KeyValue' : [ 0x0, ['_CM_KEY_VALUE']],
'KeySecurity' : [ 0x0, ['_CM_KEY_SECURITY']],
'KeyIndex' : [ 0x0, ['_CM_KEY_INDEX']],
'ValueData' : [ 0x0, ['_CM_BIG_DATA']],
'KeyList' : [ 0x0, ['array', 1, ['unsigned long']]],
'KeyString' : [ 0x0, ['array', 1, ['unsigned short']]],
} ],
'_ARBITER_CONFLICT_INFO' : [ 0x18, {
'OwningObject' : [ 0x0, ['pointer64', ['_DEVICE_OBJECT']]],
'Start' : [ 0x8, ['unsigned long long']],
'End' : [ 0x10, ['unsigned long long']],
} ],
'_PO_NOTIFY_ORDER_LEVEL' : [ 0x80, {
'LevelReady' : [ 0x0, ['_KEVENT']],
'DeviceCount' : [ 0x18, ['unsigned long']],
'ActiveCount' : [ 0x1c, ['unsigned long']],
'WaitSleep' : [ 0x20, ['_LIST_ENTRY']],
'ReadySleep' : [ 0x30, ['_LIST_ENTRY']],
'Pending' : [ 0x40, ['_LIST_ENTRY']],
'Complete' : [ 0x50, ['_LIST_ENTRY']],
'ReadyS0' : [ 0x60, ['_LIST_ENTRY']],
'WaitS0' : [ 0x70, ['_LIST_ENTRY']],
} ],
'__unnamed_1aa5' : [ 0x8, {
'Base' : [ 0x0, ['unsigned long']],
'Limit' : [ 0x4, ['unsigned long']],
} ],
'_PCI_HEADER_TYPE_2' : [ 0x30, {
'SocketRegistersBaseAddress' : [ 0x0, ['unsigned long']],
'CapabilitiesPtr' : [ 0x4, ['unsigned char']],
'Reserved' : [ 0x5, ['unsigned char']],
'SecondaryStatus' : [ 0x6, ['unsigned short']],
'PrimaryBus' : [ 0x8, ['unsigned char']],
'SecondaryBus' : [ 0x9, ['unsigned char']],
'SubordinateBus' : [ 0xa, ['unsigned char']],
'SecondaryLatency' : [ 0xb, ['unsigned char']],
'Range' : [ 0xc, ['array', 4, ['__unnamed_1aa5']]],
'InterruptLine' : [ 0x2c, ['unsigned char']],
'InterruptPin' : [ 0x2d, ['unsigned char']],
'BridgeControl' : [ 0x2e, ['unsigned short']],
} ],
'_CM_KEY_VALUE' : [ 0x18, {
'Signature' : [ 0x0, ['unsigned short']],
'NameLength' : [ 0x2, ['unsigned short']],
'DataLength' : [ 0x4, ['unsigned long']],
'Data' : [ 0x8, ['unsigned long']],
'Type' : [ 0xc, ['unsigned long']],
'Flags' : [ 0x10, ['unsigned short']],
'Spare' : [ 0x12, ['unsigned short']],
'Name' : [ 0x14, ['array', 1, ['unsigned short']]],
} ],
'_FS_FILTER_CALLBACK_DATA' : [ 0x40, {
'SizeOfFsFilterCallbackData' : [ 0x0, ['unsigned long']],
'Operation' : [ 0x4, ['unsigned char']],
'Reserved' : [ 0x5, ['unsigned char']],
'DeviceObject' : [ 0x8, ['pointer64', ['_DEVICE_OBJECT']]],
'FileObject' : [ 0x10, ['pointer64', ['_FILE_OBJECT']]],
'Parameters' : [ 0x18, ['_FS_FILTER_PARAMETERS']],
} ],
'_PO_MEMORY_RANGE_ARRAY_LINK' : [ 0x18, {
'Next' : [ 0x0, ['pointer64', ['_PO_MEMORY_RANGE_ARRAY']]],
'NextTable' : [ 0x8, ['unsigned long long']],
'CheckSum' : [ 0x10, ['unsigned long']],
'EntryCount' : [ 0x14, ['unsigned long']],
} ],
'_FAST_IO_DISPATCH' : [ 0xe0, {
'SizeOfFastIoDispatch' : [ 0x0, ['unsigned long']],
'FastIoCheckIfPossible' : [ 0x8, ['pointer64', ['void']]],
'FastIoRead' : [ 0x10, ['pointer64', ['void']]],
'FastIoWrite' : [ 0x18, ['pointer64', ['void']]],
'FastIoQueryBasicInfo' : [ 0x20, ['pointer64', ['void']]],
'FastIoQueryStandardInfo' : [ 0x28, ['pointer64', ['void']]],
'FastIoLock' : [ 0x30, ['pointer64', ['void']]],
'FastIoUnlockSingle' : [ 0x38, ['pointer64', ['void']]],
'FastIoUnlockAll' : [ 0x40, ['pointer64', ['void']]],
'FastIoUnlockAllByKey' : [ 0x48, ['pointer64', ['void']]],
'FastIoDeviceControl' : [ 0x50, ['pointer64', ['void']]],
'AcquireFileForNtCreateSection' : [ 0x58, ['pointer64', ['void']]],
'ReleaseFileForNtCreateSection' : [ 0x60, ['pointer64', ['void']]],
'FastIoDetachDevice' : [ 0x68, ['pointer64', ['void']]],
'FastIoQueryNetworkOpenInfo' : [ 0x70, ['pointer64', ['void']]],
'AcquireForModWrite' : [ 0x78, ['pointer64', ['void']]],
'MdlRead' : [ 0x80, ['pointer64', ['void']]],
'MdlReadComplete' : [ 0x88, ['pointer64', ['void']]],
'PrepareMdlWrite' : [ 0x90, ['pointer64', ['void']]],
'MdlWriteComplete' : [ 0x98, ['pointer64', ['void']]],
'FastIoReadCompressed' : [ 0xa0, ['pointer64', ['void']]],
'FastIoWriteCompressed' : [ 0xa8, ['pointer64', ['void']]],
'MdlReadCompleteCompressed' : [ 0xb0, ['pointer64', ['void']]],
'MdlWriteCompleteCompressed' : [ 0xb8, ['pointer64', ['void']]],
'FastIoQueryOpen' : [ 0xc0, ['pointer64', ['void']]],
'ReleaseForModWrite' : [ 0xc8, ['pointer64', ['void']]],
'AcquireForCcFlush' : [ 0xd0, ['pointer64', ['void']]],
'ReleaseForCcFlush' : [ 0xd8, ['pointer64', ['void']]],
} ],
'_OBJECT_DIRECTORY_ENTRY' : [ 0x18, {
'ChainLink' : [ 0x0, ['pointer64', ['_OBJECT_DIRECTORY_ENTRY']]],
'Object' : [ 0x8, ['pointer64', ['void']]],
'HashValue' : [ 0x10, ['unsigned long']],
} ],
'_POP_DEVICE_POWER_IRP' : [ 0x58, {
'Free' : [ 0x0, ['_SINGLE_LIST_ENTRY']],
'Irp' : [ 0x8, ['pointer64', ['_IRP']]],
'Notify' : [ 0x10, ['pointer64', ['_PO_DEVICE_NOTIFY']]],
'Pending' : [ 0x18, ['_LIST_ENTRY']],
'Complete' : [ 0x28, ['_LIST_ENTRY']],
'Abort' : [ 0x38, ['_LIST_ENTRY']],
'Failed' : [ 0x48, ['_LIST_ENTRY']],
} ],
'_FILE_BASIC_INFORMATION' : [ 0x28, {
'CreationTime' : [ 0x0, ['_LARGE_INTEGER']],
'LastAccessTime' : [ 0x8, ['_LARGE_INTEGER']],
'LastWriteTime' : [ 0x10, ['_LARGE_INTEGER']],
'ChangeTime' : [ 0x18, ['_LARGE_INTEGER']],
'FileAttributes' : [ 0x20, ['unsigned long']],
} ],
'_RTL_RANGE' : [ 0x28, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
'UserData' : [ 0x10, ['pointer64', ['void']]],
'Owner' : [ 0x18, ['pointer64', ['void']]],
'Attributes' : [ 0x20, ['unsigned char']],
'Flags' : [ 0x21, ['unsigned char']],
} ],
'_PCI_HEADER_TYPE_1' : [ 0x30, {
'BaseAddresses' : [ 0x0, ['array', 2, ['unsigned long']]],
'PrimaryBus' : [ 0x8, ['unsigned char']],
'SecondaryBus' : [ 0x9, ['unsigned char']],
'SubordinateBus' : [ 0xa, ['unsigned char']],
'SecondaryLatency' : [ 0xb, ['unsigned char']],
'IOBase' : [ 0xc, ['unsigned char']],
'IOLimit' : [ 0xd, ['unsigned char']],
'SecondaryStatus' : [ 0xe, ['unsigned short']],
'MemoryBase' : [ 0x10, ['unsigned short']],
'MemoryLimit' : [ 0x12, ['unsigned short']],
'PrefetchBase' : [ 0x14, ['unsigned short']],
'PrefetchLimit' : [ 0x16, ['unsigned short']],
'PrefetchBaseUpper32' : [ 0x18, ['unsigned long']],
'PrefetchLimitUpper32' : [ 0x1c, ['unsigned long']],
'IOBaseUpper16' : [ 0x20, ['unsigned short']],
'IOLimitUpper16' : [ 0x22, ['unsigned short']],
'CapabilitiesPtr' : [ 0x24, ['unsigned char']],
'Reserved1' : [ 0x25, ['array', 3, ['unsigned char']]],
'ROMBaseAddress' : [ 0x28, ['unsigned long']],
'InterruptLine' : [ 0x2c, ['unsigned char']],
'InterruptPin' : [ 0x2d, ['unsigned char']],
'BridgeControl' : [ 0x2e, ['unsigned short']],
} ],
'_PRIVILEGE_SET' : [ 0x14, {
'PrivilegeCount' : [ 0x0, ['unsigned long']],
'Control' : [ 0x4, ['unsigned long']],
'Privilege' : [ 0x8, ['array', 1, ['_LUID_AND_ATTRIBUTES']]],
} ],
'_IO_SECURITY_CONTEXT' : [ 0x18, {
'SecurityQos' : [ 0x0, ['pointer64', ['_SECURITY_QUALITY_OF_SERVICE']]],
'AccessState' : [ 0x8, ['pointer64', ['_ACCESS_STATE']]],
'DesiredAccess' : [ 0x10, ['unsigned long']],
'FullCreateOptions' : [ 0x14, ['unsigned long']],
} ],
'_X86_DBGKD_CONTROL_SET' : [ 0x10, {
'TraceFlag' : [ 0x0, ['unsigned long']],
'Dr7' : [ 0x4, ['unsigned long']],
'CurrentSymbolStart' : [ 0x8, ['unsigned long']],
'CurrentSymbolEnd' : [ 0xc, ['unsigned long']],
} ],
'_RTL_ACTIVATION_CONTEXT_STACK_FRAME' : [ 0x18, {
'Previous' : [ 0x0, ['pointer64', ['_RTL_ACTIVATION_CONTEXT_STACK_FRAME']]],
'ActivationContext' : [ 0x8, ['pointer64', ['_ACTIVATION_CONTEXT']]],
'Flags' : [ 0x10, ['unsigned long']],
} ],
'_MAILSLOT_CREATE_PARAMETERS' : [ 0x18, {
'MailslotQuota' : [ 0x0, ['unsigned long']],
'MaximumMessageSize' : [ 0x4, ['unsigned long']],
'ReadTimeout' : [ 0x8, ['_LARGE_INTEGER']],
'TimeoutSpecified' : [ 0x10, ['unsigned char']],
} ],
'_NAMED_PIPE_CREATE_PARAMETERS' : [ 0x28, {
'NamedPipeType' : [ 0x0, ['unsigned long']],
'ReadMode' : [ 0x4, ['unsigned long']],
'CompletionMode' : [ 0x8, ['unsigned long']],
'MaximumInstances' : [ 0xc, ['unsigned long']],
'InboundQuota' : [ 0x10, ['unsigned long']],
'OutboundQuota' : [ 0x14, ['unsigned long']],
'DefaultTimeout' : [ 0x18, ['_LARGE_INTEGER']],
'TimeoutSpecified' : [ 0x20, ['unsigned char']],
} ],
'_CM_BIG_DATA' : [ 0x8, {
'Signature' : [ 0x0, ['unsigned short']],
'Count' : [ 0x2, ['unsigned short']],
'List' : [ 0x4, ['unsigned long']],
} ],
'_SUPPORTED_RANGE' : [ 0x28, {
'Next' : [ 0x0, ['pointer64', ['_SUPPORTED_RANGE']]],
'SystemAddressSpace' : [ 0x8, ['unsigned long']],
'SystemBase' : [ 0x10, ['long long']],
'Base' : [ 0x18, ['long long']],
'Limit' : [ 0x20, ['long long']],
} ],
'_CM_KEY_NODE' : [ 0x50, {
'Signature' : [ 0x0, ['unsigned short']],
'Flags' : [ 0x2, ['unsigned short']],
'LastWriteTime' : [ 0x4, ['_LARGE_INTEGER']],
'Spare' : [ 0xc, ['unsigned long']],
'Parent' : [ 0x10, ['unsigned long']],
'SubKeyCounts' : [ 0x14, ['array', 2, ['unsigned long']]],
'SubKeyLists' : [ 0x1c, ['array', 2, ['unsigned long']]],
'ValueList' : [ 0x24, ['_CHILD_LIST']],
'ChildHiveReference' : [ 0x1c, ['_CM_KEY_REFERENCE']],
'Security' : [ 0x2c, ['unsigned long']],
'Class' : [ 0x30, ['unsigned long']],
'MaxNameLen' : [ 0x34, ['BitField', dict(start_bit = 0, end_bit = 16, native_type='unsigned long')]],
'UserFlags' : [ 0x34, ['BitField', dict(start_bit = 16, end_bit = 20, native_type='unsigned long')]],
'VirtControlFlags' : [ 0x34, ['BitField', dict(start_bit = 20, end_bit = 24, native_type='unsigned long')]],
'Debug' : [ 0x34, ['BitField', dict(start_bit = 24, end_bit = 32, native_type='unsigned long')]],
'MaxClassLen' : [ 0x38, ['unsigned long']],
'MaxValueNameLen' : [ 0x3c, ['unsigned long']],
'MaxValueDataLen' : [ 0x40, ['unsigned long']],
'WorkVar' : [ 0x44, ['unsigned long']],
'NameLength' : [ 0x48, ['unsigned short']],
'ClassLength' : [ 0x4a, ['unsigned short']],
'Name' : [ 0x4c, ['array', 1, ['unsigned short']]],
} ],
'_ARBITER_ORDERING' : [ 0x10, {
'Start' : [ 0x0, ['unsigned long long']],
'End' : [ 0x8, ['unsigned long long']],
} ],
'_ARBITER_LIST_ENTRY' : [ 0x60, {
'ListEntry' : [ 0x0, ['_LIST_ENTRY']],
'AlternativeCount' : [ 0x10, ['unsigned long']],
'Alternatives' : [ 0x18, ['pointer64', ['_IO_RESOURCE_DESCRIPTOR']]],
'PhysicalDeviceObject' : [ 0x20, ['pointer64', ['_DEVICE_OBJECT']]],
'RequestSource' : [ 0x28, ['Enumeration', dict(target = 'long', choices = {0: 'ArbiterRequestLegacyReported', 1: 'ArbiterRequestHalReported', 2: 'ArbiterRequestLegacyAssigned', 3: 'ArbiterRequestPnpDetected', 4: 'ArbiterRequestPnpEnumerated', -1: 'ArbiterRequestUndefined'})]],
'Flags' : [ 0x2c, ['unsigned long']],
'WorkSpace' : [ 0x30, ['long long']],
'InterfaceType' : [ 0x38, ['Enumeration', dict(target = 'long', choices = {0: 'Internal', 1: 'Isa', 2: 'Eisa', 3: 'MicroChannel', 4: 'TurboChannel', 5: 'PCIBus', 6: 'VMEBus', 7: 'NuBus', 8: 'PCMCIABus', 9: 'CBus', 10: 'MPIBus', 11: 'MPSABus', 12: 'ProcessorInternal', 13: 'InternalPowerBus', 14: 'PNPISABus', 15: 'PNPBus', 16: 'MaximumInterfaceType', -1: 'InterfaceTypeUndefined'})]],
'SlotNumber' : [ 0x3c, ['unsigned long']],
'BusNumber' : [ 0x40, ['unsigned long']],
'Assignment' : [ 0x48, ['pointer64', ['_CM_PARTIAL_RESOURCE_DESCRIPTOR']]],
'SelectedAlternative' : [ 0x50, ['pointer64', ['_IO_RESOURCE_DESCRIPTOR']]],
'Result' : [ 0x58, ['Enumeration', dict(target = 'long', choices = {0: 'ArbiterResultSuccess', 1: 'ArbiterResultExternalConflict', 2: 'ArbiterResultNullRequest', -1: 'ArbiterResultUndefined'})]],
} ],
'_LPCP_NONPAGED_PORT_QUEUE' : [ 0x28, {
'Semaphore' : [ 0x0, ['_KSEMAPHORE']],
'BackPointer' : [ 0x20, ['pointer64', ['_LPCP_PORT_OBJECT']]],
} ],
'_CM_KEY_INDEX' : [ 0x8, {
'Signature' : [ 0x0, ['unsigned short']],
'Count' : [ 0x2, ['unsigned short']],
'List' : [ 0x4, ['array', 1, ['unsigned long']]],
} ],
'_FILE_NETWORK_OPEN_INFORMATION' : [ 0x38, {
'CreationTime' : [ 0x0, ['_LARGE_INTEGER']],
'LastAccessTime' : [ 0x8, ['_LARGE_INTEGER']],
'LastWriteTime' : [ 0x10, ['_LARGE_INTEGER']],
'ChangeTime' : [ 0x18, ['_LARGE_INTEGER']],
'AllocationSize' : [ 0x20, ['_LARGE_INTEGER']],
'EndOfFile' : [ 0x28, ['_LARGE_INTEGER']],
'FileAttributes' : [ 0x30, ['unsigned long']],
} ],
'_CM_KEY_REFERENCE' : [ 0x10, {
'KeyCell' : [ 0x0, ['unsigned long']],
'KeyHive' : [ 0x8, ['pointer64', ['_HHIVE']]],
} ],
'_ARBITER_ALTERNATIVE' : [ 0x38, {
'Minimum' : [ 0x0, ['unsigned long long']],
'Maximum' : [ 0x8, ['unsigned long long']],
'Length' : [ 0x10, ['unsigned long']],
'Alignment' : [ 0x14, ['unsigned long']],
'Priority' : [ 0x18, ['long']],
'Flags' : [ 0x1c, ['unsigned long']],
'Descriptor' : [ 0x20, ['pointer64', ['_IO_RESOURCE_DESCRIPTOR']]],
'Reserved' : [ 0x28, ['array', 3, ['unsigned long']]],
} ],
'__unnamed_1b2b' : [ 0x10, {
'EndingOffset' : [ 0x0, ['pointer64', ['_LARGE_INTEGER']]],
'ResourceToRelease' : [ 0x8, ['pointer64', ['pointer64', ['_ERESOURCE']]]],
} ],
'__unnamed_1b2d' : [ 0x8, {
'ResourceToRelease' : [ 0x0, ['pointer64', ['_ERESOURCE']]],
} ],
'__unnamed_1b31' : [ 0x8, {
'SyncType' : [ 0x0, ['Enumeration', dict(target = 'long', choices = {0: 'SyncTypeOther', 1: 'SyncTypeCreateSection'})]],
'PageProtection' : [ 0x4, ['unsigned long']],
} ],
'__unnamed_1b33' : [ 0x28, {
'Argument1' : [ 0x0, ['pointer64', ['void']]],
'Argument2' : [ 0x8, ['pointer64', ['void']]],
'Argument3' : [ 0x10, ['pointer64', ['void']]],
'Argument4' : [ 0x18, ['pointer64', ['void']]],
'Argument5' : [ 0x20, ['pointer64', ['void']]],
} ],
'_FS_FILTER_PARAMETERS' : [ 0x28, {
'AcquireForModifiedPageWriter' : [ 0x0, ['__unnamed_1b2b']],
'ReleaseForModifiedPageWriter' : [ 0x0, ['__unnamed_1b2d']],
'AcquireForSectionSynchronization' : [ 0x0, ['__unnamed_1b31']],
'Others' : [ 0x0, ['__unnamed_1b33']],
} ],
'_COMPRESSED_DATA_INFO' : [ 0xc, {
'CompressionFormatAndEngine' : [ 0x0, ['unsigned short']],
'CompressionUnitShift' : [ 0x2, ['unsigned char']],
'ChunkShift' : [ 0x3, ['unsigned char']],
'ClusterShift' : [ 0x4, ['unsigned char']],
'Reserved' : [ 0x5, ['unsigned char']],
'NumberOfChunks' : [ 0x6, ['unsigned short']],
'CompressedChunkSizes' : [ 0x8, ['array', 1, ['unsigned long']]],
} ],
'_FILE_STANDARD_INFORMATION' : [ 0x18, {
'AllocationSize' : [ 0x0, ['_LARGE_INTEGER']],
'EndOfFile' : [ 0x8, ['_LARGE_INTEGER']],
'NumberOfLinks' : [ 0x10, ['unsigned long']],
'DeletePending' : [ 0x14, ['unsigned char']],
'Directory' : [ 0x15, ['unsigned char']],
} ],
'_CHILD_LIST' : [ 0x8, {
'Count' : [ 0x0, ['unsigned long']],
'List' : [ 0x4, ['unsigned long']],
} ],
'_CM_KEY_SECURITY' : [ 0x28, {
'Signature' : [ 0x0, ['unsigned short']],
'Reserved' : [ 0x2, ['unsigned short']],
'Flink' : [ 0x4, ['unsigned long']],
'Blink' : [ 0x8, ['unsigned long']],
'ReferenceCount' : [ 0xc, ['unsigned long']],
'DescriptorLength' : [ 0x10, ['unsigned long']],
'Descriptor' : [ 0x14, ['_SECURITY_DESCRIPTOR_RELATIVE']],
} ],
}
| gpl-2.0 |
mancoast/CPythonPyc_test | cpython/263_test_nis.py | 58 | 1317 | from test import test_support
import unittest
import nis
class NisTests(unittest.TestCase):
    def test_maps(self):
        """Spot-check one key of each reachable NIS map against nis.match().

        If NIS is not running on the host, the test silently returns
        instead of failing.
        """
        try:
            maps = nis.maps()
        except nis.error, msg:
            # NIS is probably not active, so this test isn't useful
            if test_support.verbose:
                print "Test Skipped:", msg
            # Can't raise TestSkipped as regrtest only recognizes the exception
            # import time.
            return
        try:
            # On some systems, this map is only accessible to the
            # super user
            maps.remove("passwd.adjunct.byname")
        except ValueError:
            # The map wasn't present at all; nothing to exclude.
            pass
        done = 0
        for nismap in maps:
            mapping = nis.cat(nismap)
            for k, v in mapping.items():
                if not k:
                    # Empty keys can't be matched; skip them.
                    continue
                if nis.match(k, nismap) != v:
                    self.fail("NIS match failed for key `%s' in map `%s'" % (k, nismap))
                else:
                    # just test the one key, otherwise this test could take a
                    # very long time
                    done = 1
                    break
            if done:
                break
def test_main():
    """Entry point used by regrtest: run the NIS test case."""
    test_support.run_unittest(NisTests)

if __name__ == '__main__':
    test_main()
| gpl-3.0 |
numo16/wesnoth | data/tools/terrain2wiki.py | 25 | 3386 | #!/usr/bin/python
# -*- coding:utf-8 -*-
"""
A script to create the "Terrain Table" on the TerrainCodeTableWML wiki page.
Add the output to the wiki whenever a new terrain is added to mainline.
"""
from __future__ import with_statement # For python < 2.6
import os
import sys
import re
try:
import argparse
except ImportError:
print('Please install argparse by running "easy_install argparse"')
sys.exit(1)
# Where to get terrain images
terrain_url = "https://raw.github.com/wesnoth/wesnoth/master/data/core/images/terrain/%s.png"
def parse_terrain(data):
    """Convert the [terrain_type] blocks of terrain.cfg into a wiki table.

    ``data`` is the raw text of terrain.cfg. Each block looks like::

        [terrain_type]
        symbol_image=water/ocean-grey-tile
        id=deep_water_gray
        editor_name= _ "Gray Deep Water"
        string=Wog
        aliasof=Wo
        editor_group=water
        [/terrain_type]

    Returns the complete wiki markup for the terrain code table.
    """
    # Drop comment lines first so commented-out terrains are ignored.
    data = "\n".join([line for line in data.split("\n") if not line.startswith("#")])
    terrains = re.compile("\[terrain_type\](.*?)\[\/terrain_type\]", re.DOTALL).findall(data)
    parts = ["""{{AutogeneratedWML}}{| border="1"
!terrain
!name
!string
!alias of
!editor group
"""]
    for raw in terrains:
        # Strip unneeded things.
        raw = raw[5:]
        entries = raw.split("\n ")
        # Don't parse special files that are hacks. They shouldn't be used
        # directly. (They're only there to make aliasing work.)
        if entries[0].startswith(" "):
            continue
        # This avoids problems due to additional = in strings. Exact string
        # removal does not matter as long as we do not print help_topic_text
        # in the wiki page.
        removeus = ("<italic>text='", "'</italic>", "<ref>dst='", "text='", "'</ref>")
        for text in removeus:
            entries = [entry.replace(text, "") for entry in entries]
        # Create a dictionary of key and values
        props = dict([entry.strip().split("=") for entry in entries])
        # Hidden things shouldn't be displayed
        if 'hidden' in props:
            continue
        parts.append("""|-
| %s
| %s
| <code>%s</code>
| <code>%s</code>
| %s
""" % (
        terrain_url % (props['editor_image'] if 'editor_image' in props else props['symbol_image']),
        props['editor_name'][4:-1] if 'editor_name' in props else props['name'][4:-1],
        props['string'].replace("# wmllint: ignore", "").replace("|", "|"),
        props['aliasof'].replace("|", "|") if 'aliasof' in props else "",
        props['editor_group']))
    parts.append("|}")
    return "".join(parts)
if __name__ == "__main__":
    parser = argparse.ArgumentParser(description='terrain2wiki is a tool to\
convert the terrain codes located in terrain.cfg to wiki formatted text.')
    parser.add_argument('-f', '--file', default='data/core/terrain.cfg',
        dest='path', help="The location of the terrain.cfg file.")
    parser.add_argument('-o', '--output', default='/tmp/TerrainCodeTableWML',
        dest='output_path', help="The location of the output file.")
    args = parser.parse_args()

    path = args.path
    output_path = args.output_path

    if not path.endswith('.cfg') or not os.path.exists(path):
        # Format the message *inside* the call: the original wrote
        # print("...") % path, which applies % to print()'s return value
        # (None) and raises TypeError under print-function semantics.
        print("Invalid path: '%s' does not exist or not a .cfg file." % path)
        sys.exit(1)

    with open(path, "r") as input_file:
        data = input_file.read()

    data = parse_terrain(data)

    with open(output_path, "w") as output:
        output.write(data)
| gpl-2.0 |
talele08/appengine-mapreduce | python/src/mapreduce/property_range.py | 48 | 12527 | #!/usr/bin/env python
"""A class representing entity property range."""
# pylint: disable=g-bad-name
# pylint: disable=g-import-not-at-top
import datetime
from google.appengine.ext import ndb
from google.appengine.ext import db
from mapreduce import errors
from mapreduce import util
__all__ = [
"should_shard_by_property_range",
"PropertyRange"]
def should_shard_by_property_range(filters):
  """Tell whether the supplied filters call for property-range sharding.

  Args:
    filters: user supplied filters. Each filter should be a list or tuple of
      format (<property_name_as_str>, <query_operator_as_str>,
      <value_of_certain_type>). Value type is up to the property's type.

  Returns:
    True when at least one filter uses an inequality operator (anything
    other than "="), i.e. the filters describe a range. False otherwise,
    including when no filters were supplied.
  """
  if not filters:
    return False
  # A single non-equality operator is enough to define a range.
  return any(f[1] != "=" for f in filters)
class PropertyRange(object):
  """A class that represents a range on a db.Model's property.

  It supports splitting the range into n shards and generating a query that
  returns entities within that range.
  """

  def __init__(self,
               filters,
               model_class_path):
    """Init.

    Args:
      filters: user supplied filters. Each filter should be a list or tuple of
        format (<property_name_as_str>, <query_operator_as_str>,
        <value_of_certain_type>). Value type should satisfy the property's type.
      model_class_path: full path to the model class in str.
    """
    self.filters = filters
    self.model_class_path = model_class_path
    # Resolve the model class from its dotted path so a range can be
    # serialized with to_json and rebuilt elsewhere with from_json.
    self.model_class = util.for_name(self.model_class_path)
    self.prop, self.start, self.end = self._get_range_from_filters(
        self.filters, self.model_class)

  @classmethod
  def _get_range_from_filters(cls, filters, model_class):
    """Get property range from filters user provided.

    This method also validates there is one and only one closed range on a
    single property.

    Args:
      filters: user supplied filters. Each filter should be a list or tuple of
        format (<property_name_as_str>, <query_operator_as_str>,
        <value_of_certain_type>). Value type should satisfy the property's type.
      model_class: the model class for the entity type to apply filters on.

    Returns:
      a tuple of (property, start_filter, end_filter). property is the model's
      field that the range is about. start_filter and end_filter define the
      start and the end of the range. (None, None, None) if no range is found.

    Raises:
      BadReaderParamsError: if any filter is invalid in any way.
    """
    if not filters:
      return None, None, None
    range_property = None
    start_val = None
    end_val = None
    start_filter = None
    end_filter = None
    for f in filters:
      prop, op, val = f
      if op in [">", ">=", "<", "<="]:
        # All inequality filters must target the same property.
        if range_property and range_property != prop:
          raise errors.BadReaderParamsError(
              "Range on only one property is supported.")
        range_property = prop
        if val is None:
          raise errors.BadReaderParamsError(
              "Range can't be None in filter %s", f)
        if op in [">", ">="]:
          # Lower bound: at most one of > / >= may appear.
          if start_val is not None:
            raise errors.BadReaderParamsError(
                "Operation %s is specified more than once.", op)
          start_val = val
          start_filter = f
        else:
          # Upper bound: at most one of < / <= may appear.
          if end_val is not None:
            raise errors.BadReaderParamsError(
                "Operation %s is specified more than once.", op)
          end_val = val
          end_filter = f
      elif op != "=":
        raise errors.BadReaderParamsError(
            "Only < <= > >= = are supported as operation. Got %s", op)
    if not range_property:
      return None, None, None
    if start_val is None or end_val is None:
      raise errors.BadReaderParamsError(
          "Filter should contains a complete range on property %s",
          range_property)
    # db and ndb expose property descriptors differently.
    if issubclass(model_class, db.Model):
      property_obj = model_class.properties()[range_property]
    else:
      property_obj = (
          model_class._properties[  # pylint: disable=protected-access
              range_property])
    # Only property types with a registered split function can be sharded.
    supported_properties = (
        _DISCRETE_PROPERTY_SPLIT_FUNCTIONS.keys() +
        _CONTINUOUS_PROPERTY_SPLIT_FUNCTIONS.keys())
    if not isinstance(property_obj, tuple(supported_properties)):
      raise errors.BadReaderParamsError(
          "Filtered property %s is not supported by sharding.", range_property)
    if not start_val < end_val:
      raise errors.BadReaderParamsError(
          "Start value %s should be smaller than end value %s",
          start_val, end_val)
    return property_obj, start_filter, end_filter

  def split(self, n):
    """Evenly split this range into contiguous, non overlapping subranges.

    Args:
      n: number of splits.

    Returns:
      a list of contiguous, non overlapping sub PropertyRanges. Maybe less than
      n when not enough subranges.
    """
    new_range_filters = []
    name = self.start[0]
    prop_cls = self.prop.__class__
    if prop_cls in _DISCRETE_PROPERTY_SPLIT_FUNCTIONS:
      # Discrete split functions return the outer endpoints too, normalized
      # to inclusive-start / exclusive-end semantics.
      splitpoints = _DISCRETE_PROPERTY_SPLIT_FUNCTIONS[prop_cls](
          self.start[2], self.end[2], n,
          self.start[1] == ">=", self.end[1] == "<=")
      start_filter = (name, ">=", splitpoints[0])
      for p in splitpoints[1:]:
        end_filter = (name, "<", p)
        new_range_filters.append([start_filter, end_filter])
        start_filter = (name, ">=", p)
    else:
      # Continuous split functions return only the interior points; the
      # original start/end filters bound the first/last subrange.
      splitpoints = _CONTINUOUS_PROPERTY_SPLIT_FUNCTIONS[prop_cls](
          self.start[2], self.end[2], n)
      start_filter = self.start
      for p in splitpoints:
        end_filter = (name, "<", p)
        new_range_filters.append([start_filter, end_filter])
        start_filter = (name, ">=", p)
      new_range_filters.append([start_filter, self.end])
    # Every subrange keeps the original equality filters.
    for f in new_range_filters:
      f.extend(self._equality_filters)
    return [self.__class__(f, self.model_class_path) for f in new_range_filters]

  def make_query(self, ns):
    """Make a query of entities within this range.

    Query options are not supported. They should be specified when the query
    is run.

    Args:
      ns: namespace of this query.

    Returns:
      a db.Query or ndb.Query, depends on the model class's type.
    """
    if issubclass(self.model_class, db.Model):
      query = db.Query(self.model_class, namespace=ns)
      for f in self.filters:
        query.filter("%s %s" % (f[0], f[1]), f[2])
    else:
      query = self.model_class.query(namespace=ns)
      for f in self.filters:
        query = query.filter(ndb.FilterNode(*f))
    return query

  @property
  def _equality_filters(self):
    """The subset of self.filters that are equality ("=") filters."""
    return [f for f in self.filters if f[1] == "="]

  def to_json(self):
    """Serialize this range to a json-compatible dict."""
    return {"filters": self.filters,
            "model_class_path": self.model_class_path}

  @classmethod
  def from_json(cls, json):
    """Reconstruct a PropertyRange from the output of to_json."""
    return cls(json["filters"], json["model_class_path"])
def _split_datetime_property(start, end, n, include_start, include_end):
# datastore stored datetime precision is microsecond.
if not include_start:
start += datetime.timedelta(microseconds=1)
if include_end:
end += datetime.timedelta(microseconds=1)
delta = end - start
stride = delta // n
if stride <= datetime.timedelta():
raise ValueError("Range too small to split: start %r end %r", start, end)
splitpoints = [start]
previous = start
for _ in range(n-1):
point = previous + stride
if point == previous or point > end:
continue
previous = point
splitpoints.append(point)
if end not in splitpoints:
splitpoints.append(end)
return splitpoints
def _split_float_property(start, end, n):
delta = float(end - start)
stride = delta / n
if stride <= 0:
raise ValueError("Range too small to split: start %r end %r", start, end)
splitpoints = []
for i in range(1, n):
splitpoints.append(start + i * stride)
return splitpoints
def _split_integer_property(start, end, n, include_start, include_end):
if not include_start:
start += 1
if include_end:
end += 1
delta = float(end - start)
stride = delta / n
if stride <= 0:
raise ValueError("Range too small to split: start %r end %r", start, end)
splitpoints = [start]
previous = start
for i in range(1, n):
point = start + int(round(i * stride))
if point == previous or point > end:
continue
previous = point
splitpoints.append(point)
if end not in splitpoints:
splitpoints.append(end)
return splitpoints
def _split_string_property(start, end, n, include_start, include_end):
try:
start = start.encode("ascii")
end = end.encode("ascii")
except UnicodeEncodeError, e:
raise ValueError("Only ascii str is supported.", e)
return _split_byte_string_property(start, end, n, include_start, include_end)
# The alphabet splitting supports.
_ALPHABET = "".join(chr(i) for i in range(128))
# String length determines how many unique strings we can choose from.
# We can't split into more shards than this: len(_ALPHABET)^_STRING_LENGTH
_STRING_LENGTH = 4
def _split_byte_string_property(start, end, n, include_start, include_end):
# Get prefix, suffix, and the real start/end to split on.
i = 0
for i, (s, e) in enumerate(zip(start, end)):
if s != e:
break
common_prefix = start[:i]
start_suffix = start[i+_STRING_LENGTH:]
end_suffix = end[i+_STRING_LENGTH:]
start = start[i:i+_STRING_LENGTH]
end = end[i:i+_STRING_LENGTH]
# Convert str to ord.
weights = _get_weights(_STRING_LENGTH)
start_ord = _str_to_ord(start, weights)
if not include_start:
start_ord += 1
end_ord = _str_to_ord(end, weights)
if include_end:
end_ord += 1
# Do split.
stride = (end_ord - start_ord) / float(n)
if stride <= 0:
raise ValueError("Range too small to split: start %s end %s", start, end)
splitpoints = [_ord_to_str(start_ord, weights)]
previous = start_ord
for i in range(1, n):
point = start_ord + int(round(stride * i))
if point == previous or point > end_ord:
continue
previous = point
splitpoints.append(_ord_to_str(point, weights))
end_str = _ord_to_str(end_ord, weights)
if end_str not in splitpoints:
splitpoints.append(end_str)
# Append suffix.
splitpoints[0] += start_suffix
splitpoints[-1] += end_suffix
return [common_prefix + point for point in splitpoints]
def _get_weights(max_length):
"""Get weights for each offset in str of certain max length.
Args:
max_length: max length of the strings.
Returns:
A list of ints as weights.
Example:
If max_length is 2 and alphabet is "ab", then we have order "", "a", "aa",
"ab", "b", "ba", "bb". So the weight for the first char is 3.
"""
weights = [1]
for i in range(1, max_length):
weights.append(weights[i-1] * len(_ALPHABET) + 1)
weights.reverse()
return weights
def _str_to_ord(content, weights):
  """Map a string to its lexicographic rank among bounded-length strings.

  Args:
    content: the string to convert. Of type str.
    weights: weights from _get_weights.

  Returns:
    an int or long that represents the order of this string. "" has order 0.
  """
  # Each character contributes its alphabet index scaled by the positional
  # weight, plus one slot for the shorter prefix that precedes it.
  return sum(weights[pos] * _ALPHABET.index(char) + 1
             for pos, char in enumerate(content))
def _ord_to_str(ordinal, weights):
  """Reverse function of _str_to_ord: rank back to string."""
  pieces = []
  for weight in weights:
    # Rank 0 is the empty continuation: stop emitting characters.
    if not ordinal:
      break
    # Peel off one character: subtract the slot for the shorter prefix,
    # then split rank into (alphabet index, remaining rank).
    index, ordinal = divmod(ordinal - 1, weight)
    pieces.append(_ALPHABET[index])
  return "".join(pieces)
# discrete property split functions all have the same interface.
# They take start, end, shard_number n, include_start, include_end.
# They return at most n+1 points, forming n ranges.
# Each range should be include_start, exclude_end.
_DISCRETE_PROPERTY_SPLIT_FUNCTIONS = {
    db.DateTimeProperty: _split_datetime_property,
    db.IntegerProperty: _split_integer_property,
    db.StringProperty: _split_string_property,
    db.ByteStringProperty: _split_byte_string_property,
    # ndb.
    ndb.DateTimeProperty: _split_datetime_property,
    ndb.IntegerProperty: _split_integer_property,
    ndb.StringProperty: _split_string_property,
    ndb.BlobProperty: _split_byte_string_property
}

# continuous property split functions take start, end and shard_number n,
# and return only the n-1 interior split points.
_CONTINUOUS_PROPERTY_SPLIT_FUNCTIONS = {
    db.FloatProperty: _split_float_property,
    # ndb.
    ndb.FloatProperty: _split_float_property,
}
| apache-2.0 |
telerik/cloudbase-init | cloudbaseinit/tests/plugins/windows/userdataplugins/test_urldownload.py | 1 | 2120 | # Copyright 2013 Mirantis Inc.
# Copyright 2014 Cloudbase Solutions Srl
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
import unittest
from cloudbaseinit.openstack.common import cfg
from cloudbaseinit.plugins.windows.userdataplugins import urldownload
CONF = cfg.CONF
class UrlDownloadHandlerTests(unittest.TestCase):

    def setUp(self):
        self._urldownload = urldownload.URLDownloadPlugin()

    @mock.patch('cloudbaseinit.plugins.windows.userdatautils'
                '.execute_user_data_script')
    def _test_process(self, mock_execute_user_data_script, filename):
        """Exercise process() for a part with or without a filename.

        Note: the original version repeated the whole call/assert stanza
        twice back to back (an apparent copy-paste); a single pass checks
        the same behavior.
        """
        mock_part = mock.MagicMock()
        mock_part.get_filename.return_value = filename

        response = self._urldownload.process(mock_part)

        mock_part.get_filename.assert_called_with()
        if filename:
            # A named part is executed as a user data script and its
            # result is returned verbatim.
            mock_execute_user_data_script.assert_called_with(
                mock_part.get_payload())
            self.assertEqual(response, mock_execute_user_data_script())
        else:
            # Parts without a filename are ignored.
            self.assertTrue(response is None)

    def test_process(self):
        self._test_process(filename='cfn-userdata')

    def test_process_content_not_supported(self):
        self._test_process(filename=None)
| apache-2.0 |
thepiper/standoff | venv/lib/python2.7/site-packages/werkzeug/contrib/profiler.py | 362 | 5151 | # -*- coding: utf-8 -*-
"""
werkzeug.contrib.profiler
~~~~~~~~~~~~~~~~~~~~~~~~~
This module provides a simple WSGI profiler middleware for finding
bottlenecks in web application. It uses the :mod:`profile` or
:mod:`cProfile` module to do the profiling and writes the stats to the
stream provided (defaults to stderr).
Example usage::
from werkzeug.contrib.profiler import ProfilerMiddleware
app = ProfilerMiddleware(app)
:copyright: (c) 2014 by the Werkzeug Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import sys
import time
import os.path
try:
try:
from cProfile import Profile
except ImportError:
from profile import Profile
from pstats import Stats
available = True
except ImportError:
available = False
class MergeStream(object):
    """A stream-like object that forwards every `write` to several streams.

    Use this to log to both `sys.stdout` and a file::

        f = open('profiler.log', 'w')
        stream = MergeStream(sys.stdout, f)
        profiler = ProfilerMiddleware(app, stream)
    """

    def __init__(self, *streams):
        # Refuse an empty fan-out: writes would silently go nowhere.
        if not streams:
            raise TypeError('at least one stream must be given')
        self.streams = streams

    def write(self, data):
        # Forward the payload to every registered stream in order.
        for target in self.streams:
            target.write(data)
class ProfilerMiddleware(object):
    """Simple profiler middleware.  Wraps a WSGI application and profiles
    a request. This intentionally buffers the response so that timings are
    more exact.
    By giving the `profile_dir` argument, pstat.Stats files are saved to that
    directory, one file per request. Without it, a summary is printed to
    `stream` instead.
    For the exact meaning of `sort_by` and `restrictions` consult the
    :mod:`profile` documentation.
    .. versionadded:: 0.9
       Added support for `restrictions` and `profile_dir`.
    :param app: the WSGI application to profile.
    :param stream: the stream for the profiled stats. defaults to stderr.
    :param sort_by: a tuple of columns to sort the result by.
    :param restrictions: a tuple of profiling strictions, not used if dumping
                         to `profile_dir`.
    :param profile_dir: directory name to save pstat files
    """

    def __init__(self, app, stream=None,
                 sort_by=('time', 'calls'), restrictions=(), profile_dir=None):
        # `available` is set at import time depending on whether the
        # profile/pstats modules could be imported.
        if not available:
            raise RuntimeError('the profiler is not available because '
                               'profile or pstat is not installed.')
        self._app = app
        self._stream = stream or sys.stdout
        self._sort_by = sort_by
        self._restrictions = restrictions
        self._profile_dir = profile_dir

    def __call__(self, environ, start_response):
        # The whole response body is buffered here so the profile measures
        # the application, not the client's consumption speed.
        response_body = []

        def catching_start_response(status, headers, exc_info=None):
            # Delegate to the real start_response, but hand the app our
            # buffer's append as the write callable.
            start_response(status, headers, exc_info)
            return response_body.append

        def runapp():
            # Run the wrapped app and drain its iterator inside the
            # profiled call so iteration cost is included.
            appiter = self._app(environ, catching_start_response)
            response_body.extend(appiter)
            if hasattr(appiter, 'close'):
                appiter.close()

        p = Profile()
        start = time.time()
        p.runcall(runapp)
        body = b''.join(response_body)
        elapsed = time.time() - start
        if self._profile_dir is not None:
            # One .prof file per request, named after method, dotted path
            # and elapsed time, e.g. "GET.users.list.000123ms.<ts>.prof".
            prof_filename = os.path.join(self._profile_dir,
                                         '%s.%s.%06dms.%d.prof' % (
                                             environ['REQUEST_METHOD'],
                                             environ.get('PATH_INFO').strip(
                                                 '/').replace('/', '.') or 'root',
                                             elapsed * 1000.0,
                                             time.time()
                                         ))
            p.dump_stats(prof_filename)
        else:
            # No profile_dir: print a human-readable summary to the stream.
            stats = Stats(p, stream=self._stream)
            stats.sort_stats(*self._sort_by)
            self._stream.write('-' * 80)
            self._stream.write('\nPATH: %r\n' % environ.get('PATH_INFO'))
            stats.print_stats(*self._restrictions)
            self._stream.write('-' * 80 + '\n\n')
        return [body]
def make_action(app_factory, hostname='localhost', port=5000,
                threaded=False, processes=1, stream=None,
                sort_by=('time', 'calls'), restrictions=()):
    """Return a new callback for :mod:`werkzeug.script` that starts a local
    server with the profiler enabled.
    ::
        from werkzeug.contrib import profiler
        action_profile = profiler.make_action(make_app)
    """
    # The ('h', hostname) / ('p', port) tuple defaults are werkzeug.script's
    # short-option + default-value convention; do not "simplify" them.
    def action(hostname=('h', hostname), port=('p', port),
               threaded=threaded, processes=processes):
        """Start a new development server."""
        # Imported lazily so merely importing this module does not pull in
        # the serving machinery.
        from werkzeug.serving import run_simple
        app = ProfilerMiddleware(app_factory(), stream, sort_by, restrictions)
        run_simple(hostname, port, app, False, None, threaded, processes)
    return action
| gpl-3.0 |
bcb/qutebrowser | tests/unit/misc/test_split.py | 1 | 6937 | # vim: ft=python fileencoding=utf-8 sts=4 sw=4 et:
# Copyright 2014-2016 Florian Bruhin (The Compiler) <[email protected]>
#
# This file is part of qutebrowser.
#
# qutebrowser is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# qutebrowser is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with qutebrowser. If not, see <http://www.gnu.org/licenses/>.
"""Tests for qutebrowser.misc.split."""
import collections
import pytest
from qutebrowser.misc import split
# Most tests copied from Python's shlex.
# The original test data set was from shellwords, by Hartmut Goebel.
# Format: input/split|output|without|keep/split|output|with|keep/
test_data_str = r"""
one two/one|two/one| two/
one "two three" four/one|two three|four/one| "two three"| four/
one 'two three' four/one|two three|four/one| 'two three'| four/
one "two\" three" four/one|two" three|four/one| "two\" three"| four/
one 'two'\'' three' four/one|two' three|four/one| 'two'\'' three'| four/
one "two three/one|two three/one| "two three/
one 'two three/one|two three/one| 'two three/
one\/one\/one\/
one "two\/one|two\/one| "two\/
one /one/one| /
open -t i/open|-t|i/open| -t| i/
foo bar/foo|bar/foo| bar/
foo bar/foo|bar/ foo| bar/
foo bar /foo|bar/ foo| bar| /
foo bar bla fasel/foo|bar|bla|fasel/foo| bar| bla| fasel/
x y z xxxx/x|y|z|xxxx/x| y| z| xxxx/
\x bar/x|bar/\x| bar/
\ x bar/ x|bar/\ x| bar/
\ bar/ bar/\ bar/
foo \x bar/foo|x|bar/foo| \x| bar/
foo \ x bar/foo| x|bar/foo| \ x| bar/
foo \ bar/foo| bar/foo| \ bar/
foo "bar" bla/foo|bar|bla/foo| "bar"| bla/
"foo" "bar" "bla"/foo|bar|bla/"foo"| "bar"| "bla"/
"foo" bar "bla"/foo|bar|bla/"foo"| bar| "bla"/
"foo" bar bla/foo|bar|bla/"foo"| bar| bla/
foo 'bar' bla/foo|bar|bla/foo| 'bar'| bla/
'foo' 'bar' 'bla'/foo|bar|bla/'foo'| 'bar'| 'bla'/
'foo' bar 'bla'/foo|bar|bla/'foo'| bar| 'bla'/
'foo' bar bla/foo|bar|bla/'foo'| bar| bla/
blurb foo"bar"bar"fasel" baz/blurb|foobarbarfasel|baz/blurb| foo"bar"bar"fasel"| baz/
blurb foo'bar'bar'fasel' baz/blurb|foobarbarfasel|baz/blurb| foo'bar'bar'fasel'| baz/
""//""/
''//''/
foo "" bar/foo||bar/foo| ""| bar/
foo '' bar/foo||bar/foo| ''| bar/
foo "" "" "" bar/foo||||bar/foo| ""| ""| ""| bar/
foo '' '' '' bar/foo||||bar/foo| ''| ''| ''| bar/
\"/"/\"/
"\""/"/"\""/
"foo\ bar"/foo\ bar/"foo\ bar"/
"foo\\ bar"/foo\ bar/"foo\\ bar"/
"foo\\ bar\""/foo\ bar"/"foo\\ bar\""/
"foo\\" bar\"/foo\|bar"/"foo\\"| bar\"/
"foo\\ bar\" dfadf"/foo\ bar" dfadf/"foo\\ bar\" dfadf"/
"foo\\\ bar\" dfadf"/foo\\ bar" dfadf/"foo\\\ bar\" dfadf"/
"foo\\\x bar\" dfadf"/foo\\x bar" dfadf/"foo\\\x bar\" dfadf"/
"foo\x bar\" dfadf"/foo\x bar" dfadf/"foo\x bar\" dfadf"/
\'/'/\'/
'foo\ bar'/foo\ bar/'foo\ bar'/
'foo\\ bar'/foo\\ bar/'foo\\ bar'/
"foo\\\x bar\" df'a\ 'df"/foo\\x bar" df'a\ 'df/"foo\\\x bar\" df'a\ 'df"/
\"foo/"foo/\"foo/
\"foo\x/"foox/\"foo\x/
"foo\x"/foo\x/"foo\x"/
"foo\ "/foo\ /"foo\ "/
foo\ xx/foo xx/foo\ xx/
foo\ x\x/foo xx/foo\ x\x/
foo\ x\x\"/foo xx"/foo\ x\x\"/
"foo\ x\x"/foo\ x\x/"foo\ x\x"/
"foo\ x\x\\"/foo\ x\x\/"foo\ x\x\\"/
"foo\ x\x\\""foobar"/foo\ x\x\foobar/"foo\ x\x\\""foobar"/
"foo\ x\x\\"\'"foobar"/foo\ x\x\'foobar/"foo\ x\x\\"\'"foobar"/
"foo\ x\x\\"\'"fo'obar"/foo\ x\x\'fo'obar/"foo\ x\x\\"\'"fo'obar"/
"foo\ x\x\\"\'"fo'obar" 'don'\''t'/foo\ x\x\'fo'obar|don't/"foo\ x\x\\"\'"fo'obar"| 'don'\''t'/
"foo\ x\x\\"\'"fo'obar" 'don'\''t' \\/foo\ x\x\'fo'obar|don't|\/"foo\ x\x\\"\'"fo'obar"| 'don'\''t'| \\/
foo\ bar/foo bar/foo\ bar/
:-) ;-)/:-)|;-)/:-)| ;-)/
áéíóú/áéíóú/áéíóú/
"""
def _parse_split_test_data_str():
    """Generate the parsed test cases from ``test_data_str``.

    Yields:
        Namedtuples with str attributes: input, keep, no_keep.
    """
    tuple_class = collections.namedtuple('TestCase', 'input, keep, no_keep')
    for raw_line in test_data_str.splitlines():
        if not raw_line:
            continue
        fields = raw_line.split('/')
        yield tuple_class(input=fields[0], keep=fields[1].split('|'),
                          no_keep=fields[2].split('|'))
    # The empty string is a special case not covered by the table above.
    yield tuple_class(input='', keep=[], no_keep=[])
class TestSplit:
    """Test split."""
    # Naming caveat: in the parsed test data, `keep` holds the expected
    # output of a *plain* split() and `no_keep` the expected output with
    # keep=True -- the fields follow the column order of test_data_str,
    # not their literal meaning.
    @pytest.fixture(params=_parse_split_test_data_str(), ids=lambda e: e.input)
    def split_test_case(self, request):
        """Fixture to automatically parametrize all depending tests.
        It will use the test data from test_data_str, parsed using
        _parse_split_test_data_str().
        """
        return request.param
    def test_split(self, split_test_case):
        """Test splitting."""
        items = split.split(split_test_case.input)
        assert items == split_test_case.keep
    def test_split_keep_original(self, split_test_case):
        """Test if splitting with keep=True yields the original string."""
        # Keeping separators must be lossless: the pieces re-join exactly.
        items = split.split(split_test_case.input, keep=True)
        assert ''.join(items) == split_test_case.input
    def test_split_keep(self, split_test_case):
        """Test splitting with keep=True."""
        items = split.split(split_test_case.input, keep=True)
        assert items == split_test_case.no_keep
class TestSimpleSplit:
    """Test simple_split."""
    # Mapping of input string -> expected result of simple_split(..., keep=True).
    # Each kept piece carries its leading whitespace so the pieces re-join
    # to the original input.
    TESTS = {
        ' foo bar': [' foo', ' bar'],
        'foobar': ['foobar'],
        ' foo bar baz ': [' foo', ' bar', ' baz', ' '],
        'f\ti\ts\th': ['f', '\ti', '\ts', '\th'],
        'foo\nbar': ['foo', '\nbar'],
    }
    @pytest.mark.parametrize('test', TESTS, ids=repr)
    def test_str_split(self, test):
        """Test if the behavior matches str.split."""
        # Without keep, simple_split should agree with str.split on the
        # right-stripped input.
        assert split.simple_split(test) == test.rstrip().split()
    @pytest.mark.parametrize('s, maxsplit',
                             [("foo bar baz", 1), (" foo bar baz ", 0)],
                             ids=repr)
    def test_str_split_maxsplit(self, s, maxsplit):
        """Test if the behavior matches str.split with given maxsplit."""
        actual = split.simple_split(s, maxsplit=maxsplit)
        expected = s.rstrip().split(maxsplit=maxsplit)
        assert actual == expected
    @pytest.mark.parametrize('test, expected', TESTS.items(), ids=repr)
    def test_split_keep(self, test, expected):
        """Test splitting with keep=True."""
        assert split.simple_split(test, keep=True) == expected
    def test_maxsplit_0_keep(self):
        """Test special case with maxsplit=0 and keep=True."""
        # maxsplit=0 with keep=True returns the whole string unsplit.
        s = "foo bar"
        assert split.simple_split(s, keep=True, maxsplit=0) == [s]
| gpl-3.0 |
repotvsupertuga/tvsupertuga.repository | instal/script.module.resolveurl/lib/resolveurl/plugins/prostream.py | 2 | 1310 | """
Plugin for ResolveUrl
Copyright (C) 2020 gujal
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
from __resolve_generic__ import ResolveGeneric
from lib import helpers
class ProStreamResolver(ResolveGeneric):
    """Resolver for prostream.to embed pages."""
    name = "prostream.to"
    domains = ['prostream.to']
    pattern = r'(?://|\.)(prostream\.to)/(?:embed-)?([0-9a-zA-Z]+)'

    def get_media_url(self, host, media_id):
        # The stream URL appears in the page's jwplayer "sources" array;
        # only this site-specific pattern is tried (no generic fallbacks).
        embed_page = self.get_url(host, media_id)
        source_patterns = [r'''sources:\s*\["\s*(?P<url>[^"]+)''']
        return helpers.get_media_url(embed_page,
                                     patterns=source_patterns,
                                     generic_patterns=False)

    def get_url(self, host, media_id):
        # Canonical embed-page URL for a given media id.
        return self._default_get_url(host, media_id, template='https://{host}/embed-{media_id}.html')
| gpl-2.0 |
ppwwyyxx/tensorflow | tensorflow/lite/testing/op_tests/unpack.py | 3 | 2091 | # Copyright 2019 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Test configs for unpack."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import tensorflow as tf
from tensorflow.lite.testing.zip_test_utils import create_tensor_data
from tensorflow.lite.testing.zip_test_utils import make_zip_of_tests
from tensorflow.lite.testing.zip_test_utils import register_make_test_function
@register_make_test_function()
def make_unpack_tests(options):
  """Make a set of tests to do unpack.

  Each generated test unpacks a float32 tensor of shape `base_shape` along
  `axis` (clamped to a valid axis for that shape) and checks the first
  unpacked slice.

  Args:
    options: zip-test options controlling how the test archive is built.
  """
  test_parameters = [{
      "base_shape": [[3, 4, 3], [3, 4], [5, 6, 7, 8]],
      "axis": [0, 1, 2, 3],
  }]

  def get_valid_axis(parameters):
    """Clamp the requested axis to the last valid axis of the input shape."""
    # Equivalent to the original decrement loop (and drops its unused list
    # copy): any axis beyond the tensor rank collapses to rank - 1.
    return min(parameters["axis"], len(parameters["base_shape"]) - 1)

  def build_graph(parameters):
    """Build the graph: a placeholder input unpacked along the valid axis."""
    input_tensor = tf.compat.v1.placeholder(
        dtype=tf.float32, name=("input"), shape=parameters["base_shape"])
    outs = tf.unstack(input_tensor, axis=get_valid_axis(parameters))
    # Only the first unpacked slice is verified by the harness.
    return [input_tensor], [outs[0]]

  def build_inputs(parameters, sess, inputs, outputs):
    """Feed random float32 data matching the base shape and run the graph."""
    input_value = create_tensor_data(np.float32, shape=parameters["base_shape"])
    return [input_value], sess.run(
        outputs, feed_dict=dict(zip(inputs, [input_value])))

  make_zip_of_tests(options, test_parameters, build_graph, build_inputs)
| apache-2.0 |
google-code-export/tvstreamrecord | cherrypy/scaffold/__init__.py | 80 | 1859 | """<MyProject>, a CherryPy application.
Use this as a base for creating new CherryPy applications. When you want
to make a new app, copy and paste this folder to some other location
(maybe site-packages) and rename it to the name of your project,
then tweak as desired.
Even before any tweaking, this should serve a few demonstration pages.
Change to this directory and run:
../cherryd -c site.conf
"""
import cherrypy
from cherrypy import tools, url
import os
# Directory holding this module's bundled files (used for the static dir
# below). NOTE(review): joining with os.getcwd() assumes __file__ is
# relative to the process working directory at import time -- confirm.
local_dir = os.path.join(os.getcwd(), os.path.dirname(__file__))
class Root:
    """Demonstration root controller serving a few sample pages."""
    _cp_config = {'tools.log_tracebacks.on': True,
                  }
    def index(self):
        # Landing page linking to the other handlers and the static image.
        return """<html>
        <body>Try some <a href='%s?a=7'>other</a> path,
        or a <a href='%s?n=14'>default</a> path.<br />
        Or, just look at the pretty picture:<br />
        <img src='%s' />
        </body></html>""" % (url("other"), url("else"),
                             url("files/made_with_cherrypy_small.png"))
    index.exposed = True
    def default(self, *args, **kwargs):
        # Catch-all handler: echoes whatever path segments and params arrive.
        return "args: %s kwargs: %s" % (args, kwargs)
    default.exposed = True
    def other(self, a=2, b='bananas', c=None):
        # Example of query parameters with defaults; `a` is coerced to int.
        cherrypy.response.headers['Content-Type'] = 'text/plain'
        if c is None:
            return "Have %d %s." % (int(a), b)
        else:
            return "Have %d %s, %s." % (int(a), b, c)
    other.exposed = True
    # Serves ./static under /files, restricted to the listed extensions.
    files = cherrypy.tools.staticdir.handler(
        section="/files",
        dir=os.path.join(local_dir, "static"),
        # Ignore .php files, etc.
        match=r'\.(css|gif|html?|ico|jpe?g|js|png|swf|xml)$',
    )
# Application root instance; mounted when the app is started (see the module
# docstring's `cherryd -c site.conf` instructions).
root = Root()
# Uncomment the following to use your own favicon instead of CP's default.
#favicon_path = os.path.join(local_dir, "favicon.ico")
#root.favicon_ico = tools.staticfile.handler(filename=favicon_path)
| gpl-3.0 |
tboyce021/home-assistant | homeassistant/components/mcp23017/switch.py | 8 | 2770 | """Support for switch sensor using I2C MCP23017 chip."""
from adafruit_mcp230xx.mcp23017 import MCP23017 # pylint: disable=import-error
import board # pylint: disable=import-error
import busio # pylint: disable=import-error
import digitalio # pylint: disable=import-error
import voluptuous as vol
from homeassistant.components.switch import PLATFORM_SCHEMA
from homeassistant.const import DEVICE_DEFAULT_NAME
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import ToggleEntity
# Configuration keys for this platform.
CONF_INVERT_LOGIC = "invert_logic"
CONF_I2C_ADDRESS = "i2c_address"
CONF_PINS = "pins"
# NOTE(review): CONF_PULL_MODE is declared but not referenced anywhere in
# this module's visible code -- confirm whether it is dead.
CONF_PULL_MODE = "pull_mode"
DEFAULT_INVERT_LOGIC = False
# Default I2C address used when none is configured.
DEFAULT_I2C_ADDRESS = 0x20
# Mapping of pin number -> entity name.
_SWITCHES_SCHEMA = vol.Schema({cv.positive_int: cv.string})
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
    {
        vol.Required(CONF_PINS): _SWITCHES_SCHEMA,
        vol.Optional(CONF_INVERT_LOGIC, default=DEFAULT_INVERT_LOGIC): cv.boolean,
        vol.Optional(CONF_I2C_ADDRESS, default=DEFAULT_I2C_ADDRESS): vol.Coerce(int),
    }
)
def setup_platform(hass, config, add_entities, discovery_info=None):
    """Set up the MCP23017 devices."""
    invert_logic = config.get(CONF_INVERT_LOGIC)
    # One expander chip on the default I2C bus at the configured address.
    i2c_bus = busio.I2C(board.SCL, board.SDA)
    expander = MCP23017(i2c_bus, address=config.get(CONF_I2C_ADDRESS))
    # One switch entity per configured pin number -> name mapping.
    add_entities(
        [MCP23017Switch(pin_name, expander.get_pin(pin_num), invert_logic)
         for pin_num, pin_name in config.get(CONF_PINS).items()])
class MCP23017Switch(ToggleEntity):
    """Representation of a MCP23017 output pin."""

    def __init__(self, name, pin, invert_logic):
        """Initialize the pin."""
        self._name = name or DEVICE_DEFAULT_NAME
        self._pin = pin
        self._invert_logic = invert_logic
        self._state = False
        # Configure the pin as an output and drive it to the "off" level
        # (the inverted level when invert_logic is set).
        self._pin.direction = digitalio.Direction.OUTPUT
        self._pin.value = self._invert_logic

    @property
    def name(self):
        """Return the name of the switch."""
        return self._name

    @property
    def should_poll(self):
        """Return False: state changes are pushed, never polled."""
        return False

    @property
    def is_on(self):
        """Return true if device is on."""
        return self._state

    @property
    def assumed_state(self):
        """Return true: the pin cannot be read back, so state is optimistic."""
        return True

    def turn_on(self, **kwargs):
        """Drive the pin to its active level and record the new state."""
        self._state = True
        self._pin.value = not self._invert_logic
        self.schedule_update_ha_state()

    def turn_off(self, **kwargs):
        """Drive the pin to its inactive level and record the new state."""
        self._state = False
        self._pin.value = self._invert_logic
        self.schedule_update_ha_state()
| apache-2.0 |
cryptica/slapnet | benchmarks/scalable/LeaderElectionCR79/make_net.py | 1 | 2184 | #!/usr/bin/python3
import sys
import random
def make_net(n,order):
def previous(i):
return (((i-2) % n) + 1)
print('petri net "leader election %i" {' % n)
print(' places {')
for i in range(1,n+1):
print(' ', end='')
for j in range(1,n+1):
print('s%in%i ' % (i,j), end='')
print()
print(' ', end='')
for j in range(1,n+1):
print('s%im%i ' % (i,j), end='')
print()
print()
print(' lead')
print(' }')
print(' transitions {')
for i in range(1,n+1):
print(' ', end='')
for j in range(1,n+1):
print('s%isend%i ' % (i,j), end='')
print()
print(' ', end='')
for j in range(1,n+1):
if j < i:
print('s%idisc%i ' % (i,j), end='')
elif i == j:
print('s%iacpt%i ' % (i,j), end='')
else:
print('s%ipass%i ' % (i,j), end='')
print()
print()
print(' newleader')
print(' }')
print(' arcs {')
for i in range(1,n+1):
for j in range(1,n+1):
print(' s%in%i -> s%isend%i -> s%im%i' % (i,j,i,j,i,j))
print()
for j in range(1,n+1):
print(' s%im%i -> ' % (previous(i),j), end='')
if j < i:
print('s%idisc%i ' % (i,j))
elif i == j:
print('s%iacpt%i -> lead' % (i,j))
else:
print('s%ipass%i -> s%im%i' % (i,j,i,j))
print()
print()
print(' lead -> newleader -> { ', end='')
for i in range(1,n+1):
print('s%in%i ' % (i,order[i-1]), end='')
print('}')
print(' }')
print(' initial { ', end='')
for i in range(1,n+1):
print('s%in%i ' % (i,order[i-1]), end='')
print('}')
print('}')
#print('safety property {')
#print(' lead >= 2')
#print('}')
print('liveness property {')
print(' newleader = 0')
print('}')
# Command line: make_net.py <ring size> <order>
# where <order> is 'rand' (shuffle the id assignment), 'rev' (reverse it),
# or anything else for the identity assignment 1..n.
n = int(sys.argv[1])
o = sys.argv[2]
order = list(range(1,n+1))
if o == 'rand':
    random.shuffle(order)
elif o == 'rev':
    order.reverse()
make_net(n,order)
| gpl-3.0 |
Distrotech/intellij-community | python/lib/Lib/site-packages/django/contrib/gis/db/models/manager.py | 505 | 3578 | from django.db.models.manager import Manager
from django.contrib.gis.db.models.query import GeoQuerySet
class GeoManager(Manager):
    "Overrides Manager to return Geographic QuerySets."
    # This manager should be used for queries on related fields
    # so that geometry columns on Oracle and MySQL are selected
    # properly.
    use_for_related_fields = True
    def get_query_set(self):
        # Hand back a GeoQuerySet so the spatial operations below are
        # available on every query built through this manager.
        return GeoQuerySet(self.model, using=self._db)
    # Each method below simply proxies the identically-named GeoQuerySet
    # method so spatial operations can be called directly on the manager.
    def area(self, *args, **kwargs):
        return self.get_query_set().area(*args, **kwargs)
    def centroid(self, *args, **kwargs):
        return self.get_query_set().centroid(*args, **kwargs)
    def collect(self, *args, **kwargs):
        return self.get_query_set().collect(*args, **kwargs)
    def difference(self, *args, **kwargs):
        return self.get_query_set().difference(*args, **kwargs)
    def distance(self, *args, **kwargs):
        return self.get_query_set().distance(*args, **kwargs)
    def envelope(self, *args, **kwargs):
        return self.get_query_set().envelope(*args, **kwargs)
    def extent(self, *args, **kwargs):
        return self.get_query_set().extent(*args, **kwargs)
    def extent3d(self, *args, **kwargs):
        return self.get_query_set().extent3d(*args, **kwargs)
    def force_rhr(self, *args, **kwargs):
        return self.get_query_set().force_rhr(*args, **kwargs)
    def geohash(self, *args, **kwargs):
        return self.get_query_set().geohash(*args, **kwargs)
    def geojson(self, *args, **kwargs):
        return self.get_query_set().geojson(*args, **kwargs)
    def gml(self, *args, **kwargs):
        return self.get_query_set().gml(*args, **kwargs)
    def intersection(self, *args, **kwargs):
        return self.get_query_set().intersection(*args, **kwargs)
    def kml(self, *args, **kwargs):
        return self.get_query_set().kml(*args, **kwargs)
    def length(self, *args, **kwargs):
        return self.get_query_set().length(*args, **kwargs)
    def make_line(self, *args, **kwargs):
        return self.get_query_set().make_line(*args, **kwargs)
    def mem_size(self, *args, **kwargs):
        return self.get_query_set().mem_size(*args, **kwargs)
    def num_geom(self, *args, **kwargs):
        return self.get_query_set().num_geom(*args, **kwargs)
    def num_points(self, *args, **kwargs):
        return self.get_query_set().num_points(*args, **kwargs)
    def perimeter(self, *args, **kwargs):
        return self.get_query_set().perimeter(*args, **kwargs)
    def point_on_surface(self, *args, **kwargs):
        return self.get_query_set().point_on_surface(*args, **kwargs)
    def reverse_geom(self, *args, **kwargs):
        return self.get_query_set().reverse_geom(*args, **kwargs)
    def scale(self, *args, **kwargs):
        return self.get_query_set().scale(*args, **kwargs)
    def snap_to_grid(self, *args, **kwargs):
        return self.get_query_set().snap_to_grid(*args, **kwargs)
    def svg(self, *args, **kwargs):
        return self.get_query_set().svg(*args, **kwargs)
    def sym_difference(self, *args, **kwargs):
        return self.get_query_set().sym_difference(*args, **kwargs)
    def transform(self, *args, **kwargs):
        return self.get_query_set().transform(*args, **kwargs)
    def translate(self, *args, **kwargs):
        return self.get_query_set().translate(*args, **kwargs)
    def union(self, *args, **kwargs):
        return self.get_query_set().union(*args, **kwargs)
    def unionagg(self, *args, **kwargs):
        return self.get_query_set().unionagg(*args, **kwargs)
| apache-2.0 |
xinwu/horizon | openstack_dashboard/dashboards/project/networks/views.py | 43 | 5560 | # Copyright 2012 NEC Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Views for managing Neutron Networks.
"""
from django.core.urlresolvers import reverse
from django.core.urlresolvers import reverse_lazy
from django.utils.translation import ugettext_lazy as _
from horizon import exceptions
from horizon import forms
from horizon import tables
from horizon.utils import memoized
from horizon import workflows
from openstack_dashboard import api
from openstack_dashboard.dashboards.project.networks \
import forms as project_forms
from openstack_dashboard.dashboards.project.networks.ports \
import tables as port_tables
from openstack_dashboard.dashboards.project.networks.subnets \
import tables as subnet_tables
from openstack_dashboard.dashboards.project.networks \
import tables as project_tables
from openstack_dashboard.dashboards.project.networks \
import workflows as project_workflows
class IndexView(tables.DataTableView):
    table_class = project_tables.NetworksTable
    template_name = 'project/networks/index.html'
    page_title = _("Networks")

    def get_data(self):
        """Return the current tenant's networks, or [] on API failure."""
        try:
            tenant_id = self.request.user.tenant_id
            return api.neutron.network_list_for_tenant(self.request,
                                                       tenant_id)
        except Exception:
            exceptions.handle(self.request,
                              _('Network list can not be retrieved.'))
            return []
class CreateView(workflows.WorkflowView):
    """Workflow view for creating a network."""
    workflow_class = project_workflows.CreateNetwork
    ajax_template_name = 'project/networks/create.html'
class UpdateView(forms.ModalFormView):
    """Modal form view for editing an existing network."""
    context_object_name = 'network'
    form_class = project_forms.UpdateNetwork
    form_id = "update_network_form"
    modal_header = _("Edit Network")
    submit_label = _("Save Changes")
    submit_url = "horizon:project:networks:update"
    success_url = reverse_lazy("horizon:project:networks:index")
    template_name = 'project/networks/update.html'
    page_title = _("Update Network")

    def get_context_data(self, **kwargs):
        """Expose the network id and resolved submit URL to the template."""
        context = super(UpdateView, self).get_context_data(**kwargs)
        network_id = self.kwargs['network_id']
        context["network_id"] = network_id
        context["submit_url"] = reverse(self.submit_url, args=(network_id,))
        return context

    @memoized.memoized_method
    def _get_object(self, *args, **kwargs):
        """Fetch the network being edited once, redirecting on failure."""
        network_id = self.kwargs['network_id']
        try:
            return api.neutron.network_get(self.request, network_id)
        except Exception:
            exceptions.handle(self.request,
                              _('Unable to retrieve network details.'),
                              redirect=self.success_url)

    def get_initial(self):
        """Seed the form with the network's current attributes."""
        network = self._get_object()
        return {'network_id': network['id'],
                'tenant_id': network['tenant_id'],
                'name': network['name'],
                'admin_state': network['admin_state_up']}
class DetailView(tables.MultiTableView):
    """Network detail page with a subnets table and a ports table."""
    table_classes = (subnet_tables.SubnetsTable, port_tables.PortsTable)
    template_name = 'project/networks/detail.html'
    page_title = _("Network Details: {{ network.name }}")
    def get_subnets_data(self):
        # Data source for the subnets table; empty list on API failure.
        try:
            network = self._get_data()
            subnets = api.neutron.subnet_list(self.request,
                                              network_id=network.id)
        except Exception:
            subnets = []
            msg = _('Subnet list can not be retrieved.')
            exceptions.handle(self.request, msg)
        return subnets
    def get_ports_data(self):
        # Data source for the ports table; empty list on API failure.
        try:
            network_id = self.kwargs['network_id']
            ports = api.neutron.port_list(self.request, network_id=network_id)
        except Exception:
            ports = []
            msg = _('Port list can not be retrieved.')
            exceptions.handle(self.request, msg)
        return ports
    @memoized.memoized_method
    def _get_data(self):
        # Fetch the network once per request.
        # NOTE(review): the success-path `return network` relies on
        # exceptions.handle() raising a redirect when `redirect` is set;
        # otherwise `network` could be unbound here -- confirm.
        try:
            network_id = self.kwargs['network_id']
            network = api.neutron.network_get(self.request, network_id)
            network.set_id_as_name_if_empty(length=0)
        except Exception:
            msg = _('Unable to retrieve details for network "%s".') \
                % (network_id)
            exceptions.handle(self.request, msg,
                              redirect=self.get_redirect_url())
        return network
    def get_context_data(self, **kwargs):
        # Add the network itself plus its row actions for the page header.
        context = super(DetailView, self).get_context_data(**kwargs)
        network = self._get_data()
        context["network"] = network
        table = project_tables.NetworksTable(self.request)
        context["url"] = self.get_redirect_url()
        context["actions"] = table.render_row_actions(network)
        return context
    @staticmethod
    def get_redirect_url():
        return reverse_lazy('horizon:project:networks:index')
| apache-2.0 |
kmackenzieii/marauders-map | capture/make_fingerprint.py | 1 | 1582 | import re
import os
import kirk
import numpy as n
import pickle
def trimmean(arr, percent):
    """Return the trimmed mean of ``arr``.

    ``percent`` percent of the values are discarded in total, half from
    each end of the *sorted* data, before averaging with numpy.

    Fixes vs. the original:
      * the local ``n = len(arr)`` shadowed the module's ``import numpy
        as n`` alias, so ``n.mean`` raised AttributeError on every call;
      * the data is now sorted first (a trimmed mean on unsorted RSSI
        samples discarded arbitrary values);
      * the slice is symmetric (``k`` from each end) instead of the
        off-by-one ``k + 1`` low cut.

    ``percent`` must be small enough to leave at least one value.
    """
    values = sorted(arr)
    size = len(values)
    # Number of samples to drop from each end.
    k = int(round(size * (float(percent) / 100) / 2))
    return n.mean(values[k:size - k])
# Convenience aliases for the shared map geometry defined in the kirk module.
File = kirk.File
width = kirk.width
height = kirk.height
box_size = kirk.box_size
#Dictionary data structure to hold out parsed data
#For each MAC address there is a multidimensional array of size [x][y]
#In each of those arrays is a list of RSSI values found at that location
rssi = {}
#Loop through every file in our data directory and extract data into rssi
for filename in os.listdir('./fingerprint'):
    # Capture files are named "<x>_<y>_...": the grid cell they were taken in.
    data = re.split('_',filename)
    x = int(data[0])
    y = int(data[1])
    # NOTE(review): the file handle is never closed; acceptable for a short
    # one-shot script but worth a `with` block if this grows.
    f = open('./fingerprint/'+filename)
    for line in f:
        # Rows of interest look like "<mac> <mac> <rssi>"; the duplicated
        # MAC distinguishes them from other scanner output.
        read = line.split()
        if len(read)==3 and read[0] == read[1]:
            mac = read[0]
            if read[2] != '':
                strength = int(read[2].strip())
                if mac in rssi:
                    rssi[mac][x][y].append(strength)
                else:
                    if mac != "48:5a:3f:45:21:0f": #Filter out my cellphone
                        # NOTE(review): arr is built with kirk.y rows and
                        # kirk.x columns but is indexed as [x][y] below --
                        # this only lines up if kirk.x == kirk.y; confirm.
                        arr = [[[] for _ in range(kirk.x)] for _ in range(kirk.y)]
                        rssi.update({mac:arr})
                        rssi[mac][x][y].append(strength)
#Now that we have the data, calculate averages for each location
fingerprint = {}
for mac in rssi:
    # Cells with no samples stay None so the locator can skip them.
    avg = [[None for _ in range(kirk.x)] for _ in range(kirk.y)]
    for x in range(len(rssi[mac])):
        for y in range(len(rssi[mac][x])):
            l = rssi[mac][x][y]
            if len(l) > 0:
                avg[x][y] = n.mean(l)
                #avg[x][y] = trimmean(l, 80)
    fingerprint.update({mac:avg})
# Persist the averaged signal map for the locator to load later.
finger_file = open(r'fingerprint.pkl', 'wb')
pickle.dump(fingerprint, finger_file)
finger_file.close()
| mit |
cwu2011/seaborn | seaborn/timeseries.py | 6 | 13239 | """Timeseries plotting functions."""
from __future__ import division
import numpy as np
import pandas as pd
from scipy import stats, interpolate
import matplotlib as mpl
import matplotlib.pyplot as plt
from .external.six import string_types
from . import utils
from . import algorithms as algo
from .palettes import color_palette
def tsplot(data, time=None, unit=None, condition=None, value=None,
           err_style="ci_band", ci=68, interpolate=True, color=None,
           estimator=np.mean, n_boot=5000, err_palette=None, err_kws=None,
           legend=True, ax=None, **kwargs):
    """Plot one or more timeseries with flexible representation of uncertainty.
    This function can take data specified either as a long-form (tidy)
    DataFrame or as an ndarray with dimensions for sampling unit, time, and
    (optionally) condition. The interpretation of some of the other parameters
    changes depending on the type of object passed as data.
    Parameters
    ----------
    data : DataFrame or ndarray
        Data for the plot. Should either be a "long form" dataframe or an
        array with dimensions (unit, time, condition). In both cases, the
        condition field/dimension is optional. The type of this argument
        determines the interpretation of the next few parameters.
    time : string or series-like
        Either the name of the field corresponding to time in the data
        DataFrame or x values for a plot when data is an array. If a Series,
        the name will be used to label the x axis.
    unit : string
        Field in the data DataFrame identifying the sampling unit (e.g.
        subject, neuron, etc.). The error representation will collapse over
        units at each time/condition observation. This has no role when data
        is an array.
    value : string
        Either the name of the field corresponding to the data values in
        the data DataFrame (i.e. the y coordinate) or a string that forms
        the y axis label when data is an array.
    condition : string or Series-like
        Either the name of the field identifying the condition an observation
        falls under in the data DataFrame, or a sequence of names with a length
        equal to the size of the third dimension of data. There will be a
        separate trace plotted for each condition. If condition is a Series
        with a name attribute, the name will form the title for the plot
        legend (unless legend is set to False).
    err_style : string or list of strings or None
        Names of ways to plot uncertainty across units from set of
        {ci_band, ci_bars, boot_traces, boot_kde, unit_traces, unit_points}.
        Can use one or more than one method.
    ci : float or list of floats in [0, 100]
        Confidence interaval size(s). If a list, it will stack the error
        plots for each confidence interval. Only relevant for error styles
        with "ci" in the name.
    interpolate : boolean
        Whether to do a linear interpolation between each timepoint when
        plotting. The value of this parameter also determines the marker
        used for the main plot traces, unless marker is specified as a keyword
        argument.
    color : seaborn palette or matplotlib color name or dictionary
        Palette or color for the main plots and error representation (unless
        plotting by unit, which can be separately controlled with err_palette).
        If a dictionary, should map condition name to color spec.
    estimator : callable
        Function to determine central tendency and to pass to bootstrap
        must take an ``axis`` argument.
    n_boot : int
        Number of bootstrap iterations.
    err_palette: seaborn palette
        Palette name or list of colors used when plotting data for each unit.
    err_kws : dict, optional
        Keyword argument dictionary passed through to matplotlib function
        generating the error plot,
    ax : axis object, optional
        Plot in given axis; if None creates a new figure
    kwargs :
        Other keyword arguments are passed to main plot() call
    Returns
    -------
    ax : matplotlib axis
        axis with plot data
    """
    # Sort out default values for the parameters
    if ax is None:
        ax = plt.gca()
    if err_kws is None:
        err_kws = {}
    # Handle different types of input data
    if isinstance(data, pd.DataFrame):
        xlabel = time
        ylabel = value
        # Condition is optional
        if condition is None:
            # Dummy single condition so the groupby below has one group.
            condition = pd.Series(np.ones(len(data)))
            legend = False
            legend_name = None
            n_cond = 1
        else:
            legend = True and legend
            legend_name = condition
            n_cond = len(data[condition].unique())
    else:
        # Array input: normalize to 3 dims, then melt into the same
        # long-form DataFrame layout the branch above expects.
        data = np.asarray(data)
        # Data can be a timecourse from a single unit or
        # several observations in one condition
        if data.ndim == 1:
            data = data[np.newaxis, :, np.newaxis]
        elif data.ndim == 2:
            data = data[:, :, np.newaxis]
        n_unit, n_time, n_cond = data.shape
        # Units are experimental observations. Maybe subjects, or neurons
        if unit is None:
            units = np.arange(n_unit)
        unit = "unit"
        units = np.repeat(units, n_time * n_cond)
        ylabel = None
        # Time forms the xaxis of the plot
        if time is None:
            times = np.arange(n_time)
        else:
            times = np.asarray(time)
            xlabel = None
            if hasattr(time, "name"):
                xlabel = time.name
        time = "time"
        times = np.tile(np.repeat(times, n_cond), n_unit)
        # Conditions split the timeseries plots
        if condition is None:
            conds = range(n_cond)
            legend = False
            if isinstance(color, dict):
                err = "Must have condition names if using color dict."
                raise ValueError(err)
        else:
            conds = np.asarray(condition)
            legend = True and legend
            if hasattr(condition, "name"):
                legend_name = condition.name
            else:
                legend_name = None
        condition = "cond"
        conds = np.tile(conds, n_unit * n_time)
        # Value forms the y value in the plot
        if value is None:
            ylabel = None
        else:
            ylabel = value
        value = "value"
        # Convert to long-form DataFrame
        data = pd.DataFrame(dict(value=data.ravel(),
                                 time=times,
                                 unit=units,
                                 cond=conds))
    # Set up the err_style and ci arguments for the loop below
    if isinstance(err_style, string_types):
        err_style = [err_style]
    elif err_style is None:
        err_style = []
    if not hasattr(ci, "__iter__"):
        ci = [ci]
    # Set up the color palette
    if color is None:
        # NOTE(review): "axes.color_cycle" is the legacy matplotlib rcParam;
        # confirm against the matplotlib versions this is pinned to.
        current_palette = mpl.rcParams["axes.color_cycle"]
        if len(current_palette) < n_cond:
            colors = color_palette("husl", n_cond)
        else:
            colors = color_palette(n_colors=n_cond)
    elif isinstance(color, dict):
        colors = [color[c] for c in data[condition].unique()]
    else:
        try:
            colors = color_palette(color, n_cond)
        except ValueError:
            color = mpl.colors.colorConverter.to_rgb(color)
            colors = [color] * n_cond
    # Do a groupby with condition and plot each trace
    for c, (cond, df_c) in enumerate(data.groupby(condition, sort=False)):
        # Pivot to a (unit x time) matrix for this condition.
        df_c = df_c.pivot(unit, time, value)
        x = df_c.columns.values.astype(np.float)
        # Bootstrap the data for confidence intervals
        boot_data = algo.bootstrap(df_c.values, n_boot=n_boot,
                                   axis=0, func=estimator)
        cis = [utils.ci(boot_data, v, axis=0) for v in ci]
        central_data = estimator(df_c.values, axis=0)
        # Get the color for this condition
        color = colors[c]
        # Use subroutines to plot the uncertainty
        for style in err_style:
            # Allow for null style (only plot central tendency)
            if style is None:
                continue
            # Grab the function from the global environment
            try:
                plot_func = globals()["_plot_%s" % style]
            except KeyError:
                raise ValueError("%s is not a valid err_style" % style)
            # Possibly set up to plot each observation in a different color
            if err_palette is not None and "unit" in style:
                orig_color = color
                color = color_palette(err_palette, len(df_c.values))
            # Pass all parameters to the error plotter as keyword args
            # (each _plot_* helper picks the ones it needs and may mutate
            # err_kws in place, e.g. to set default alpha/linewidth).
            plot_kwargs = dict(ax=ax, x=x, data=df_c.values,
                               boot_data=boot_data,
                               central_data=central_data,
                               color=color, err_kws=err_kws)
            # Plot the error representation, possibly for multiple cis
            for ci_i in cis:
                plot_kwargs["ci"] = ci_i
                plot_func(**plot_kwargs)
            if err_palette is not None and "unit" in style:
                color = orig_color
        # Plot the central trace
        kwargs.setdefault("marker", "" if interpolate else "o")
        ls = kwargs.pop("ls", "-" if interpolate else "")
        kwargs.setdefault("linestyle", ls)
        label = cond if legend else "_nolegend_"
        ax.plot(x, central_data, color=color, label=label, **kwargs)
    # Pad the sides of the plot only when not interpolating
    # (uses `x` from the last loop iteration; needs at least two timepoints).
    ax.set_xlim(x.min(), x.max())
    x_diff = x[1] - x[0]
    if not interpolate:
        ax.set_xlim(x.min() - x_diff, x.max() + x_diff)
    # Add the plot labels
    if xlabel is not None:
        ax.set_xlabel(xlabel)
    if ylabel is not None:
        ax.set_ylabel(ylabel)
    if legend:
        ax.legend(loc=0, title=legend_name)
    return ax
# Subroutines for tsplot errorbar plotting
# ----------------------------------------
def _plot_ci_band(ax, x, ci, color, err_kws, **kwargs):
    """Shade a translucent confidence band around the central trace."""
    # ``ci`` is a (low, high) pair of arrays bounding the band.
    low, high = ci
    # Default to a translucent fill unless the caller picked an alpha.
    err_kws.setdefault("alpha", 0.2)
    ax.fill_between(x, low, high, color=color, **err_kws)
def _plot_ci_bars(ax, x, central_data, ci, color, err_kws, **kwargs):
    """Draw a vertical confidence-interval bar at each data point."""
    # ``ci`` holds one (low, high) column per timepoint; walk the points,
    # their central values, and their interval endpoints in lockstep.
    for xpos, _center, (low, high) in zip(x, central_data, ci.T):
        ax.plot([xpos, xpos], [low, high], color=color,
                solid_capstyle="round", **err_kws)
def _plot_boot_traces(ax, x, boot_data, color, err_kws, **kwargs):
    """Overlay the individual bootstrap resample traces."""
    _missing = object()
    # Faint, thin lines by default so the traces read as a cloud.
    if "alpha" not in err_kws:
        err_kws["alpha"] = 0.25
    if "linewidth" not in err_kws:
        err_kws["linewidth"] = 0.25
    # Honor the matplotlib ``lw`` abbreviation by folding it into linewidth.
    lw = err_kws.pop("lw", _missing)
    if lw is not _missing:
        err_kws["linewidth"] = lw
    ax.plot(x, boot_data.T, color=color, label="_nolegend_", **err_kws)
def _plot_unit_traces(ax, x, data, ci, color, err_kws, **kwargs):
    """Draw one line per sampling unit in the original data."""
    if isinstance(color, list):
        # A color list means each unit has its own hue; those lines are
        # drawn individually with a somewhat stronger default alpha.
        err_kws.setdefault("alpha", .5)
        for i, obs in enumerate(data):
            ax.plot(x, obs, color=color[i], label="_nolegend_", **err_kws)
    else:
        # Single shared color: one vectorized plot call covers every unit.
        err_kws.setdefault("alpha", .2)
        ax.plot(x, data.T, color=color, label="_nolegend_", **err_kws)
def _plot_unit_points(ax, x, data, color, err_kws, **kwargs):
    """Mark every raw observation with a discrete point."""
    # Guard clause: a single shared color can be drawn in one vectorized
    # call with a lighter alpha, since points will overlap heavily.
    if not isinstance(color, list):
        ax.plot(x, data.T, "o", color=color, alpha=0.5, markersize=4,
                label="_nolegend_", **err_kws)
        return
    # Per-unit colors: plot each observation row on its own.
    for i, obs in enumerate(data):
        ax.plot(x, obs, "o", color=color[i], alpha=0.8, markersize=4,
                label="_nolegend_", **err_kws)
def _plot_boot_kde(ax, x, boot_data, color, **kwargs):
    """Render the bootstrap distribution as a kernel density image."""
    # ``data`` rides along in kwargs for the other subroutines; the KDE is
    # computed over the bootstrap traces instead, so discard it before
    # delegating to the shared renderer.
    del kwargs["data"]
    _ts_kde(ax, x, boot_data, color, **kwargs)
def _plot_unit_kde(ax, x, data, color, **kwargs):
    """Render the raw sample's kernel density estimate as an image."""
    # Thin wrapper: the shared KDE renderer does all the work.
    _ts_kde(ax, x, data, color, **kwargs)
def _ts_kde(ax, x, data, color, **kwargs):
    """Upsample over time and plot a KDE of the bootstrap distribution."""
    kde_data = []
    # Evaluate every timepoint's density on one shared grid of 100 y values
    # spanning the full data range, so columns of the image are comparable.
    y_min, y_max = data.min(), data.max()
    y_vals = np.linspace(y_min, y_max, 100)
    # Linearly upsample the traces to 100 evenly spaced timepoints so the
    # rendered image is smooth along the time axis.
    upsampler = interpolate.interp1d(x, data)
    data_upsample = upsampler(np.linspace(x.min(), x.max(), 100))
    for pt_data in data_upsample.T:
        pt_kde = stats.kde.gaussian_kde(pt_data)
        kde_data.append(pt_kde(y_vals))
    # After the transpose, rows index y values and columns index timepoints.
    kde_data = np.transpose(kde_data)
    rgb = mpl.colors.ColorConverter().to_rgb(color)
    # Build an RGBA image: constant hue, with density mapped onto alpha.
    img = np.zeros((kde_data.shape[0], kde_data.shape[1], 4))
    img[:, :, :3] = rgb
    # Normalize each timepoint's density to [0, 1] for use as alpha; the
    # explicit clip guards against floating-point overshoot.
    kde_data /= kde_data.max(axis=0)
    kde_data[kde_data > 1] = 1
    img[:, :, 3] = kde_data
    ax.imshow(img, interpolation="spline16", zorder=2,
              extent=(x.min(), x.max(), y_min, y_max),
              aspect="auto", origin="lower")
| bsd-3-clause |
dllsf/odootest | addons/l10n_be_invoice_bba/invoice.py | 11 | 12783 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
#
# Copyright (c) 2011 Noviat nv/sa (www.noviat.be). All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import re, time, random
from openerp import api
from openerp.osv import fields, osv
from openerp.tools.translate import _
import logging
_logger = logging.getLogger(__name__)
"""
account.invoice object:
- Add support for Belgian structured communication
- Rename 'reference' field labels to 'Communication'
"""
class account_invoice(osv.osv):
    """Invoice with support for Belgian BBA structured communications."""
    _inherit = 'account.invoice'
    @api.cr_uid_context
    def _get_reference_type(self, cursor, user, context=None):
        """Add BBA Structured Communication Type and change labels from 'reference' into 'communication' """
        res = super(account_invoice, self)._get_reference_type(cursor, user,
                context=context)
        # Relabel the stock 'none' entry and append the BBA option.
        res[[i for i,x in enumerate(res) if x[0] == 'none'][0]] = ('none', 'Free Communication')
        res.append(('bba', 'BBA Structured Communication'))
        #l_logger.warning('reference_type =  %s' %res )
        return res
    def check_bbacomm(self, val):
        # Validate a Belgian structured communication: only digits and the
        # separators "+ * /" and spaces are allowed; once non-digits are
        # stripped there must be exactly 12 digits, the last two being the
        # first ten modulo 97 (97 substituted when the remainder is 0).
        supported_chars = '0-9+*/ '
        pattern = re.compile('[^' + supported_chars + ']')
        if pattern.findall(val or ''):
            return False
        bbacomm = re.sub('\D', '', val or '')
        if len(bbacomm) == 12:
            base = int(bbacomm[:10])
            mod = base % 97 or 97
            if mod == int(bbacomm[-2:]):
                return True
        return False
    def _check_communication(self, cr, uid, ids):
        # Constraint hook: only 'bba'-typed references are validated.
        # NOTE(review): this returns on the first 'bba' invoice found, so any
        # later records in ``ids`` are not checked -- confirm intended.
        for inv in self.browse(cr, uid, ids):
            if inv.reference_type == 'bba':
                return self.check_bbacomm(inv.reference)
        return True
    def onchange_partner_id(self, cr, uid, ids, type, partner_id,
                            date_invoice=False, payment_term=False,
                            partner_bank_id=False, company_id=False,
                            context=None):
        # Extend the standard onchange: for customer invoices, pick up the
        # partner's preferred communication type and pre-generate a BBA
        # reference when one is configured.
        result = super(account_invoice, self).onchange_partner_id(cr, uid, ids, type, partner_id,
            date_invoice, payment_term, partner_bank_id, company_id, context)
        # reference_type = self.default_get(cr, uid, ['reference_type'])['reference_type']
        # _logger.warning('partner_id %s' % partner_id)
        reference = False
        reference_type = 'none'
        if partner_id:
            if (type == 'out_invoice'):
                reference_type = self.pool.get('res.partner').browse(cr, uid, partner_id, context=context).out_inv_comm_type
                if reference_type:
                    reference = self.generate_bbacomm(cr, uid, ids, type, reference_type, partner_id, '', context=context)['value']['reference']
        res_update = {
            'reference_type': reference_type or 'none',
            'reference': reference,
        }
        result['value'].update(res_update)
        return result
    def generate_bbacomm(self, cr, uid, ids, type, reference_type, partner_id, reference, context=None):
        # Build a '+++xxx/xxxx/xxxxx+++' structured communication according
        # to the partner's configured algorithm ('date', 'partner_ref' or
        # 'random').  An already-valid ``reference`` is left untouched.
        partner_obj = self.pool.get('res.partner')
        reference = reference or ''
        algorithm = False
        if partner_id:
            algorithm = partner_obj.browse(cr, uid, partner_id, context=context).out_inv_comm_algorithm
        algorithm = algorithm or 'random'
        if (type == 'out_invoice'):
            if reference_type == 'bba':
                if algorithm == 'date':
                    # day-of-year + year + a 3-digit daily sequence.
                    if not self.check_bbacomm(reference):
                        doy = time.strftime('%j')
                        year = time.strftime('%Y')
                        seq = '001'
                        seq_ids = self.search(cr, uid,
                            [('type', '=', 'out_invoice'), ('reference_type', '=', 'bba'),
                             ('reference', 'like', '+++%s/%s/%%' % (doy, year))], order='reference')
                        if seq_ids:
                            prev_seq = int(self.browse(cr, uid, seq_ids[-1]).reference[12:15])
                            if prev_seq < 999:
                                seq = '%03d' % (prev_seq + 1)
                            else:
                                raise osv.except_osv(_('Warning!'),
                                    _('The daily maximum of outgoing invoices with an automatically generated BBA Structured Communications has been exceeded!' \
                                      '\nPlease create manually a unique BBA Structured Communication.'))
                        bbacomm = doy + year + seq
                        base = int(bbacomm)
                        mod = base % 97 or 97
                        reference = '+++%s/%s/%s%02d+++' % (doy, year, seq, mod)
                elif algorithm == 'partner_ref':
                    # digits of the partner reference (padded to 7) + sequence.
                    if not self.check_bbacomm(reference):
                        partner_ref = self.pool.get('res.partner').browse(cr, uid, partner_id).ref
                        partner_ref_nr = re.sub('\D', '', partner_ref or '')
                        if (len(partner_ref_nr) < 3) or (len(partner_ref_nr) > 7):
                            raise osv.except_osv(_('Warning!'),
                                _('The Partner should have a 3-7 digit Reference Number for the generation of BBA Structured Communications!' \
                                  '\nPlease correct the Partner record.'))
                        else:
                            partner_ref_nr = partner_ref_nr.ljust(7, '0')
                            seq = '001'
                            seq_ids = self.search(cr, uid,
                                [('type', '=', 'out_invoice'), ('reference_type', '=', 'bba'),
                                 ('reference', 'like', '+++%s/%s/%%' % (partner_ref_nr[:3], partner_ref_nr[3:]))], order='reference')
                            if seq_ids:
                                prev_seq = int(self.browse(cr, uid, seq_ids[-1]).reference[12:15])
                                if prev_seq < 999:
                                    seq = '%03d' % (prev_seq + 1)
                                else:
                                    raise osv.except_osv(_('Warning!'),
                                        _('The daily maximum of outgoing invoices with an automatically generated BBA Structured Communications has been exceeded!' \
                                          '\nPlease create manually a unique BBA Structured Communication.'))
                        bbacomm = partner_ref_nr + seq
                        base = int(bbacomm)
                        mod = base % 97 or 97
                        reference = '+++%s/%s/%s%02d+++' % (partner_ref_nr[:3], partner_ref_nr[3:], seq, mod)
                elif algorithm == 'random':
                    # random 10-digit base with its mod-97 check appended.
                    if not self.check_bbacomm(reference):
                        base = random.randint(1, 9999999999)
                        bbacomm = str(base).rjust(10, '0')
                        base = int(bbacomm)
                        mod = base % 97 or 97
                        mod = str(mod).rjust(2, '0')
                        reference = '+++%s/%s/%s%s+++' % (bbacomm[:3], bbacomm[3:7], bbacomm[7:], mod)
                else:
                    raise osv.except_osv(_('Error!'),
                        _("Unsupported Structured Communication Type Algorithm '%s' !" \
                          "\nPlease contact your OpenERP support channel.") % algorithm)
        return {'value': {'reference': reference}}
    def create(self, cr, uid, vals, context=None):
        # Normalize and uniqueness-check the BBA communication before the
        # invoice record is created.
        reference = vals.get('reference', False)
        reference_type = vals.get('reference_type', False)
        if vals.get('type') == 'out_invoice' and not reference_type:
            # fallback on default communication type for partner
            reference_type = self.pool.get('res.partner').browse(cr, uid, vals['partner_id']).out_inv_comm_type
            if reference_type == 'bba':
                reference = self.generate_bbacomm(cr, uid, [], vals['type'], reference_type, vals['partner_id'], '', context={})['value']['reference']
            vals.update({
                'reference_type': reference_type or 'none',
                'reference': reference,
            })
        if reference_type == 'bba':
            if not reference:
                raise osv.except_osv(_('Warning!'),
                    _('Empty BBA Structured Communication!' \
                      '\nPlease fill in a unique BBA Structured Communication.'))
            if self.check_bbacomm(reference):
                # Canonicalize to the '+++xxx/xxxx/xxxxx+++' presentation.
                reference = re.sub('\D', '', reference)
                vals['reference'] = '+++' + reference[0:3] + '/' + reference[3:7] + '/' + reference[7:] + '+++'
                same_ids = self.search(cr, uid,
                    [('type', '=', 'out_invoice'), ('reference_type', '=', 'bba'),
                     ('reference', '=', vals['reference'])])
                if same_ids:
                    raise osv.except_osv(_('Warning!'),
                        _('The BBA Structured Communication has already been used!' \
                          '\nPlease create manually a unique BBA Structured Communication.'))
        return super(account_invoice, self).create(cr, uid, vals, context=context)
    def write(self, cr, uid, ids, vals, context=None):
        # Same canonicalization/uniqueness rules as create(), applied per
        # record being written.
        if isinstance(ids, (int, long)):
            ids = [ids]
        for inv in self.browse(cr, uid, ids, context):
            if vals.has_key('reference_type'):
                reference_type = vals['reference_type']
            else:
                reference_type = inv.reference_type or ''
            if reference_type == 'bba':
                if vals.has_key('reference'):
                    bbacomm = vals['reference']
                else:
                    bbacomm = inv.reference or ''
                if self.check_bbacomm(bbacomm):
                    reference = re.sub('\D', '', bbacomm)
                    vals['reference'] = '+++' + reference[0:3] + '/' + reference[3:7] + '/' + reference[7:] + '+++'
                    same_ids = self.search(cr, uid,
                        [('id', '!=', inv.id), ('type', '=', 'out_invoice'),
                         ('reference_type', '=', 'bba'), ('reference', '=', vals['reference'])])
                    if same_ids:
                        raise osv.except_osv(_('Warning!'),
                            _('The BBA Structured Communication has already been used!' \
                              '\nPlease create manually a unique BBA Structured Communication.'))
        return super(account_invoice, self).write(cr, uid, ids, vals, context)
    def copy(self, cr, uid, id, default=None, context=None):
        # A duplicated customer invoice must never reuse the original's BBA
        # number, so generate a fresh one for the copy.
        default = default or {}
        invoice = self.browse(cr, uid, id, context=context)
        if invoice.type in ['out_invoice']:
            reference_type = invoice.reference_type or 'none'
            default['reference_type'] = reference_type
            if reference_type == 'bba':
                partner = invoice.partner_id
                default['reference'] = self.generate_bbacomm(cr, uid, id,
                    invoice.type, reference_type,
                    partner.id, '', context=context)['value']['reference']
        return super(account_invoice, self).copy(cr, uid, id, default, context=context)
    _columns = {
        'reference': fields.char('Communication', help="The partner reference of this invoice."),
        'reference_type': fields.selection(_get_reference_type, 'Communication Type',
            required=True),
    }
    # NOTE(review): the constraint's field list uses the label
    # 'Communication' rather than the field name 'reference' -- confirm the
    # constraint highlights the intended field.
    _constraints = [
        (_check_communication, 'Invalid BBA Structured Communication !', ['Communication']),
    ]
# Instantiate the osv model (required registration idiom in this OpenERP era).
account_invoice()
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 |
WillieMaddox/numpy | numpy/linalg/tests/test_regression.py | 78 | 3097 | """ Test functions for linalg module
"""
from __future__ import division, absolute_import, print_function
import numpy as np
from numpy import linalg, arange, float64, array, dot, transpose
from numpy.testing import (
TestCase, run_module_suite, assert_equal, assert_array_equal,
assert_array_almost_equal, assert_array_less
)
rlevel = 1  # legacy numpy.testing "test level"; used as the level= default below
class TestRegression(TestCase):
    # Each test pins the fix for a specific historical numpy.linalg bug;
    # the numeric fixtures are the exact values from those tickets.
    def test_eig_build(self, level=rlevel):
        # Ticket #652
        rva = array([1.03221168e+02 + 0.j,
                     -1.91843603e+01 + 0.j,
                     -6.04004526e-01 + 15.84422474j,
                     -6.04004526e-01 - 15.84422474j,
                     -1.13692929e+01 + 0.j,
                     -6.57612485e-01 + 10.41755503j,
                     -6.57612485e-01 - 10.41755503j,
                     1.82126812e+01 + 0.j,
                     1.06011014e+01 + 0.j,
                     7.80732773e+00 + 0.j,
                     -7.65390898e-01 + 0.j,
                     1.51971555e-15 + 0.j,
                     -1.51308713e-15 + 0.j])
        a = arange(13 * 13, dtype=float64)
        a.shape = (13, 13)
        a = a % 17
        va, ve = linalg.eig(a)
        # Eigenvalue order is not specified; sort both before comparing.
        va.sort()
        rva.sort()
        assert_array_almost_equal(va, rva)
    def test_eigh_build(self, level=rlevel):
        # Ticket 662.
        rvals = [68.60568999, 89.57756725, 106.67185574]
        cov = array([[77.70273908, 3.51489954, 15.64602427],
                     [3.51489954, 88.97013878, -1.07431931],
                     [15.64602427, -1.07431931, 98.18223512]])
        vals, vecs = linalg.eigh(cov)
        assert_array_almost_equal(vals, rvals)
    def test_svd_build(self, level=rlevel):
        # Ticket 627.
        a = array([[0., 1.], [1., 1.], [2., 1.], [3., 1.]])
        m, n = a.shape
        u, s, vh = linalg.svd(a)
        # Columns of u beyond rank n must be orthogonal to a.
        b = dot(transpose(u[:, n:]), a)
        assert_array_almost_equal(b, np.zeros((2, 2)))
    def test_norm_vector_badarg(self):
        # Regression for #786: Froebenius norm for vectors raises
        # TypeError.
        self.assertRaises(ValueError, linalg.norm, array([1., 2., 3.]), 'fro')
    def test_lapack_endian(self):
        # For bug #1482: results must not depend on input byte order.
        a = array([[5.7998084, -2.1825367],
                   [-2.1825367, 9.85910595]], dtype='>f8')
        b = array(a, dtype='<f8')
        ap = linalg.cholesky(a)
        bp = linalg.cholesky(b)
        assert_array_equal(ap, bp)
    def test_large_svd_32bit(self):
        # See gh-4442, 64bit would require very large/slow matrices.
        x = np.eye(1000, 66)
        np.linalg.svd(x)
    def test_svd_no_uv(self):
        # gh-4733: compute_uv=False path must still give correct values.
        for shape in (3, 4), (4, 4), (4, 3):
            for t in float, complex:
                a = np.ones(shape, dtype=t)
                w = linalg.svd(a, compute_uv=False)
                # A rank-1 all-ones matrix has exactly one nonzero
                # singular value, larger than 1.
                c = np.count_nonzero(np.absolute(w) > 0.5)
                assert_equal(c, 1)
                assert_equal(np.linalg.matrix_rank(a), 1)
                assert_array_less(1, np.linalg.norm(a, ord=2))
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    run_module_suite()
| bsd-3-clause |
pcostell/apitools | apitools/base/py/exceptions.py | 8 | 4111 | #!/usr/bin/env python
#
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Exceptions for generated client libraries."""
# Root of the package's exception hierarchy; everything below derives from
# Error so callers can catch all library failures with one except clause.
class Error(Exception):
    """Base class for all exceptions."""
class TypecheckError(Error, TypeError):
    """An object of an incorrect type is provided."""
class NotFoundError(Error):
    """A specified resource could not be found."""
class UserError(Error):
    """Base class for errors related to user input."""
class InvalidDataError(Error):
    """Base class for any invalid data error."""
class CommunicationError(Error):
    """Any communication error talking to an API server."""
class HttpError(CommunicationError):
    """Error making an HTTP request.

    Attributes:
      response: Mapping of response headers/info; 'status' is read by
          status_code.
      content: Response body, as bytes or text.
      url: The URL that was requested.
      method_config: Optional method configuration for the failed call.
      request: Optional request object that produced this error.
    """
    def __init__(self, response, content, url,
                 method_config=None, request=None):
        super(HttpError, self).__init__()
        self.response = response
        self.content = content
        self.url = url
        self.method_config = method_config
        self.request = request
    def __str__(self):
        content = self.content
        if isinstance(content, bytes):
            # Undecodable bytes are replaced rather than raising.
            content = self.content.decode('ascii', 'replace')
        return 'HttpError accessing <%s>: response: <%s>, content <%s>' % (
            self.url, self.response, content)
    @property
    def status_code(self):
        """Return the integer HTTP status of the response."""
        # TODO(craigcitro): Turn this into something better than a
        # KeyError if there is no status.
        return int(self.response['status'])
    @classmethod
    def FromResponse(cls, http_response):
        """Build an HttpError from a response object exposing info/content/request_url."""
        return cls(http_response.info, http_response.content,
                   http_response.request_url)
# Leaf exception types: each names one failure category so callers can
# catch precisely what they can handle.
class InvalidUserInputError(InvalidDataError):
    """User-provided input is invalid."""
class InvalidDataFromServerError(InvalidDataError, CommunicationError):
    """Data received from the server is malformed."""
class BatchError(Error):
    """Error generated while constructing a batch request."""
class ConfigurationError(Error):
    """Base class for configuration errors."""
class GeneratedClientError(Error):
    """The generated client configuration is invalid."""
class ConfigurationValueError(UserError):
    """Some part of the user-specified client configuration is invalid."""
class ResourceUnavailableError(Error):
    """User requested an unavailable resource."""
class CredentialsError(Error):
    """Errors related to invalid credentials."""
class TransferError(CommunicationError):
    """Errors related to transfers."""
class TransferRetryError(TransferError):
    """Retryable errors related to transfers."""
class TransferInvalidError(TransferError):
    """The given transfer is invalid."""
class RequestError(CommunicationError):
    """The request was not successful."""
class RetryAfterError(HttpError):
    """The response contained a retry-after header."""
    def __init__(self, response, content, url, retry_after):
        super(RetryAfterError, self).__init__(response, content, url)
        # Header values arrive as strings; normalize to an int (seconds).
        self.retry_after = int(retry_after)
    @classmethod
    def FromResponse(cls, http_response):
        """Build a RetryAfterError from a response carrying retry_after."""
        return cls(http_response.info, http_response.content,
                   http_response.request_url, http_response.retry_after)
class BadStatusCodeError(HttpError):
    """The request completed but returned a bad status code."""
class NotYetImplementedError(GeneratedClientError):
    """This functionality is not yet implemented."""
# Raised by stream readers when asked for more data than remains.
class StreamExhausted(Error):
    """Attempted to read more bytes from a stream than were available."""
| apache-2.0 |
akretion/odoo | addons/stock/tests/common.py | 15 | 5095 | # -*- coding: utf-8 -*-
from odoo.tests import common
class TestStockCommon(common.TransactionCase):
    """Shared fixture base for stock tests: model handles, locations,
    units of measure, and products in several UoMs."""
    def setUp(self):
        """Cache model proxies, xmlid lookups, UoMs and demo products."""
        super(TestStockCommon, self).setUp()
        # Frequently used model proxies.
        self.ProductObj = self.env['product.product']
        self.UomObj = self.env['uom.uom']
        self.PartnerObj = self.env['res.partner']
        self.ModelDataObj = self.env['ir.model.data']
        self.StockPackObj = self.env['stock.move.line']
        self.StockQuantObj = self.env['stock.quant']
        self.PickingObj = self.env['stock.picking']
        self.MoveObj = self.env['stock.move']
        self.InvObj = self.env['stock.inventory']
        self.InvLineObj = self.env['stock.inventory.line']
        self.LotObj = self.env['stock.production.lot']
        # Model Data
        self.partner_agrolite_id = self.ModelDataObj.xmlid_to_res_id('base.res_partner_2')
        self.partner_delta_id = self.ModelDataObj.xmlid_to_res_id('base.res_partner_4')
        self.picking_type_in = self.ModelDataObj.xmlid_to_res_id('stock.picking_type_in')
        self.picking_type_out = self.ModelDataObj.xmlid_to_res_id('stock.picking_type_out')
        self.supplier_location = self.ModelDataObj.xmlid_to_res_id('stock.stock_location_suppliers')
        self.stock_location = self.ModelDataObj.xmlid_to_res_id('stock.stock_location_stock')
        # Pack/output locations ship archived in demo data; activate them.
        pack_location = self.env.ref('stock.location_pack_zone')
        pack_location.active = True
        self.pack_location = pack_location.id
        output_location = self.env.ref('stock.stock_location_output')
        output_location.active = True
        self.output_location = output_location.id
        self.customer_location = self.ModelDataObj.xmlid_to_res_id('stock.stock_location_customers')
        self.categ_unit = self.ModelDataObj.xmlid_to_res_id('uom.product_uom_categ_unit')
        self.categ_kgm = self.ModelDataObj.xmlid_to_res_id('uom.product_uom_categ_kgm')
        # Product Created A, B, C, D
        self.productA = self.ProductObj.create({'name': 'Product A', 'type': 'product'})
        self.productB = self.ProductObj.create({'name': 'Product B', 'type': 'product'})
        self.productC = self.ProductObj.create({'name': 'Product C', 'type': 'product'})
        self.productD = self.ProductObj.create({'name': 'Product D', 'type': 'product'})
        self.productE = self.ProductObj.create({'name': 'Product E', 'type': 'product'})
        # Configure unit of measure.
        self.uom_kg = self.env['uom.uom'].search([('category_id', '=', self.categ_kgm), ('uom_type', '=', 'reference')], limit=1)
        self.uom_kg.write({
            'name': 'Test-KG',
            'rounding': 0.000001})
        self.uom_tone = self.UomObj.create({
            'name': 'Test-Tone',
            'category_id': self.categ_kgm,
            'uom_type': 'bigger',
            'factor_inv': 1000.0,
            'rounding': 0.001})
        self.uom_gm = self.UomObj.create({
            'name': 'Test-G',
            'category_id': self.categ_kgm,
            'uom_type': 'smaller',
            'factor': 1000.0,
            'rounding': 0.001})
        self.uom_mg = self.UomObj.create({
            'name': 'Test-MG',
            'category_id': self.categ_kgm,
            'uom_type': 'smaller',
            'factor': 100000.0,
            'rounding': 0.001})
        # Check Unit
        self.uom_unit = self.env['uom.uom'].search([('category_id', '=', self.categ_unit), ('uom_type', '=', 'reference')], limit=1)
        self.uom_unit.write({
            'name': 'Test-Unit',
            'rounding': 1.0})
        self.uom_dozen = self.UomObj.create({
            'name': 'Test-DozenA',
            'category_id': self.categ_unit,
            'factor_inv': 12,
            'uom_type': 'bigger',
            'rounding': 0.001})
        self.uom_sdozen = self.UomObj.create({
            'name': 'Test-SDozenA',
            'category_id': self.categ_unit,
            'factor_inv': 144,
            'uom_type': 'bigger',
            'rounding': 0.001})
        self.uom_sdozen_round = self.UomObj.create({
            'name': 'Test-SDozenA Round',
            'category_id': self.categ_unit,
            'factor_inv': 144,
            'uom_type': 'bigger',
            'rounding': 1.0})
        # Product for different unit of measure.
        self.DozA = self.ProductObj.create({'name': 'Dozon-A', 'type': 'product', 'uom_id': self.uom_dozen.id, 'uom_po_id': self.uom_dozen.id})
        self.SDozA = self.ProductObj.create({'name': 'SuperDozon-A', 'type': 'product', 'uom_id': self.uom_sdozen.id, 'uom_po_id': self.uom_sdozen.id})
        self.SDozARound = self.ProductObj.create({'name': 'SuperDozenRound-A', 'type': 'product', 'uom_id': self.uom_sdozen_round.id, 'uom_po_id': self.uom_sdozen_round.id})
        self.UnitA = self.ProductObj.create({'name': 'Unit-A', 'type': 'product'})
        self.kgB = self.ProductObj.create({'name': 'kg-B', 'type': 'product', 'uom_id': self.uom_kg.id, 'uom_po_id': self.uom_kg.id})
        self.gB = self.ProductObj.create({'name': 'g-B', 'type': 'product', 'uom_id': self.uom_gm.id, 'uom_po_id': self.uom_gm.id})
| agpl-3.0 |
boundary/boundary-plugin-aws-elb | boundary_aws_plugin/cloudwatch_plugin.py | 8 | 4606 | from __future__ import (absolute_import, division, print_function, unicode_literals)
import logging
import datetime
import time
from . import boundary_plugin
from . import status_store
"""
If getting statistics from CloudWatch fails, we will retry up to this number of times before
giving up and aborting the plugin. Use 0 for unlimited retries.
"""
PLUGIN_RETRY_COUNT = 0  # 0 = retry forever (see get_metric_data_with_retries)
"""
If getting statistics from CloudWatch fails, we will wait this long (in seconds) before retrying.
This value must not be greater than 30 seconds, because the Boundary Relay will think we've
timed out and terminate us after 30 seconds of inactivity.
"""
PLUGIN_RETRY_DELAY = 5  # seconds; must stay under the relay's 30s inactivity timeout
class CloudwatchPlugin(object):
    """Boundary plugin driver that polls CloudWatch metrics and reports them."""
    def __init__(self, cloudwatch_metrics_type, boundary_metric_prefix, status_store_filename):
        # cloudwatch_metrics_type: factory/class instantiated later in main()
        # with (access_key_id, secret_key).
        self.cloudwatch_metrics_type = cloudwatch_metrics_type
        self.boundary_metric_prefix = boundary_metric_prefix
        self.status_store_filename = status_store_filename
    def get_metric_data_with_retries(self, *args, **kwargs):
        """
        Calls the get_metric_data function, taking into account retry configuration.
        """
        # iter(int, 1) is an infinite iterator (int() == 0 never equals 1),
        # used when PLUGIN_RETRY_COUNT is 0 (unlimited retries).
        # NOTE(review): ``xrange`` makes this Python 2-only.
        retry_range = xrange(PLUGIN_RETRY_COUNT) if PLUGIN_RETRY_COUNT > 0 else iter(int, 1)
        for _ in retry_range:
            try:
                return self.cloudwatch_metrics.get_metric_data(*args, **kwargs)
            except Exception as e:
                logging.error("Error retrieving CloudWatch data: %s" % e)
                # Keep the relay keepalive happy while we wait to retry.
                boundary_plugin.report_alive()
                time.sleep(PLUGIN_RETRY_DELAY)
                boundary_plugin.report_alive()
        logging.fatal("Max retries exceeded retrieving CloudWatch data")
        raise Exception("Max retries exceeded retrieving CloudWatch data")
    def handle_metrics(self, data, reported_metrics):
        """Report new samples and persist the high-water marks."""
        # Data format:
        # (RegionId, EntityName, MetricName) -> [(Timestamp, Value, Statistic), (Timestamp, Value, Statistic), ...]
        for metric_key, metric_list in data.items():
            region_id, entity_name, metric_name = metric_key
            for metric_list_item in metric_list:
                # Do not report duplicate or past samples (note: we are comparing tuples here, which
                # amounts to comparing their timestamps).
                if reported_metrics.get(metric_key, (datetime.datetime.min,)) >= metric_list_item:
                    continue
                metric_timestamp, metric_value, metric_statistic = metric_list_item
                boundary_plugin.boundary_report_metric(self.boundary_metric_prefix + metric_name,
                    metric_value, entity_name, metric_timestamp)
                reported_metrics[metric_key] = metric_list_item
        # Persist after every batch so a crash does not cause re-reporting.
        status_store.save_status_store(self.status_store_filename, reported_metrics)
    def main(self):
        """Plugin entry point: backfill missed data, then poll forever."""
        settings = boundary_plugin.parse_params()
        reported_metrics = status_store.load_status_store(self.status_store_filename) or dict()
        logging.basicConfig(level=logging.ERROR, filename=settings.get('log_file', None))
        reports_log = settings.get('report_log_file', None)
        if reports_log:
            boundary_plugin.log_metrics_to_file(reports_log)
        boundary_plugin.start_keepalive_subprocess()
        self.cloudwatch_metrics = self.cloudwatch_metrics_type(settings['access_key_id'], settings['secret_key'])
        # Bring us up to date! Get all data since the last time we know we reported valid data
        # (minus 20 minutes as a buffer), and report it now, so that we report data on any time
        # this plugin was down for any reason.
        try:
            earliest_timestamp = max(reported_metrics.values(), key=lambda v: v[0])[0] - datetime.timedelta(minutes=20)
        except ValueError:
            # Probably first run or someone deleted our status store file - just start from now
            logging.error("No status store data; starting data collection from now")
            pass
        else:
            logging.error("Starting historical data collection from %s" % earliest_timestamp)
            data = self.get_metric_data_with_retries(only_latest=False,
                start_time=earliest_timestamp, end_time=datetime.datetime.utcnow())
            self.handle_metrics(data, reported_metrics)
            logging.error("Historical data collection complete")
        # Steady state: poll latest samples on the plugin's interval.
        while True:
            data = self.get_metric_data_with_retries()
            self.handle_metrics(data, reported_metrics)
            boundary_plugin.sleep_interval()
| apache-2.0 |
ryfeus/lambda-packs | Tensorflow/source/tensorflow/python/keras/wrappers/scikit_learn/__init__.py | 73 | 1062 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Keras scikit-learn API wrapper."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.keras._impl.keras.wrappers.scikit_learn import KerasClassifier
from tensorflow.python.keras._impl.keras.wrappers.scikit_learn import KerasRegressor
# Drop the __future__ aliases so only the wrapper classes are re-exported
# from this module's namespace.
del absolute_import
del division
del print_function
| mit |
Lonchadepavo/EstacionCisma | tools/expand_filedir_paths.py | 166 | 3839 | #!/usr/bin/env python
import re, os, sys, fnmatch
# Regex pattern to extract the directory path in a #define FILE_DIR
filedir_pattern = re.compile(r'^#define\s*FILE_DIR\s*"(.*?)"')
# Regex pattern to extract any single quoted piece of text. This can also
# match single quoted strings inside of double quotes, which is part of a
# regular text string and should not be replaced. The replacement function,
# however, leaves any match that doesn't appear to be a known filename
# unchanged, so these extra matches should not be a problem.
rename_pattern = re.compile(r"'(.+?)'")
# Only filenames matching this pattern will have their resources renamed
source_pattern = re.compile(r"^.*?\.(dm|dmm)$")
# Open the .dme file and return a list of all FILE_DIR paths in it
def read_filedirs(filename):
    """Return every FILE_DIR path defined in the .dme file *filename*.

    Paths are returned in file order, as extracted by ``filedir_pattern``.
    """
    result = []
    # ``open`` (instead of the Python 2-only ``file`` builtin) plus a
    # ``with`` block guarantees the handle is closed even if reading fails.
    with open(filename, "rt") as dme_file:
        # Read each line from the file and check for regex pattern match
        for row in dme_file:
            match = filedir_pattern.match(row)
            if match:
                result.append(match.group(1))
    return result
# Search through a list of directories, and build a dictionary which
# maps every file to its full pathname (relative to the .dme file)
# If the same filename appears in more than one directory, the earlier
# directory in the list takes preference.
def index_files(file_dirs):
    """Map each resource filename (lowercased) to its full relative path.

    When the same filename exists in several directories, the directory
    that appears first in ``file_dirs`` takes precedence.
    """
    result = {}
    # Walk the directory list back to front so that entries from earlier
    # directories overwrite -- and therefore win over -- later ones.
    for directory in reversed(file_dirs):
        for entry in os.listdir(directory):
            # Resources always carry an extension; skip anything without one.
            if "." not in entry:
                continue
            # Normalize Windows backslashes to forward slashes; keys are
            # lowercased because BYOND resource names are case insensitive,
            # even on Linux.
            result[entry.lower()] = "%s/%s" % (directory.replace("\\", "/"), entry)
    return result
# Recursively search for every .dm/.dmm file in the .dme file directory. For
# each file, search it for any resource names in single quotes, and replace
# them with the full path previously found by index_files()
def rewrite_sources(resources):
    """Rewrite every .dm/.dmm under the cwd, expanding quoted resource names
    to the full paths found in ``resources``."""
    # Create a closure for the regex replacement function to capture the
    # resources dictionary which can't be passed directly to this function
    def replace_func(name):
        key = name.group(1).lower()
        if key in resources:
            replacement = resources[key]
        else:
            # Not a known resource file: leave the quoted text untouched.
            replacement = name.group(1)
        return "'" + replacement + "'"
    # Search recursively for all .dm and .dmm files
    for (dirpath, dirs, files) in os.walk("."):
        for name in files:
            if source_pattern.match(name):
                path = dirpath + '/' + name
                # NOTE(review): ``file()`` is the Python 2-only builtin;
                # this script will not run under Python 3 as written.
                source_file = file(path, "rt")
                output_file = file(path + ".tmp", "wt")
                # Read file one line at a time and perform replacement of all
                # single quoted resource names with the fullpath to that resource
                # file. Write the updated text back out to a temporary file.
                for row in source_file:
                    row = rename_pattern.sub(replace_func, row)
                    output_file.write(row)
                output_file.close()
                source_file.close()
                # Delete original source file and replace with the temporary
                # output. On Windows, an atomic rename() operation is not
                # possible like it is under POSIX.
                os.remove(path)
                os.rename(path + ".tmp", path)
# Entry point: read the FILE_DIR paths from the .dme, index every resource
# file they contain, then rewrite all sources to fully qualified paths.
dirs = read_filedirs("tgstation.dme");
resources = index_files(dirs)
rewrite_sources(resources)
| gpl-3.0 |
fishscene/streamlink | src/streamlink/plugins/ustreamtv.py | 3 | 20932 | import re
from collections import namedtuple
from functools import partial
from random import randint
from time import sleep
from streamlink.compat import urlparse, urljoin, range
from streamlink.exceptions import StreamError, PluginError, NoStreamsError
from streamlink.plugin import Plugin, PluginOptions
from streamlink.plugin.api import http, validate
from streamlink.stream import RTMPStream, HLSStream, HTTPStream, Stream
from streamlink.stream.flvconcat import FLVTagConcat
from streamlink.stream.segmented import (
SegmentedStreamReader, SegmentedStreamWriter, SegmentedStreamWorker
)
# librtmp is optional; RTMP-based features are gated on HAS_LIBRTMP.
try:
    import librtmp
    HAS_LIBRTMP = True
except ImportError:
    HAS_LIBRTMP = False
# Matches channel (embed or id) and recorded-video Ustream URLs.
_url_re = re.compile("""
    http(s)?://(www\.)?ustream.tv
    (?:
        (/embed/|/channel/id/)(?P<channel_id>\d+)
    )?
    (?:
        /recorded/(?P<video_id>\d+)
    )?
""", re.VERBOSE)
# Extracts the numeric channel id embedded in a channel page.
_channel_id_re = re.compile("\"channelId\":(\d+)")
# URL templates; {0}/{1} are filled in when building streams.
HLS_PLAYLIST_URL = (
    "http://iphone-streaming.ustream.tv"
    "/uhls/{0}/streams/live/iphone/playlist.m3u8"
)
RECORDED_URL = "http://tcdn.ustream.tv/video/{0}"
RTMP_URL = "rtmp://r{0}-1-{1}-channel-live.ums.ustream.tv:1935/ustream"
SWF_URL = "http://static-cdn1.ustream.tv/swf/live/viewer.rsl:505.swf"
# Module info responses arrive as a one-element list wrapping a dict.
_module_info_schema = validate.Schema(
    list,
    validate.length(1),
    validate.get(0),
    dict
)
# AMF3 "arrays" may decode as int-keyed dicts; normalize them to lists.
_amf3_array = validate.Schema(
    validate.any(
        validate.all(
            {int: object},
            validate.transform(lambda a: list(a.values())),
        ),
        list
    )
)
# Schema for recorded-video metadata.
_recorded_schema = validate.Schema({
    validate.optional("stream"): validate.all(
        _amf3_array,
        [{
            "name": validate.text,
            "streams": validate.all(
                _amf3_array,
                [{
                    "streamName": validate.text,
                    "bitrate": float,
                }],
            ),
            validate.optional("url"): validate.text,
        }]
    )
})
# Schema for a single live stream entry (chunked or varnish-proxied form).
_stream_schema = validate.Schema(
    validate.any({
        "name": validate.text,
        "url": validate.text,
        "streams": validate.all(
            _amf3_array,
            [{
                "chunkId": validate.any(int, float),
                "chunkRange": {validate.text: validate.text},
                "chunkTime": validate.any(int, float),
                "offset": validate.any(int, float),
                "offsetInMs": validate.any(int, float),
                "streamName": validate.text,
                validate.optional("bitrate"): validate.any(int, float),
                validate.optional("height"): validate.any(int, float),
                validate.optional("description"): validate.text,
                validate.optional("isTranscoded"): bool
            }],
        )
    },
    {
        "name": validate.text,
        "varnishUrl": validate.text
    })
)
# Schema for channel status: a list of streams, or the literal "offline".
_channel_schema = validate.Schema({
    validate.optional("stream"): validate.any(
        validate.all(
            _amf3_array,
            [_stream_schema],
        ),
        "offline"
    )
})
# One downloadable media chunk: sequence number, URL, and byte offset.
Chunk = namedtuple("Chunk", "num url offset")
# Everything below requires python-librtmp; the plugin degrades to
# HLS-only behaviour when it is missing (see HAS_LIBRTMP guard above).
if HAS_LIBRTMP:
    from io import BytesIO
    from time import time
    from librtmp.rtmp import RTMPTimeoutError, PACKET_TYPE_INVOKE
    from streamlink.packages.flashmedia.types import AMF0Value
    def decode_amf(body):
        """Decode consecutive AMF0 values from a packet body into a list."""
        def generator():
            fd = BytesIO(body)
            while True:
                try:
                    yield AMF0Value.read(fd)
                except IOError:
                    # EOF (or a truncated value) ends the sequence.
                    break
        return list(generator())
    class FlashmediaRTMP(librtmp.RTMP):
        """RTMP connection using python-flashmedia's AMF decoder.
        TODO: Move to python-librtmp instead.
        """
        def process_packets(self, transaction_id=None, invoked_method=None,
                            timeout=None):
            # Pump packets until the given transaction id has a result or
            # the given method has been invoked by the server; returns the
            # result/args respectively (None on neither).
            start = time()
            while self.connected and transaction_id not in self._invoke_results:
                if timeout and (time() - start) >= timeout:
                    raise RTMPTimeoutError("Timeout")
                packet = self.read_packet()
                if packet.type == PACKET_TYPE_INVOKE:
                    try:
                        decoded = decode_amf(packet.body)
                    except IOError:
                        continue
                    try:
                        method, transaction_id_, obj = decoded[:3]
                        args = decoded[3:]
                    except ValueError:
                        # Malformed invoke; skip it.
                        continue
                    if method == "_result":
                        if len(args) > 0:
                            result = args[0]
                        else:
                            result = None
                        self._invoke_results[transaction_id_] = result
                    else:
                        handler = self._invoke_handlers.get(method)
                        if handler:
                            res = handler(*args)
                            if res is not None:
                                self.call("_result", res,
                                          transaction_id=transaction_id_)
                        if method == invoked_method:
                            self._invoke_args[invoked_method] = args
                            break
                        if transaction_id_ == 1.0:
                            # presumably the connect handshake result -- TODO confirm
                            self._connect_result = packet
                        else:
                            self.handle_packet(packet)
                else:
                    self.handle_packet(packet)
            if transaction_id:
                result = self._invoke_results.pop(transaction_id, None)
                return result
            if invoked_method:
                args = self._invoke_args.pop(invoked_method, None)
                return args
def create_ums_connection(app, media_id, page_url, password,
                          exception=PluginError):
    """Open an RTMP connection to a random UMS edge server.

    Raises *exception* (PluginError by default) when the handshake
    with the server fails.
    """
    rtmp_url = RTMP_URL.format(randint(0, 0xffffff), media_id)
    connect_data = dict(application=app,
                        media=str(media_id),
                        password=password)
    conn = FlashmediaRTMP(rtmp_url, swfurl=SWF_URL, pageurl=page_url,
                          connect_data=connect_data)
    try:
        conn.connect()
    except librtmp.RTMPError:
        raise exception("Failed to connect to RTMP server")
    return conn
class UHSStreamWriter(SegmentedStreamWriter):
    def __init__(self, *args, **kwargs):
        SegmentedStreamWriter.__init__(self, *args, **kwargs)
        # Each chunk is an independent FLV; concatenate them into a
        # single stream, dropping repeated headers and flattening
        # timestamps so playback is continuous.
        self.concater = FLVTagConcat(flatten_timestamps=True,
                                     sync_headers=True)
    def fetch(self, chunk, retries=None):
        """Download one chunk, retrying recursively on StreamError.

        Returns the HTTP response, or None when retries are exhausted
        or the writer has been closed.
        """
        if not retries or self.closed:
            return
        try:
            params = {}
            if chunk.offset:
                # Resume mid-chunk (first chunk of a joined-in-progress
                # stream carries a byte offset).
                params["start"] = chunk.offset
            return http.get(chunk.url,
                            timeout=self.timeout,
                            params=params,
                            exception=StreamError)
        except StreamError as err:
            self.logger.error("Failed to open chunk {0}: {1}", chunk.num, err)
            return self.fetch(chunk, retries - 1)
    def write(self, chunk, res, chunk_size=8192):
        """Feed a downloaded chunk through the FLV concatenator into the
        reader's buffer; the FLV header is only kept for offset chunks."""
        try:
            for data in self.concater.iter_chunks(buf=res.content,
                                                  skip_header=not chunk.offset):
                self.reader.buffer.write(data)
                if self.closed:
                    break
            else:
                self.logger.debug("Download of chunk {0} complete", chunk.num)
        except IOError as err:
            self.logger.error("Failed to read chunk {0}: {1}", chunk.num, err)
class UHSStreamWorker(SegmentedStreamWorker):
    def __init__(self, *args, **kwargs):
        SegmentedStreamWorker.__init__(self, *args, **kwargs)
        # chunk id -> hash used when formatting that chunk's URL
        self.chunk_ranges = {}
        # Next chunk number to hand out; None until the first module info.
        self.chunk_id = None
        self.chunk_id_max = None
        self.chunks = []
        self.filename_format = ""
        # Seconds between moduleInfo polls for new chunks.
        self.module_info_reload_time = 2
        self.process_module_info()
    def fetch_module_info(self):
        """Query the UMS server for the channel's moduleInfo and return
        the validated result."""
        self.logger.debug("Fetching module info")
        conn = create_ums_connection("channel",
                                     self.stream.channel_id,
                                     self.stream.page_url,
                                     self.stream.password,
                                     exception=StreamError)
        try:
            result = conn.process_packets(invoked_method="moduleInfo",
                                          timeout=10)
        except (IOError, librtmp.RTMPError) as err:
            raise StreamError("Failed to get module info: {0}".format(err))
        finally:
            conn.close()
        result = _module_info_schema.validate(result)
        return _channel_schema.validate(result, "module info")
    def process_module_info(self):
        """Refresh chunk bookkeeping from a fresh moduleInfo response;
        closes the worker when the channel has gone offline."""
        if self.closed:
            return
        try:
            result = self.fetch_module_info()
        except PluginError as err:
            self.logger.error("{0}", err)
            return
        providers = result.get("stream")
        if not providers or providers == "offline":
            self.logger.debug("Stream went offline")
            self.close()
            return
        # Find the provider this stream was created for.
        for provider in providers:
            if provider.get("name") == self.stream.provider:
                break
        else:
            return
        try:
            stream = provider["streams"][self.stream.stream_index]
        except IndexError:
            self.logger.error("Stream index not in result")
            return
        # streamName contains a "%" placeholder pair for (chunk id, hash).
        filename_format = stream["streamName"].replace("%", "%s")
        filename_format = urljoin(provider["url"], filename_format)
        self.filename_format = filename_format
        self.update_chunk_info(stream)
    def update_chunk_info(self, result):
        """Merge a moduleInfo stream entry into the chunk list."""
        chunk_range = result["chunkRange"]
        if not chunk_range:
            return
        chunk_id = int(result["chunkId"])
        chunk_offset = int(result["offset"])
        # Keys and values arrive as strings; convert both to int.
        chunk_range = dict(map(partial(map, int), chunk_range.items()))
        self.chunk_ranges.update(chunk_range)
        self.chunk_id_min = sorted(chunk_range)[0]
        self.chunk_id_max = int(result["chunkId"])
        # Only the very first chunk (before chunk_id is set) carries the
        # mid-chunk byte offset; all others start from the beginning.
        self.chunks = [Chunk(i, self.format_chunk_url(i),
                             not self.chunk_id and i == chunk_id and chunk_offset)
                       for i in range(self.chunk_id_min, self.chunk_id_max + 1)]
        if self.chunk_id is None and self.chunks:
            self.chunk_id = chunk_id
    def format_chunk_url(self, chunk_id):
        """Build a chunk URL using the hash of the latest range at or
        below *chunk_id*."""
        chunk_hash = ""
        for chunk_start in sorted(self.chunk_ranges):
            if chunk_id >= chunk_start:
                chunk_hash = self.chunk_ranges[chunk_start]
        return self.filename_format % (chunk_id, chunk_hash)
    def valid_chunk(self, chunk):
        # Skip chunks that were already queued in a previous pass.
        return self.chunk_id and chunk.num >= self.chunk_id
    def iter_segments(self):
        """Yield chunks in order, polling moduleInfo for new ones."""
        while not self.closed:
            for chunk in filter(self.valid_chunk, self.chunks):
                self.logger.debug("Adding chunk {0} to queue", chunk.num)
                yield chunk
                # End of stream
                if self.closed:
                    return
                self.chunk_id = chunk.num + 1
            if self.wait(self.module_info_reload_time):
                try:
                    self.process_module_info()
                except StreamError as err:
                    self.logger.warning("Failed to process module info: {0}", err)
class UHSStreamReader(SegmentedStreamReader):
    # Worker/writer classes the segmented-stream machinery instantiates.
    __worker__ = UHSStreamWorker
    __writer__ = UHSStreamWriter
    def __init__(self, stream, *args, **kwargs):
        # Logger must exist before the base class spins up worker/writer.
        self.logger = stream.session.logger.new_module("stream.uhs")
        SegmentedStreamReader.__init__(self, stream, *args, **kwargs)
class UHSStream(Stream):
    """A UStream UHS (chunked FLV-over-HTTP) stream."""
    __shortname__ = "uhs"
    def __init__(self, session, channel_id, page_url, provider,
                 stream_index, password=""):
        Stream.__init__(self, session)
        self.channel_id = channel_id
        self.page_url = page_url
        self.provider = provider
        # Index into the provider's "streams" list (selects the quality).
        self.stream_index = stream_index
        self.password = password
    def __repr__(self):
        return "<UHSStream({0!r}, {1!r}, {2!r}, {3!r}, {4!r})>".format(
            self.channel_id, self.page_url, self.provider,
            self.stream_index, self.password
        )
    def __json__(self):
        # Extend the base JSON representation with UHS-specific fields.
        json = Stream.__json__(self)
        json.update({
            "channel_id": self.channel_id,
            "page_url": self.page_url,
            "provider": self.provider,
            "stream_index": self.stream_index,
            "password": self.password
        })
        return json
    def open(self):
        reader = UHSStreamReader(self)
        reader.open()
        return reader
class UStreamTV(Plugin):
    """Plugin for ustream.tv live channels and recorded videos."""
    options = PluginOptions({
        "password": ""
    })
    @classmethod
    def can_handle_url(cls, url):
        return _url_re.match(url)
    @classmethod
    def stream_weight(cls, stream):
        """Rank mobile_* variants just below their desktop equivalents
        and give "recorded" a fixed 720-level weight."""
        match = re.match("mobile_(\w+)", stream)
        if match:
            weight, group = Plugin.stream_weight(match.group(1))
            weight -= 1
            group = "mobile_ustream"
        elif stream == "recorded":
            weight, group = 720, "ustream"
        else:
            weight, group = Plugin.stream_weight(stream)
        return weight, group
    def _get_channel_id(self):
        # Fallback: scrape the channel id out of the page HTML; returns
        # None when it cannot be found.
        res = http.get(self.url)
        match = _channel_id_re.search(res.text)
        if match:
            return int(match.group(1))
    def _get_hls_streams(self, channel_id, wait_for_transcode=False):
        # HLS streams are created on demand, so we may have to wait
        # for a transcode to be started.
        attempts = wait_for_transcode and 10 or 1
        playlist_url = HLS_PLAYLIST_URL.format(channel_id)
        streams = {}
        # NOTE(review): the 3s sleep also runs after a successful fetch
        # (the loop condition is only re-checked afterwards) -- confirm
        # before relying on this method's latency.
        while attempts and not streams:
            try:
                streams = HLSStream.parse_variant_playlist(self.session,
                                                           playlist_url,
                                                           nameprefix="mobile_")
            except IOError:
                # Channel is probably offline
                break
            attempts -= 1
            sleep(3)
        return streams
    def _create_rtmp_stream(self, cdn, stream_name):
        """Build an RTMPStream pointing at *cdn* with the given playpath."""
        parsed = urlparse(cdn)
        params = {
            "rtmp": cdn,
            "app": parsed.path[1:],
            "playpath": stream_name,
            "pageUrl": self.url,
            "swfUrl": SWF_URL,
            "live": True
        }
        return RTMPStream(self.session, params)
    def _get_module_info(self, app, media_id, password="", schema=None):
        """Fetch and validate a moduleInfo response, retrying validation
        up to 3 times while the connection stays up."""
        self.logger.debug("Waiting for moduleInfo invoke")
        conn = create_ums_connection(app, media_id, self.url, password)
        attempts = 3
        while conn.connected and attempts:
            try:
                result = conn.process_packets(invoked_method="moduleInfo",
                                              timeout=10)
            except (IOError, librtmp.RTMPError) as err:
                raise PluginError("Failed to get stream info: {0}".format(err))
            try:
                result = _module_info_schema.validate(result)
                break
            except PluginError:
                attempts -= 1
        conn.close()
        if schema:
            result = schema.validate(result)
        return result
    def _get_desktop_streams(self, channel_id):
        """Build the desktop (UHS/RTMP) stream dict from moduleInfo."""
        password = self.options.get("password")
        channel = self._get_module_info("channel", channel_id, password,
                                        schema=_channel_schema)
        if not isinstance(channel.get("stream"), list):
            raise NoStreamsError(self.url)
        streams = {}
        for provider in channel["stream"]:
            if provider["name"] == u"uhs_akamai": # not heavily tested, but got a stream working
                continue
            provider_url = provider["url"]
            provider_name = provider["name"]
            for stream_index, stream_info in enumerate(provider["streams"]):
                stream = None
                stream_height = int(stream_info.get("height", 0))
                stream_name = stream_info.get("description")
                if not stream_name:
                    # Derive a name from height; "+" marks the original
                    # (non-transcoded) rendition.
                    if stream_height > 0:
                        if not stream_info.get("isTranscoded"):
                            stream_name = "{0}p+".format(stream_height)
                        else:
                            stream_name = "{0}p".format(stream_height)
                    else:
                        stream_name = "live"
                if stream_name in streams:
                    # Disambiguate duplicate names across providers.
                    provider_name_clean = provider_name.replace("uhs_", "")
                    stream_name += "_alt_{0}".format(provider_name_clean)
                if provider_name.startswith("uhs_"):
                    stream = UHSStream(self.session, channel_id,
                                       self.url, provider_name,
                                       stream_index, password)
                elif provider_url.startswith("rtmp"):
                    playpath = stream_info["streamName"]
                    stream = self._create_rtmp_stream(provider_url,
                                                      playpath)
                if stream:
                    streams[stream_name] = stream
        return streams
    def _get_live_streams(self, channel_id):
        """Yield (name, stream) pairs: desktop streams first (when
        librtmp is available), then mobile HLS variants."""
        has_desktop_streams = False
        if HAS_LIBRTMP:
            try:
                streams = self._get_desktop_streams(channel_id)
                # TODO: Replace with "yield from" when dropping Python 2.
                for stream in streams.items():
                    has_desktop_streams = True
                    yield stream
            except PluginError as err:
                self.logger.error("Unable to fetch desktop streams: {0}", err)
            except NoStreamsError:
                pass
        else:
            self.logger.warning(
                "python-librtmp is not installed, but is needed to access "
                "the desktop streams"
            )
        try:
            # Only wait for an on-demand transcode when nothing else is
            # available.
            streams = self._get_hls_streams(channel_id,
                                            wait_for_transcode=not has_desktop_streams)
            # TODO: Replace with "yield from" when dropping Python 2.
            for stream in streams.items():
                yield stream
        except PluginError as err:
            self.logger.error("Unable to fetch mobile streams: {0}", err)
        except NoStreamsError:
            pass
    def _get_recorded_streams(self, video_id):
        """Yield streams for a recorded video, preferring the moduleInfo
        API when librtmp is present."""
        if HAS_LIBRTMP:
            recording = self._get_module_info("recorded", video_id,
                                              schema=_recorded_schema)
            if not isinstance(recording.get("stream"), list):
                return
            for provider in recording["stream"]:
                base_url = provider.get("url")
                for stream_info in provider["streams"]:
                    bitrate = int(stream_info.get("bitrate", 0))
                    stream_name = (bitrate > 0 and "{0}k".format(bitrate) or
                                   "recorded")
                    url = stream_info["streamName"]
                    if base_url:
                        url = base_url + url
                    if url.startswith("http"):
                        yield stream_name, HTTPStream(self.session, url)
                    elif url.startswith("rtmp"):
                        params = dict(rtmp=url, pageUrl=self.url)
                        yield stream_name, RTMPStream(self.session, params)
        else:
            self.logger.warning(
                "The proper API could not be used without python-librtmp "
                "installed. Stream URL is not guaranteed to be valid"
            )
            # Best-effort guess at the CDN URL with a random cache-buster.
            url = RECORDED_URL.format(video_id)
            random_hash = "{0:02x}{1:02x}".format(randint(0, 255),
                                                  randint(0, 255))
            params = dict(hash=random_hash)
            stream = HTTPStream(self.session, url, params=params)
            yield "recorded", stream
    def _get_streams(self):
        # Dispatch on URL type: recorded video vs. live channel.
        match = _url_re.match(self.url)
        video_id = match.group("video_id")
        if video_id:
            return self._get_recorded_streams(video_id)
        channel_id = match.group("channel_id") or self._get_channel_id()
        if channel_id:
            return self._get_live_streams(channel_id)
__plugin__ = UStreamTV  # entry point picked up by Streamlink's plugin loader
| bsd-2-clause |
khara914/cf-phpbuildpack | lib/build_pack_utils/downloads.py | 15 | 4096 | import os
import urllib2
import re
import logging
from subprocess import Popen
from subprocess import PIPE
# NOTE: this module is Python 2 only (urllib2, print statements).
class Downloader(object):
    """Downloads dependencies either via the buildpack's bundled
    download_dependency helper or directly over HTTP(S)."""
    def __init__(self, config):
        self._ctx = config
        self._log = logging.getLogger('downloads')
        self._init_proxy()
    def _init_proxy(self):
        # Collect "<scheme>_proxy" keys from the context and install a
        # global urllib2 opener so every later request honours them.
        handlers = {}
        for key in self._ctx.keys():
            if key.lower().endswith('_proxy'):
                handlers[key.split('_')[0]] = self._ctx[key]
        self._log.debug('Loaded proxy handlers [%s]', handlers)
        openers = []
        if handlers:
            openers.append(urllib2.ProxyHandler(handlers))
            # '@' in the proxy URL implies embedded credentials.
            for handler in handlers.values():
                if '@' in handler:
                    openers.append(urllib2.ProxyBasicAuthHandler())
        opener = urllib2.build_opener(*openers)
        urllib2.install_opener(opener)
    def download(self, url, toFile):
        """Fetch *url* into *toFile* via the download_dependency script;
        raises RuntimeError on download failure or MD5 mismatch."""
        path_to_download_executable = os.path.join(
            self._ctx['BP_DIR'],
            'compile-extensions',
            'bin',
            'download_dependency')
        command_arguments = [
            path_to_download_executable,
            url,
            toFile]
        process = Popen(command_arguments, stdout=PIPE)
        exit_code = process.wait()
        # The helper prints the (possibly translated) URI it fetched.
        translated_uri = process.stdout.read().rstrip()
        if exit_code == 0:
            print "Downloaded [%s] to [%s]" % (translated_uri, toFile)
        elif exit_code == 1:
            raise RuntimeError("Could not download dependency: %s" % url)
        elif exit_code == 3:
            raise RuntimeError("MD5 of downloaded dependency does not match expected value")
    def custom_extension_download(self, url, toFile):
        # Plain HTTP download with no checksum verification.
        res = urllib2.urlopen(url)
        with open(toFile, 'w') as f:
            f.write(res.read())
        print 'Downloaded [%s] to [%s]' % (url, toFile)
        self._log.info('Downloaded [%s] to [%s]', url, toFile)
    def download_direct(self, url):
        """Fetch *url* and return the body as a string."""
        buf = urllib2.urlopen(url).read()
        self._log.info('Downloaded [%s] to memory', url)
        self._log.debug("Downloaded [%s] [%s]", url, buf)
        return buf
class CurlDownloader(object):
    """Alternative downloader that shells out to curl (Python 2 only)."""
    def __init__(self, config):
        self._ctx = config
        # curl is told to append "<!-- Status: NNN -->"; this pattern
        # splits the body from that trailing status marker.
        self._status_pattern = re.compile(r'^(.*)<!-- Status: (\d+) -->$',
                                          re.DOTALL)
        self._log = logging.getLogger('downloads')
    def download(self, url, toFile):
        """Fetch *url* into *toFile*; raises RuntimeError on HTTP 4xx/5xx."""
        cmd = ["curl", "-s",
               "-o", toFile,
               "-w", '%{http_code}']
        # Forward any "<scheme>_proxy" context entries to curl via -x.
        for key in self._ctx.keys():
            if key.lower().endswith('_proxy'):
                cmd.extend(['-x', self._ctx[key]])
        cmd.append(url)
        self._log.debug("Running [%s]", cmd)
        proc = Popen(cmd, stdout=PIPE)
        output, unused_err = proc.communicate()
        proc.poll()
        self._log.debug("Curl returned [%s]", output)
        # Output is the bare HTTP status code written by -w.
        if output and \
           (output.startswith('4') or
            output.startswith('5')):
            raise RuntimeError("curl says [%s]" % output)
        print 'Downloaded [%s] to [%s]' % (url, toFile)
        self._log.info('Downloaded [%s] to [%s]', url, toFile)
    def download_direct(self, url):
        """Fetch *url* and return the response body (without the status
        marker); returns None when the marker cannot be parsed."""
        cmd = ["curl", "-s",
               "-w", '<!-- Status: %{http_code} -->']
        for key in self._ctx.keys():
            if key.lower().endswith('_proxy'):
                cmd.extend(['-x', self._ctx[key]])
        cmd.append(url)
        self._log.debug("Running [%s]", cmd)
        proc = Popen(cmd, stdout=PIPE)
        output, unused_err = proc.communicate()
        proc.poll()
        m = self._status_pattern.match(output)
        if m:
            resp = m.group(1)
            code = m.group(2)
            self._log.debug("Curl returned [%s]", code)
            if (code.startswith('4') or code.startswith('5')):
                raise RuntimeError("curl says [%s]" % output)
            self._log.info('Downloaded [%s] to memory', url)
            self._log.debug('Downloaded [%s] [%s]', url, resp)
            return resp
| apache-2.0 |
SelenaProject/selena | app/core/modules/weather/weather.py | 1 | 5028 | # !/usr/bin/env python3
import threading
import time
import urllib.request
import json
from .. import modulebase
weather_check_interval = 60 # check every minute
# Hard-coded location; both endpoints request metric units for this city.
city = 'Kanata,ON'
cur_weather_url = ('http://api.openweathermap.org/data/2.5/weather?q=%s&units=metric') % (city)
forecast_url = ('http://api.openweathermap.org/data/2.5/forecast?q=%s&units=metric') % (city)
class weather(modulebase.ModuleBase):
    """HTTP module exposing cached weather data as JSON endpoints.

    All GET_* handlers return UTF-8 encoded JSON bytes built from the
    shared WeatherData singleton stored on the class.
    """
    # Class-level singleton; populated by __init__ of the first instance.
    data = None
    # Accessed through the class, so the lambda behaves as a plain
    # function (no self binding).
    encode = lambda x : json.dumps(x).encode('utf-8')
    def __init__(self) :
        weather.data = WeatherData()
    def deinit(self) :
        pass
    def GET_temperature(self):
        # Just the current temperature.
        data = {
            'temp' : weather.data.cur_temp()
        }
        return weather.encode(data)
    def GET_current(self) :
        # Full snapshot of the current conditions.
        wd = weather.data
        data = {
            'city' : city,
            'temp' : wd.cur_temp(),
            'weather' : wd.cur_weather(),
            'humidity' : wd.cur_humidity(),
            'clouds' : wd.cur_clouds(),
            'timestamp' : wd.timestamp()
        }
        return weather.encode(data)
    def GET_forecast(self) :
        data = weather.data.forecast()
        return weather.encode(data)
    def POST_test(self) :
        # Simple liveness probe; note this returns a str, not JSON bytes.
        return "Good!"
class WeatherData:
    """Thread-safe cache of current weather conditions and forecast.

    A daemon thread polls the OpenWeatherMap endpoints
    (``cur_weather_url`` / ``forecast_url``) every
    ``weather_check_interval`` seconds and stores the parsed results.
    All reads and writes go through a single lock, so the getters may be
    called from any thread.  Sentinel values (-1 / {} / [] / 0) mean
    "not fetched yet".
    """
    def __init__(self):
        self.__cur_temp = -1
        self.__humidity = -1
        self.__clouds = -1
        self.__cur_weather = {}   # {'id': <condition code>, 'descr': <text>}
        self.__forecast = []      # list of dicts, replaced on every fetch
        self.__timestamp = 0      # unix time of the last successful update
        self.__lock = threading.Lock()
        self.__start_checker()
    # --- public getters (lock-protected) ---
    def cur_temp(self):
        with self.__lock:
            return self.__cur_temp
    def cur_weather(self):
        with self.__lock:
            return self.__cur_weather
    def cur_humidity(self):
        with self.__lock:
            return self.__humidity
    def cur_clouds(self):
        with self.__lock:
            return self.__clouds
    def forecast(self):
        with self.__lock:
            return self.__forecast
    def timestamp(self):
        with self.__lock:
            return self.__timestamp
    # --- private setters (lock-protected) ---
    def __set_cur_temp(self, temp):
        with self.__lock:
            self.__cur_temp = temp
    def __set_cur_weather(self, weather_id, weather_descr):
        with self.__lock:
            self.__cur_weather['id'] = weather_id
            self.__cur_weather['descr'] = weather_descr
    def __set_cur_humidity(self, hum):
        with self.__lock:
            self.__humidity = hum
    def __set_cur_clouds(self, clouds):
        with self.__lock:
            self.__clouds = clouds
    def __set_forecast(self, forecast):
        with self.__lock:
            self.__forecast = forecast
    def __set_timestamp(self, timestamp):
        with self.__lock:
            self.__timestamp = timestamp
    # --- polling thread ---
    def __start_checker(self):
        print('Starting weather checker...')
        self.__checker = threading.Thread(target=self.__check_weather)
        self.__checker.daemon = True  # never blocks interpreter shutdown
        self.__checker.start()
    def __fetch_json(self, url):
        # One HTTP GET decoded as UTF-8 JSON; the with-block closes the
        # connection even when the body cannot be parsed.
        with urllib.request.urlopen(urllib.request.Request(url=url)) as response:
            return json.loads(response.read().decode('utf-8'))
    def __update_current(self, json_obj):
        # Record the fetch time and the "current conditions" fields.
        self.__set_timestamp(int(time.time()))
        main = json_obj.get('main', {})
        self.__set_cur_temp(main.get('temp', -1))
        self.__set_cur_humidity(main.get('humidity', -1))
        conditions = json_obj.get('weather', [])
        if conditions:
            self.__set_cur_weather(conditions[0].get('id', 0),
                                   conditions[0].get('main', ''))
        self.__set_cur_clouds(json_obj.get('clouds', {}).get('all', -1))
    def __update_forecast(self, json_obj):
        # Keep only the first 8 forecast entries (presumably ~24h at the
        # API's 3-hour step -- confirm against the OWM docs).
        fc_data = []
        for list_item in json_obj.get('list', [])[:8]:
            fc_main = list_item.get('main', {})
            fc_weather = list_item.get('weather', [])
            fc_data.append({
                'timestamp': list_item.get('dt', 0),
                'temp': fc_main.get('temp', -1),
                'humidity': fc_main.get('humidity', -1),
                'weather': {
                    'id': fc_weather[0].get('id', 0),
                    'descr': fc_weather[0].get('main', '')
                } if fc_weather else {'id': 0, 'descr': ''},
            })
        self.__set_forecast(fc_data)
    def __check_weather(self):
        # Poll forever.  Any network / JSON error is logged and retried
        # on the next cycle instead of silently killing the daemon thread
        # (the original loop died permanently on the first URLError).
        while True:
            try:
                print('Checking weather...')
                current = self.__fetch_json(cur_weather_url)
                print(str(current))
                self.__update_current(current)
                self.__update_forecast(self.__fetch_json(forecast_url))
            except Exception as err:
                print('Weather update failed: {0}'.format(err))
            time.sleep(weather_check_interval)
| apache-2.0 |
sunsrising/xnhb | contrib/devtools/fix-copyright-headers.py | 80 | 1348 | #!/usr/bin/env python
'''
Run this script to update all the copyright headers of files
that were changed this year.
For example:
// Copyright (c) 2009-2012 The Bitcoin Core developers
it will change it to
// Copyright (c) 2009-2015 The Bitcoin Core developers
'''
import os
import time
import re
year = time.gmtime()[0]  # current year (UTC)
# Shell/perl command templates; %s placeholders are filled in per file.
CMD_GIT_DATE = 'git log --format=@%%at -1 %s | date +"%%Y" -u -f -'
CMD_REGEX= "perl -pi -e 's/(20\d\d)(?:-20\d\d)? The Bitcoin/$1-%s The Bitcoin/' %s"
REGEX_CURRENT= re.compile("%s The Bitcoin" % year)
CMD_LIST_FILES= "find %s | grep %s"
FOLDERS = ["./qa", "./src"]
EXTENSIONS = [".cpp",".h", ".py"]
def get_git_date(file_path):
    """Return the year (as a string) of the last git commit touching
    *file_path*, or "" when git produces no output."""
    # Use a with-block so the pipe is closed instead of leaking one file
    # descriptor per call; os.popen() returns a file-like object that
    # supports the context-manager protocol.
    with os.popen(CMD_GIT_DATE % file_path) as r:
        for l in r:
            # Result is one line, so just return
            return l.replace("\n", "")
    return ""
# Walk every source file under FOLDERS and bump its copyright year when
# its last git edit happened this year.  (Python 2 only: print statement.)
n=1
for folder in FOLDERS:
    for extension in EXTENSIONS:
        for file_path in os.popen(CMD_LIST_FILES % (folder, extension)):
            # find output is "./path\n"; drop the leading '.' and the
            # trailing newline, then anchor at the repo root.
            file_path = os.getcwd() + file_path[1:-1]
            if file_path.endswith(extension):
                git_date = get_git_date(file_path)
                if str(year) == git_date:
                    # Only update if current year is not found
                    if REGEX_CURRENT.search(open(file_path, "r").read()) is None:
                        print n,"Last git edit", git_date, "-", file_path
                        os.popen(CMD_REGEX % (year,file_path))
                        n = n + 1
malexzx/grpc | examples/python/multiplex/helloworld_pb2.py | 29 | 6763 | # Generated by the protocol buffer compiler. DO NOT EDIT!
# source: helloworld.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
from google.protobuf import descriptor_pb2
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
DESCRIPTOR = _descriptor.FileDescriptor(
name='helloworld.proto',
package='helloworld',
syntax='proto3',
serialized_pb=_b('\n\x10helloworld.proto\x12\nhelloworld\"\x1c\n\x0cHelloRequest\x12\x0c\n\x04name\x18\x01 \x01(\t\"\x1d\n\nHelloReply\x12\x0f\n\x07message\x18\x01 \x01(\t2I\n\x07Greeter\x12>\n\x08SayHello\x12\x18.helloworld.HelloRequest\x1a\x16.helloworld.HelloReply\"\x00\x42\x36\n\x1bio.grpc.examples.helloworldB\x0fHelloWorldProtoP\x01\xa2\x02\x03HLWb\x06proto3')
)
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
_HELLOREQUEST = _descriptor.Descriptor(
name='HelloRequest',
full_name='helloworld.HelloRequest',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='name', full_name='helloworld.HelloRequest.name', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=32,
serialized_end=60,
)
_HELLOREPLY = _descriptor.Descriptor(
name='HelloReply',
full_name='helloworld.HelloReply',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='message', full_name='helloworld.HelloReply.message', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=_b("").decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
options=None),
],
extensions=[
],
nested_types=[],
enum_types=[
],
options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=62,
serialized_end=91,
)
DESCRIPTOR.message_types_by_name['HelloRequest'] = _HELLOREQUEST
DESCRIPTOR.message_types_by_name['HelloReply'] = _HELLOREPLY
HelloRequest = _reflection.GeneratedProtocolMessageType('HelloRequest', (_message.Message,), dict(
DESCRIPTOR = _HELLOREQUEST,
__module__ = 'helloworld_pb2'
# @@protoc_insertion_point(class_scope:helloworld.HelloRequest)
))
_sym_db.RegisterMessage(HelloRequest)
HelloReply = _reflection.GeneratedProtocolMessageType('HelloReply', (_message.Message,), dict(
DESCRIPTOR = _HELLOREPLY,
__module__ = 'helloworld_pb2'
# @@protoc_insertion_point(class_scope:helloworld.HelloReply)
))
_sym_db.RegisterMessage(HelloReply)
DESCRIPTOR.has_options = True
DESCRIPTOR._options = _descriptor._ParseOptions(descriptor_pb2.FileOptions(), _b('\n\033io.grpc.examples.helloworldB\017HelloWorldProtoP\001\242\002\003HLW'))
import grpc
from grpc.beta import implementations as beta_implementations
from grpc.beta import interfaces as beta_interfaces
from grpc.framework.common import cardinality
from grpc.framework.interfaces.face import utilities as face_utilities
class GreeterStub(object):
"""The greeting service definition.
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.SayHello = channel.unary_unary(
'/helloworld.Greeter/SayHello',
request_serializer=HelloRequest.SerializeToString,
response_deserializer=HelloReply.FromString,
)
class GreeterServicer(object):
"""The greeting service definition.
"""
def SayHello(self, request, context):
"""Sends a greeting
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_GreeterServicer_to_server(servicer, server):
rpc_method_handlers = {
'SayHello': grpc.unary_unary_rpc_method_handler(
servicer.SayHello,
request_deserializer=HelloRequest.FromString,
response_serializer=HelloReply.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'helloworld.Greeter', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
class BetaGreeterServicer(object):
"""The greeting service definition.
"""
def SayHello(self, request, context):
"""Sends a greeting
"""
context.code(beta_interfaces.StatusCode.UNIMPLEMENTED)
class BetaGreeterStub(object):
"""The greeting service definition.
"""
def SayHello(self, request, timeout, metadata=None, with_call=False, protocol_options=None):
"""Sends a greeting
"""
raise NotImplementedError()
SayHello.future = None
def beta_create_Greeter_server(servicer, pool=None, pool_size=None, default_timeout=None, maximum_timeout=None):
request_deserializers = {
('helloworld.Greeter', 'SayHello'): HelloRequest.FromString,
}
response_serializers = {
('helloworld.Greeter', 'SayHello'): HelloReply.SerializeToString,
}
method_implementations = {
('helloworld.Greeter', 'SayHello'): face_utilities.unary_unary_inline(servicer.SayHello),
}
server_options = beta_implementations.server_options(request_deserializers=request_deserializers, response_serializers=response_serializers, thread_pool=pool, thread_pool_size=pool_size, default_timeout=default_timeout, maximum_timeout=maximum_timeout)
return beta_implementations.server(method_implementations, options=server_options)
def beta_create_Greeter_stub(channel, host=None, metadata_transformer=None, pool=None, pool_size=None):
request_serializers = {
('helloworld.Greeter', 'SayHello'): HelloRequest.SerializeToString,
}
response_deserializers = {
('helloworld.Greeter', 'SayHello'): HelloReply.FromString,
}
cardinalities = {
'SayHello': cardinality.Cardinality.UNARY_UNARY,
}
stub_options = beta_implementations.stub_options(host=host, metadata_transformer=metadata_transformer, request_serializers=request_serializers, response_deserializers=response_deserializers, thread_pool=pool, thread_pool_size=pool_size)
return beta_implementations.dynamic_stub(channel, 'helloworld.Greeter', cardinalities, options=stub_options)
# @@protoc_insertion_point(module_scope)
| bsd-3-clause |
sve-odoo/odoo | addons/website_sale/models/sale_order.py | 26 | 10438 | # -*- coding: utf-8 -*-
import random
from openerp import SUPERUSER_ID
from openerp.osv import osv, orm, fields
from openerp.addons.web.http import request
class payment_transaction(orm.Model):
    """Extend payment.transaction with a back-reference to the sale
    order it pays for (set by the website checkout flow)."""
    _inherit = 'payment.transaction'
    _columns = {
        # link with the sale order
        'sale_order_id': fields.many2one('sale.order', 'Sale Order'),
    }
class sale_order(osv.Model):
_inherit = "sale.order"
def _cart_qty(self, cr, uid, ids, field_name, arg, context=None):
res = dict()
for order in self.browse(cr, uid, ids, context=context):
res[order.id] = int(sum(l.product_uom_qty for l in (order.website_order_line or [])))
return res
_columns = {
'website_order_line': fields.one2many(
'sale.order.line', 'order_id',
string='Order Lines displayed on Website', readonly=True,
help='Order Lines to be displayed on the website. They should not be used for computation purpose.',
),
'cart_quantity': fields.function(_cart_qty, type='integer', string='Cart Quantity'),
'payment_acquirer_id': fields.many2one('payment.acquirer', 'Payment Acquirer', on_delete='set null'),
'payment_tx_id': fields.many2one('payment.transaction', 'Transaction', on_delete='set null'),
}
def _get_errors(self, cr, uid, order, context=None):
return []
def _get_website_data(self, cr, uid, order, context):
return {
'partner': order.partner_id.id,
'order': order
}
def _cart_find_product_line(self, cr, uid, ids, product_id=None, line_id=None, context=None, **kwargs):
for so in self.browse(cr, uid, ids, context=context):
domain = [('order_id', '=', so.id), ('product_id', '=', product_id)]
if line_id:
domain += [('id', '=', line_id)]
return self.pool.get('sale.order.line').search(cr, SUPERUSER_ID, domain, context=context)
def _website_product_id_change(self, cr, uid, ids, order_id, product_id, line_id=None, context=None):
so = self.pool.get('sale.order').browse(cr, uid, order_id, context=context)
values = self.pool.get('sale.order.line').product_id_change(cr, SUPERUSER_ID, [],
pricelist=so.pricelist_id.id,
product=product_id,
partner_id=so.partner_id.id,
context=context
)['value']
if line_id:
line = self.pool.get('sale.order.line').browse(cr, SUPERUSER_ID, line_id, context=context)
values['name'] = line.name
else:
product = self.pool.get('product.product').browse(cr, uid, product_id, context=context)
values['name'] = product.description_sale or product.name
values['product_id'] = product_id
values['order_id'] = order_id
if values.get('tax_id') != None:
values['tax_id'] = [(6, 0, values['tax_id'])]
return values
    def _cart_update(self, cr, uid, ids, product_id=None, line_id=None, add_qty=0, set_qty=0, context=None, **kwargs):
        """ Add or set product quantity, add_qty can be negative.

        Returns {'line_id': ..., 'quantity': ...} for the touched line.
        Lines whose resulting quantity is <= 0 are deleted.
        """
        sol = self.pool.get('sale.order.line')
        quantity = 0
        for so in self.browse(cr, uid, ids, context=context):
            # line_id semantics: an int targets that line, None means "find a
            # matching line", and an explicit False skips the lookup entirely
            # (forcing creation of a new line) — TODO confirm callers rely on
            # that distinction.
            if line_id != False:
                line_ids = so._cart_find_product_line(product_id, line_id, context=context, **kwargs)
                if line_ids:
                    line_id = line_ids[0]
            # Create line if no line with product_id can be located
            if not line_id:
                values = self._website_product_id_change(cr, uid, ids, so.id, product_id, context=context)
                line_id = sol.create(cr, SUPERUSER_ID, values, context=context)
                if add_qty:
                    # Presumably the freshly created line already carries one
                    # unit, so one increment is consumed here — TODO confirm.
                    add_qty -= 1
            # compute new quantity
            if set_qty:
                quantity = set_qty
            elif add_qty != None:
                quantity = sol.browse(cr, SUPERUSER_ID, line_id, context=context).product_uom_qty + (add_qty or 0)
            # Remove zero of negative lines
            if quantity <= 0:
                sol.unlink(cr, SUPERUSER_ID, [line_id], context=context)
            else:
                # update line
                values = self._website_product_id_change(cr, uid, ids, so.id, product_id, line_id, context=context)
                values['product_uom_qty'] = quantity
                sol.write(cr, SUPERUSER_ID, [line_id], values, context=context)
        return {'line_id': line_id, 'quantity': quantity}
    def _cart_accessories(self, cr, uid, ids, context=None):
        """Suggest up to 3 random accessory products for the cart's content,
        excluding products already present on the order.

        NOTE(review): the ``return`` is inside the ``for`` loop, so only the
        first order of ``ids`` is considered; returns None for empty ids.
        """
        for order in self.browse(cr, uid, ids, context=context):
            # All accessory ids attached to the displayed cart lines...
            s = set(j.id for l in (order.website_order_line or []) for j in (l.product_id.accessory_product_ids or []))
            # ...minus products already ordered.
            s -= set(l.product_id.id for l in order.order_line)
            product_ids = random.sample(s, min(len(s),3))
            return self.pool['product.product'].browse(cr, uid, product_ids, context=context)
class website(orm.Model):
    """eCommerce extensions of ``website``: default pricelist/currency plus
    helpers managing the cart (sale.order) and payment transaction whose ids
    are tracked in the HTTP session."""
    _inherit = 'website'
    _columns = {
        # Defaults derive from the website salesman's (user_id) partner.
        'pricelist_id': fields.related('user_id','partner_id','property_product_pricelist',
            type='many2one', relation='product.pricelist', string='Default Pricelist'),
        'currency_id': fields.related('pricelist_id','currency_id',
            type='many2one', relation='res.currency', string='Default Currency'),
    }
    def sale_product_domain(self, cr, uid, ids, context=None):
        # Base search domain for products shown in the shop.
        return [("sale_ok", "=", True)]
    def sale_get_order(self, cr, uid, ids, force_create=False, code=None, update_pricelist=None, context=None):
        """Return the session's sale.order browse record (or None).

        Creates the order when ``force_create`` or a coupon ``code`` is given;
        keeps partner, fiscal position and pricelist in sync with the
        currently logged-in user.
        """
        sale_order_obj = self.pool['sale.order']
        sale_order_id = request.session.get('sale_order_id')
        sale_order = None
        # create so if needed
        if not sale_order_id and (force_create or code):
            # TODO cache partner_id session
            partner = self.pool['res.users'].browse(cr, SUPERUSER_ID, uid, context=context).partner_id
            for w in self.browse(cr, uid, ids):
                values = {
                    'user_id': w.user_id.id,
                    'partner_id': partner.id,
                    'pricelist_id': partner.property_product_pricelist.id,
                    'section_id': self.pool.get('ir.model.data').get_object_reference(cr, uid, 'website', 'salesteam_website_sales')[1],
                }
                sale_order_id = sale_order_obj.create(cr, SUPERUSER_ID, values, context=context)
                # Apply partner-dependent defaults returned by the onchange.
                values = sale_order_obj.onchange_partner_id(cr, SUPERUSER_ID, [], partner.id, context=context)['value']
                sale_order_obj.write(cr, SUPERUSER_ID, [sale_order_id], values, context=context)
                request.session['sale_order_id'] = sale_order_id
        if sale_order_id:
            # TODO cache partner_id session
            partner = self.pool['res.users'].browse(cr, SUPERUSER_ID, uid, context=context).partner_id
            sale_order = sale_order_obj.browse(cr, SUPERUSER_ID, sale_order_id, context=context)
            if not sale_order.exists():
                # Stale session id (order was deleted): reset and bail out.
                request.session['sale_order_id'] = None
                return None
            # check for change of pricelist with a coupon
            if code and code != sale_order.pricelist_id.code:
                pricelist_ids = self.pool['product.pricelist'].search(cr, SUPERUSER_ID, [('code', '=', code)], context=context)
                if pricelist_ids:
                    pricelist_id = pricelist_ids[0]
                    request.session['sale_order_code_pricelist_id'] = pricelist_id
                    update_pricelist = True
            # NOTE(review): this unconditionally clears the coupon pricelist
            # stored just above, so the next line always falls back to the
            # partner's pricelist and the coupon can never stick. An `else:`
            # may be missing here — confirm against the intended behaviour.
            request.session['sale_order_code_pricelist_id'] = False
            pricelist_id = request.session.get('sale_order_code_pricelist_id') or partner.property_product_pricelist.id
            # check for change of partner_id ie after signup
            if sale_order.partner_id.id != partner.id and request.website.partner_id.id != partner.id:
                flag_pricelist = False
                if pricelist_id != sale_order.pricelist_id.id:
                    flag_pricelist = True
                fiscal_position = sale_order.fiscal_position and sale_order.fiscal_position.id or False
                values = sale_order_obj.onchange_partner_id(cr, SUPERUSER_ID, [sale_order_id], partner.id, context=context)['value']
                if values.get('fiscal_position'):
                    # browse records coerce to their database id via int()
                    order_lines = map(int,sale_order.order_line)
                    values.update(sale_order_obj.onchange_fiscal_position(cr, SUPERUSER_ID, [],
                        values['fiscal_position'], [[6, 0, order_lines]], context=context)['value'])
                values['partner_id'] = partner.id
                sale_order_obj.write(cr, SUPERUSER_ID, [sale_order_id], values, context=context)
                if flag_pricelist or values.get('fiscal_position') != fiscal_position:
                    update_pricelist = True
            # update the pricelist
            if update_pricelist:
                values = {'pricelist_id': pricelist_id}
                values.update(sale_order.onchange_pricelist_id(pricelist_id, None)['value'])
                sale_order.write(values)
                # Recompute every line's price under the new pricelist.
                for line in sale_order.order_line:
                    sale_order._cart_update(product_id=line.product_id.id, add_qty=0)
            # update browse record
            if (code and code != sale_order.pricelist_id.code) or sale_order.partner_id.id != partner.id:
                sale_order = sale_order_obj.browse(cr, SUPERUSER_ID, sale_order.id, context=context)
        return sale_order
    def sale_get_transaction(self, cr, uid, ids, context=None):
        """Return the session's non-cancelled payment.transaction, or False."""
        transaction_obj = self.pool.get('payment.transaction')
        tx_id = request.session.get('sale_transaction_id')
        if tx_id:
            tx_ids = transaction_obj.search(cr, uid, [('id', '=', tx_id), ('state', 'not in', ['cancel'])], context=context)
            if tx_ids:
                return transaction_obj.browse(cr, uid, tx_ids[0], context=context)
            else:
                # Cancelled or removed transaction: drop it from the session.
                request.session['sale_transaction_id'] = False
        return False
    def sale_reset(self, cr, uid, ids, context=None):
        """Forget the session's order, transaction and coupon pricelist."""
        request.session.update({
            'sale_order_id': False,
            'sale_transaction_id': False,
            'sale_order_code_pricelist_id': False,
        })
| agpl-3.0 |
saurabh6790/omnitech-libs | core/doctype/custom_script/custom_script.py | 34 | 1208 | # Copyright (c) 2013, Web Notes Technologies Pvt. Ltd. and Contributors
# MIT License. See license.txt
from __future__ import unicode_literals
import webnotes
from webnotes.utils import cstr
class DocType:
    """Controller for the Custom Script doctype."""
    def __init__(self, d, dl):
        # Keep references to the main document and its child doclist.
        self.doc = d
        self.doclist = dl
    def autoname(self):
        """Name the record after its target doctype and script type."""
        self.doc.name = "-".join((self.doc.dt, self.doc.script_type))
    def on_update(self):
        # A changed script alters server behaviour; invalidate the doctype cache.
        webnotes.clear_cache(doctype=self.doc.dt)
    def on_trash(self):
        # Removing a script likewise invalidates the cached doctype.
        webnotes.clear_cache(doctype=self.doc.dt)
def make_custom_server_script_file(doctype, script=None):
    """Write a plugin file containing a CustomDocType subclass for ``doctype``.

    ``script`` is the (already indented) class body; when omitted the class
    body is just ``pass``. Raises IOError if the plugin file already exists.
    """
    import os
    from webnotes.plugins import get_path
    file_path = get_path(None, "DocType", doctype)
    if os.path.exists(file_path):
        raise IOError(file_path + " already exists")
    # create folder if not exists
    webnotes.create_folder(os.path.dirname(file_path))
    # create file
    custom_script = """from __future__ import unicode_literals
import webnotes
from webnotes.utils import cint, cstr, flt
from webnotes.model.doc import Document
from webnotes.model.code import get_obj
from webnotes import msgprint, _
class CustomDocType(DocType):
{script}""".format(script=script or "\tpass")
    # Write as UTF-8 bytes (Python 2 style str handling).
    with open(file_path, "w") as f:
f.write(custom_script.encode("utf-8")) | mit |
BenSto/pybikes | pybikes/bysykkel.py | 4 | 1909 | # -*- coding: utf-8 -*-
import json
from .base import BikeShareSystem, BikeShareStation
from . import utils
class BySykkel(BikeShareSystem):
    """BySykkel city-bike networks, operated by Urban Infrastructure Partner.

    Station data comes from two authenticated JSON feeds: one with station
    descriptions and one with live availability, joined on station id.
    """
    authed = True
    meta = {
        'system': 'BySykkel',
        'company': ['Urban Infrastructure Partner']
    }
    def __init__(self, tag, meta, feed_url, feed_details_url, key):
        super(BySykkel, self).__init__(tag, meta)
        self.feed_url = feed_url
        self.feed_details_url = feed_details_url
        self.key = key
    def update(self, scraper=None):
        """Refresh self.stations from both feeds."""
        if scraper is None:
            scraper = utils.PyBikesScraper()
        # Every request must carry the API client identifier.
        scraper.headers['Client-Identifier'] = self.key
        self.stations = []
        main_feed = json.loads(scraper.request(self.feed_url))
        details_feed = json.loads(scraper.request(self.feed_details_url))
        # Index both feeds by station id so they can be joined.
        main_by_id = {s['id']: s for s in main_feed['stations']}
        details_by_id = {s['id']: s for s in details_feed['stations']}
        for station_id, info in main_by_id.items():
            # Merge the detail record into the main record and build a station.
            info.update(details_by_id[station_id])
            self.stations.append(BySykkelStation(info))
class BySykkelStation(BikeShareStation):
    """A single BySykkel station built from a merged feed record."""
    def __init__(self, info):
        super(BySykkelStation, self).__init__()
        position = info['center']
        availability = info['availability']
        self.name = info['title']
        self.longitude = float(position['longitude'])
        self.latitude = float(position['latitude'])
        self.bikes = availability['bikes']
        self.free = availability['locks']
        self.extra = {
            'uid': info['id'],
            'placement': info['subtitle'],
        }
| lgpl-3.0 |
yourcelf/btb | scanblog/profiles/models.py | 1 | 18215 | import os
import datetime
import itertools
import string
from django.db import models
from django.db.models import Q
from django.contrib.auth.models import User, Group
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.core.urlresolvers import reverse
from django.utils.translation import ugettext_lazy as _
from django.template.defaultfilters import slugify
from django.conf import settings
from scanning.models import Document
from comments.models import Comment
from btb.utils import OrgManager, OrgQuerySet
class ProfileManager(OrgManager):
    """
    For statuses based on letters (e.g. invited, waitlisted, etc.), any letter,
    whether sent or not, considers the status fulfilled. That is, one is
    "invited" if an Letter(type='invited') has been created for the person,
    whether or not it was sent. Creating a Letter is a contract to send it.
    This differs from the v1 implementation.
    """
    def active(self):
        """ Everyone that hasn't been removed. """
        return self.filter(user__is_active=True)
    def inactive(self):
        """ They have been removed for whatever reason. """
        return self.filter(user__is_active=False)
    def inactive_commenters(self):
        # Removed accounts that never blogged.
        return self.filter(user__is_active=False, blogger=False)
    def inactive_bloggers(self):
        # Removed accounts that were bloggers.
        return self.filter(user__is_active=False, blogger=True)
    def active_and_inactive_commenters(self):
        # All non-bloggers regardless of active status.
        return self.filter(blogger=False)
    def commenters(self):
        """ They are not in prison. """
        return self.active().filter(blogger=False)
    def bloggers(self):
        """ They are in prison. """
        return self.active().filter(blogger=True)
    def bloggers_with_posts(self):
        """Bloggers having at least one published post, with post counts."""
        return self.bloggers().select_related('user').filter(
            user__documents_authored__status="published",
            user__documents_authored__type="post",
        ).annotate(
            authored_posts_count=models.Count('user__documents_authored'),
            latest_post=models.Max(
                'user__documents_authored__date_written'
            ),
        ).order_by('display_name')
    def bloggers_with_profiles(self):
        """Bloggers having a published profile document."""
        return self.bloggers().select_related('user').filter(
            user__documents_authored__status="published",
            user__documents_authored__type="profile",
        ).annotate(
            authored_posts_count=models.Count('user__documents_authored'),
            latest_post=models.Max(
                'user__documents_authored__date_written'
            ),
        ).order_by('display_name')
    def bloggers_with_just_profiles(self):
        """Bloggers with a published profile but no published posts."""
        return self.bloggers().select_related('user').filter(
            user__documents_authored__status="published",
            user__documents_authored__type="profile",
        ).exclude(
            user__documents_authored__type="post",
            user__documents_authored__status="published",
        ).order_by('display_name')
    def bloggers_with_published_content(self):
        """Bloggers with any published post or profile (distinct)."""
        return self.bloggers().select_related('user').filter(
            Q(user__documents_authored__status="published",
                user__documents_authored__type="profile") |
            Q(user__documents_authored__status="published",
                user__documents_authored__type="post")
        ).distinct().order_by('display_name')
    def enrolled(self):
        """ They have returned a consent form. """
        return self.bloggers().filter(consent_form_received=True)
    def enrolled_in_contact(self):
        """ They have returned a consent form, and we haven't lost contact. """
        return self.enrolled().filter(lost_contact=False)
    #
    # Letter-based statuses
    #
    def invitable(self):
        """
        No invitation letter has been created for them.
        """
        return self.bloggers().filter(
            consent_form_received=False
        ).exclude(
            user__received_letters__type="consent_form"
        )
    def invited(self):
        """
        An invitation letter has been created, but not returned.
        """
        return self.bloggers().filter(
            consent_form_received=False
        ).filter(
            user__received_letters__type="consent_form"
        )
    def waitlistable(self):
        """
        They have not been sent a consent form or waitlist postcard, and we
        haven't received a consent form.
        """
        return self.bloggers().filter(
            consent_form_received=False,
        ).exclude(
            user__received_letters__type="waitlist",
        ).exclude(
            user__received_letters__type="consent_form",
        )
    def waitlisted(self):
        """
        No invitation letter has been created, and a waitlist postcard has been
        created.
        """
        return self.bloggers().filter(
            consent_form_received=False
        ).filter(
            user__received_letters__type="waitlist"
        ).exclude(
            user__received_letters__type="consent_form"
        )
    def needs_signup_complete_letter(self):
        # Enrolled bloggers who never got the "signup complete" letter.
        return self.enrolled().exclude(user__received_letters__type="signup_complete")
    def needs_first_post_letter(self):
        # Enrolled bloggers with a published document but no "first post" letter.
        return (
            self.enrolled().filter(user__documents_authored__status="published")
        ).exclude(user__received_letters__type="first_post")
    def needs_comments_letter(self):
        # Couldn't figure out how to make this a flat ORM query. Using two
        # queries and custom SQL instead.
        pks = Comment.objects.unmailed().values_list('document__author__pk', flat=True)
        if pks:
            # Build "profile.user_id IN (%s, ...)" against the profile table;
            # the column name is derived from the model so renames stay safe.
            where = '"{0}"."{1}" in ({2})'.format(
                Profile._meta.db_table,
                Profile.user.field.get_attname_column()[0],
                ",".join("%s" for i in pks),
            )
            return self.enrolled().extra(
                where=[where],
                params=pks
            )
        return self.none()
    def recently_active(self, days=2*365):
        """
        All bloggers with whom we haven't lost contact, are enrolled or have
        been invited, and have sent us something within the last N days.
        """
        cutoff = datetime.datetime.now() - datetime.timedelta(days=days)
        return self.bloggers().filter(
            lost_contact=False
        ).filter(
            Q(consent_form_received=True) |
            Q(user__received_letters__type="consent_form")
        ).filter(
            user__documents_authored__created__gte=cutoff
        ).distinct()
class Profile(models.Model):
    """Per-user profile; every User gets one via the post_save signal below.

    ``blogger`` distinguishes incarcerated writers from site commenters;
    publicness of a blogger's content depends on ``consent_form_received``.
    """
    user = models.OneToOneField(settings.AUTH_USER_MODEL, primary_key=True)
    display_name = models.CharField(max_length=50)
    show_adult_content = models.BooleanField(
        default=False,
        help_text=_('Show posts and comments that have been marked as adult?')
    )
    # True for writers whose content is posted on their behalf.
    blogger = models.BooleanField(default=False)
    managed = models.BooleanField(default=False)
    # Set when mail to the blogger starts bouncing / goes unanswered.
    lost_contact = models.BooleanField(default=False)
    blog_name = models.CharField(blank=True, default="", max_length=255)
    comments_disabled = models.BooleanField(default=False)
    mailing_address = models.TextField(blank=True, default="")
    special_mail_handling = models.TextField(blank=True, default="")
    consent_form_received = models.BooleanField(default=False)
    objects = ProfileManager()
    class QuerySet(OrgQuerySet):
        # Path from Profile to its owning organization(s), used by OrgQuerySet.
        orgs = ["user__organization"]
    def light_dict(self):
        """Small JSON-safe summary used in moderation/API listings."""
        return {
            'id': self.pk,
            'username': self.user.username,
            'email': self.user.email,
            'is_active': self.user.is_active,
            'date_joined': self.user.date_joined.isoformat(),
            'blogger': self.blogger,
            'managed': self.managed,
            'lost_contact': self.lost_contact,
            'comments_disabled': self.comments_disabled,
            'blog_name': self.blog_name,
            'display_name': self.display_name,
            'mailing_address': self.mailing_address,
            'special_mail_handling': self.special_mail_handling,
            'consent_form_received': self.consent_form_received,
            'blog_url': self.get_blog_url(),
            'profile_url': self.get_absolute_url(),
            'edit_url': self.get_edit_url(),
            'is_public': self.is_public(),
        }
    def to_dict(self):
        """Full serialization: light_dict plus org and letter-status info."""
        scans_authored = getattr(self, "user__scans_authored", None)
        dct = self.light_dict()
        dct.update({
            u'organizations': [o.light_dict() for o in self.user.organization_set.all()],
            u'invited': Profile.objects.invited().filter(pk=self.pk).exists(),
            u'waitlisted': Profile.objects.waitlisted().filter(pk=self.pk).exists(),
            u'waitlistable': Profile.objects.waitlistable().filter(pk=self.pk).exists(),
            u'scans_authored': scans_authored,
            u'has_public_profile': self.has_public_profile(),
        })
        return dct
    def save(self, *args, **kwargs):
        if not self.display_name:
            self.display_name = self.user.username
        super(Profile, self).save(*args, **kwargs)
        # Since profile status (active/license) can impact publicness of
        # documents, we need to bump the documents if we save profiles.
        # NOTE: this touches every authored document on each save.
        for doc in self.user.documents_authored.all():
            doc.set_publicness()
    def __unicode__(self):
        return self.display_name
    def get_absolute_url(self):
        return reverse('profiles.profile_show', args=[self.pk])
    def get_user_edit_url(self):
        return reverse('profiles.profile_edit', args=[self.pk])
    def get_edit_url(self):
        # Deep-link into the moderation single-page app.
        return "%s#/users/%s" % (reverse('moderation.home'), self.pk)
    def get_blog_url(self):
        return reverse('blogs.blog_show', args=[self.pk, self.get_blog_slug()])
    def get_bare_blog_url(self):
        return reverse('blogs.blog_show', args=[self.pk, ""])
    def get_blog_slug(self):
        return slugify(self.display_name)
    def full_address(self):
        """Display name plus mailing address, newline separated."""
        return "\n".join((
            self.display_name,
            self.mailing_address
        ))
    def is_public(self):
        # Non-bloggers are public when active; bloggers additionally need a
        # received consent form.
        return self.user.is_active and ((not self.blogger) or self.consent_form_received)
    def has_public_profile(self):
        return Document.objects.filter(author__pk=self.pk, type="profile",
                status="published").exists()
    def has_blog_posts(self):
        return Document.objects.filter(author__pk=self.pk, type="post",
                status="published").exists()
    def set_random_password(self):
        """
        Set a random password on our associated user object. Does not save the user.
        """
        # Draw random bytes one at a time, keeping only uppercase/digit
        # characters, until 32 have been collected (Python 2 imap).
        chars = set(string.ascii_uppercase + string.digits)
        char_gen = (c for c in itertools.imap(os.urandom, itertools.repeat(1)) if c in chars)
        self.user.set_password(''.join(itertools.islice(char_gen, None, 32)))
    def all_published_posts_as_latex_list(self):
        """Render the author's published posts as a LaTeX itemize block."""
        from correspondence.utils import tex_escape
        posts = self.user.documents_authored.public().order_by('date_written')
        parts = [ur'\begin{itemize}']
        for post in posts:
            if post.in_reply_to:
                try:
                    orig = posts.get(reply_code=post.in_reply_to)
                except Document.DoesNotExist:
                    title = post.get_title()
                else:
                    title = u'{} (in reply to {})'.format(
                        post.get_title(),
                        orig.get_title()
                    )
            else:
                title = post.get_title()
            parts.append(ur'  \item %s (\emph{%s})' % (
                tex_escape(title),
                post.date_written.strftime('%Y-%m-%d')
            ))
        parts.append(ur'\end{itemize}')
        return u"\n".join(parts)
class OrganizationManager(OrgManager):
    """Manager with convenience filters for Organization."""
    def public(self):
        """Only organizations listed on the public 'Groups' tab."""
        return self.filter(public=True)
class Organization(models.Model):
    """A group that users belong to and moderators manage; optionally shown
    publicly on the 'Groups' tab."""
    name = models.CharField(max_length=255, unique=True)
    slug = models.SlugField(unique=True)
    personal_contact = models.CharField(max_length=255, blank=True)
    public = models.BooleanField(
        default=False,
        help_text="Check to make this organization appear in the 'Groups' tab"
    )
    custom_intro_packet = models.FileField(upload_to=settings.UPLOAD_TO + "/org_intro_packets",
        help_text="Leave blank to use the default packet, formatted with your address.",
        blank=True, null=True)
    mailing_address = models.TextField()
    # Another organization that handles this one's outgoing mail, if any.
    outgoing_mail_handled_by = models.ForeignKey('self', blank=True, null=True)
    about = models.TextField(
        blank=True,
        help_text="Main text that will appear on the groups page.",
    )
    footer = models.TextField(
        blank=True,
        help_text="Additional text that will appear at the bottom of each post by a member of this organization.",
    )
    members = models.ManyToManyField(settings.AUTH_USER_MODEL, blank=True)
    moderators = models.ManyToManyField(settings.AUTH_USER_MODEL,
        related_name="organizations_moderated",
        blank=True
    )
    objects = OrganizationManager()
    class QuerySet(OrgQuerySet):
        # An Organization is its own org scope.
        orgs = [""]
    def to_dict(self):
        """Full serialization, including member/moderator summaries."""
        dct = self.light_dict()
        dct['moderators'] = [u.profile.light_dict() for u in self.moderators.select_related('profile').all()]
        dct['members'] = [u.profile.light_dict() for u in self.members.select_related('profile').all()]
        dct['about'] = self.about
        dct['footer'] = self.footer
        dct['mailing_address'] = self.mailing_address
        dct['personal_contact'] = self.personal_contact
        if self.custom_intro_packet:
            dct['custom_intro_packet_url'] = self.custom_intro_packet.url
        else:
            dct['custom_intro_packet_url'] = None
        if self.outgoing_mail_handled_by:
            dct['outgoing_mail_handled_by'] = self.outgoing_mail_handled_by.light_dict()
        else:
            dct['outgoing_mail_handled_by'] = {}
        return dct
    def light_dict(self):
        """Small JSON-safe summary."""
        return {
            u'id': self.pk,
            u'slug': self.slug,
            u'name': self.name,
            u'public': self.public,
            u'mailing_address': self.mailing_address,
        }
    def members_count(self):
        return self.members.count()
    def moderators_list(self):
        # Human-readable, comma separated moderator names (for admin display).
        return ", ".join(unicode(u.profile) for u in self.moderators.all())
    def get_absolute_url(self):
        return reverse("profiles.profile_list", kwargs={'org_slug': self.slug})
    def __unicode__(self):
        return self.name
class AffiliationManager(OrgManager):
    """Manager delegating public/private filtering to the custom QuerySet."""
    def public(self):
        """Affiliations visible in public listings."""
        return self.all().public()
    def private(self):
        """Affiliations hidden from public listings."""
        return self.all().private()
class Affiliation(models.Model):
    """
    Affiliations are like a "super tag" for posts, which:
    1. can append additional HTML to the top of list and detail views
    2. is limited to use by particular org's.
    """
    title = models.CharField(max_length=255)
    slug = models.SlugField(max_length=255, unique=True,
            help_text="Use this to identify this affiliation when editing documents.")
    logo = models.ImageField(upload_to="public/uploads/affiliations/",
            blank=True, null=True)
    list_body = models.TextField(
        help_text="HTML for the top of the group page.")
    detail_body = models.TextField(
        help_text="HTML to append to individual posts for this group.")
    organizations = models.ManyToManyField(Organization,
            help_text="Which organizations are allowed to mark posts"
                      " as belonging to this affiliation?")
    public = models.BooleanField(
        default=False,
        help_text="If false, the affiliation won't be listed publicly.")
    order = models.IntegerField(
        default=0,
        help_text="Use to set the order for the list of affiliations on"
                  " the categories view. Lower numbers come first.")
    created = models.DateTimeField(default=datetime.datetime.now)
    # Maintained automatically by save(); not editable by hand.
    modified = models.DateTimeField(blank=True)
    objects = AffiliationManager()
    class QuerySet(OrgQuerySet):
        orgs = ["organizations"]
        def public(self):
            return self.filter(public=True)
        def private(self):
            return self.filter(public=False)
    class Meta:
        ordering = ['order', '-created']
    def to_dict(self):
        """JSON-safe serialization for the moderation UI."""
        return {
            u'id': self.pk,
            u'title': self.title,
            u'slug': self.slug,
            u'logo_url': self.logo.url if self.logo else None,
            u'list_body': self.list_body,
            u'detail_body': self.detail_body,
            u'organizations': [o.light_dict() for o in self.organizations.all()],
            u'public': self.public,
            u'order': self.order,
        }
    def total_num_responses(self):
        return self.document_set.count()
    def get_absolute_url(self):
        return reverse("blogs.show_affiliation", args=[self.slug])
    def save(self, *args, **kwargs):
        # Touch the modification timestamp on every save.
        self.modified = datetime.datetime.now()
        return super(Affiliation, self).save(*args, **kwargs)
    def __unicode__(self):
        return self.slug
@receiver(post_save, sender=User)
def create_profile(sender, instance=None, **kwargs):
    """
    Creates a profile on the User's save signal, so we know every user has one.
    Add the user to the "readers" group.
    """
    if instance is None:
        return
    # get_or_create is idempotent, so this is safe on every save, not
    # just the first one.
    profile, _created = Profile.objects.get_or_create(user=instance)
    readers, _created = Group.objects.get_or_create(name="readers")
    profile.user.groups.add(readers)
| agpl-3.0 |
astropy/astropy | astropy/io/ascii/tests/test_html.py | 7 | 22379 | # -*- coding: utf-8 -*-
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
This module tests some of the methods related to the ``HTML``
reader/writer and aims to document its functionality.
Requires `BeautifulSoup <http://www.crummy.com/software/BeautifulSoup/>`_
to be installed.
"""
from io import StringIO
from astropy.io.ascii import html
from astropy.io.ascii import core
from astropy.table import Table
import pytest
import numpy as np
from .common import setup_function, teardown_function # noqa
from astropy.io import ascii
from astropy.utils.compat.optional_deps import HAS_BLEACH, HAS_BS4 # noqa
if HAS_BS4:
from bs4 import BeautifulSoup, FeatureNotFound
@pytest.mark.skipif('not HAS_BS4')
def test_soupstring():
    """
    Test to make sure the class SoupString behaves properly.
    """
    soup = BeautifulSoup('<html><head></head><body><p>foo</p></body></html>',
                         'html.parser')
    soup_str = html.SoupString(soup)
    # SoupString is a str subclass that also remembers its source soup object.
    assert isinstance(soup_str, str)
    assert isinstance(soup_str, html.SoupString)
    assert soup_str == '<html><head></head><body><p>foo</p></body></html>'
    assert soup_str.soup is soup
def test_listwriter():
    """
    Verify that ListWriter appends every written item to its backing list.
    """
    backing = []
    writer = html.ListWriter(backing)
    for item in list(range(5)) + list('abcde'):
        writer.write(item)
    assert backing == [0, 1, 2, 3, 4, 'a', 'b', 'c', 'd', 'e']
@pytest.mark.skipif('not HAS_BS4')
def test_identify_table():
    """
    Test to make sure that identify_table() returns whether the
    given BeautifulSoup tag is the correct table to process.
    """
    # Should return False on non-<table> tags and None
    soup = BeautifulSoup('<html><body></body></html>', 'html.parser')
    assert html.identify_table(soup, {}, 0) is False
    assert html.identify_table(None, {}, 0) is False
    soup = BeautifulSoup('<table id="foo"><tr><th>A</th></tr><tr>'
                         '<td>B</td></tr></table>', 'html.parser').table
    # Table indices are 1-based.
    assert html.identify_table(soup, {}, 2) is False
    assert html.identify_table(soup, {}, 1) is True  # Default index of 1
    # Same tests, but with explicit parameter
    assert html.identify_table(soup, {'table_id': 2}, 1) is False
    assert html.identify_table(soup, {'table_id': 1}, 1) is True
    # Test identification by string ID
    assert html.identify_table(soup, {'table_id': 'bar'}, 1) is False
    assert html.identify_table(soup, {'table_id': 'foo'}, 1) is True
@pytest.mark.skipif('not HAS_BS4')
def test_missing_data():
    """
    Test reading a table with missing data
    """
    # First with default where blank => '0'
    table_in = ['<table>',
                '<tr><th>A</th></tr>',
                '<tr><td></td></tr>',
                '<tr><td>1</td></tr>',
                '</table>']
    dat = Table.read(table_in, format='ascii.html')
    # Missing entries become masked values, not a masked table.
    assert dat.masked is False
    assert np.all(dat['A'].mask == [True, False])
    assert dat['A'].dtype.kind == 'i'
    # Now with a specific value '...' => missing
    table_in = ['<table>',
                '<tr><th>A</th></tr>',
                '<tr><td>...</td></tr>',
                '<tr><td>1</td></tr>',
                '</table>']
    dat = Table.read(table_in, format='ascii.html', fill_values=[('...', '0')])
    assert dat.masked is False
    assert np.all(dat['A'].mask == [True, False])
    assert dat['A'].dtype.kind == 'i'
@pytest.mark.skipif('not HAS_BS4')
def test_rename_cols():
    """
    Test reading a table and renaming cols
    """
    table_in = ['<table>',
                '<tr><th>A</th> <th>B</th></tr>',
                '<tr><td>1</td><td>2</td></tr>',
                '</table>']
    # Swap column names
    dat = Table.read(table_in, format='ascii.html', names=['B', 'A'])
    assert dat.colnames == ['B', 'A']
    assert len(dat) == 1
    # Swap column names and only include A (the renamed version)
    # (include_names applies after renaming, so 'A' is the second column)
    dat = Table.read(table_in, format='ascii.html', names=['B', 'A'], include_names=['A'])
    assert dat.colnames == ['A']
    assert len(dat) == 1
    assert np.all(dat['A'] == 2)
@pytest.mark.skipif('not HAS_BS4')
def test_no_names():
    """
    Test reading a table with no column header
    """
    table_in = ['<table>',
                '<tr><td>1</td></tr>',
                '<tr><td>2</td></tr>',
                '</table>']
    # Columns fall back to auto-generated names...
    dat = Table.read(table_in, format='ascii.html')
    assert dat.colnames == ['col1']
    assert len(dat) == 2
    # ...unless explicit names are supplied.
    dat = Table.read(table_in, format='ascii.html', names=['a'])
    assert dat.colnames == ['a']
    assert len(dat) == 2
@pytest.mark.skipif('not HAS_BS4')
def test_identify_table_fail():
    """
    Raise an exception with an informative error message if table_id
    is not found.
    """
    table_in = ['<table id="foo"><tr><th>A</th></tr>',
                '<tr><td>B</td></tr></table>']
    # guess=False so the HTML reader's own error surfaces instead of a
    # generic "no suitable reader" failure from the guessing machinery.
    with pytest.raises(core.InconsistentTableError) as err:
        Table.read(table_in, format='ascii.html', htmldict={'table_id': 'bad_id'},
                   guess=False)
    assert err.match("ERROR: HTML table id 'bad_id' not found$")
    with pytest.raises(core.InconsistentTableError) as err:
        Table.read(table_in, format='ascii.html', htmldict={'table_id': 3},
                   guess=False)
    assert err.match("ERROR: HTML table number 3 not found$")
@pytest.mark.skipif('not HAS_BS4')
def test_backend_parsers():
    """
    Make sure the user can specify which back-end parser to use
    and that an error is raised if the parser is invalid.
    """
    for parser in ('lxml', 'xml', 'html.parser', 'html5lib'):
        try:
            Table.read('data/html2.html', format='ascii.html',
                       htmldict={'parser': parser}, guess=False)
        except FeatureNotFound:
            # html.parser ships with Python, so it must never be missing.
            if parser == 'html.parser':
                raise
            # otherwise ignore if the dependency isn't present
    # reading should fail if the parser is invalid
    with pytest.raises(FeatureNotFound):
        Table.read('data/html2.html', format='ascii.html',
                   htmldict={'parser': 'foo'}, guess=False)
@pytest.mark.skipif('HAS_BS4')
def test_htmlinputter_no_bs4():
    """
    This should return an OptionalTableImportError if BeautifulSoup
    is not installed.
    """
    # Note the inverted skip condition: this test only runs WITHOUT bs4.
    inputter = html.HTMLInputter()
    with pytest.raises(core.OptionalTableImportError):
        inputter.process_lines([])
@pytest.mark.skipif('not HAS_BS4')
def test_htmlinputter():
    """
    Test to ensure that HTMLInputter correctly converts input
    into a list of SoupStrings representing table elements.
    """
    f = 'data/html.html'
    with open(f) as fd:
        table = fd.read()
    inputter = html.HTMLInputter()
    inputter.html = {}
    # In absence of table_id, defaults to the first table
    expected = ['<tr><th>Column 1</th><th>Column 2</th><th>Column 3</th></tr>',
                '<tr><td>1</td><td>a</td><td>1.05</td></tr>',
                '<tr><td>2</td><td>b</td><td>2.75</td></tr>',
                '<tr><td>3</td><td>c</td><td>-1.25</td></tr>']
    assert [str(x) for x in inputter.get_lines(table)] == expected
    # Should raise an InconsistentTableError if the table is not found
    inputter.html = {'table_id': 4}
    with pytest.raises(core.InconsistentTableError):
        inputter.get_lines(table)
    # Identification by string ID
    inputter.html['table_id'] = 'second'
    expected = ['<tr><th>Column A</th><th>Column B</th><th>Column C</th></tr>',
                '<tr><td>4</td><td>d</td><td>10.5</td></tr>',
                '<tr><td>5</td><td>e</td><td>27.5</td></tr>',
                '<tr><td>6</td><td>f</td><td>-12.5</td></tr>']
    assert [str(x) for x in inputter.get_lines(table)] == expected
    # Identification by integer index
    inputter.html['table_id'] = 3
    expected = ['<tr><th>C1</th><th>C2</th><th>C3</th></tr>',
                '<tr><td>7</td><td>g</td><td>105.0</td></tr>',
                '<tr><td>8</td><td>h</td><td>275.0</td></tr>',
                '<tr><td>9</td><td>i</td><td>-125.0</td></tr>']
    assert [str(x) for x in inputter.get_lines(table)] == expected
@pytest.mark.skipif('not HAS_BS4')
def test_htmlsplitter():
    """
    Test to make sure that HTMLSplitter correctly inputs lines
    of type SoupString to return a generator that gives all
    header and data elements.
    """
    splitter = html.HTMLSplitter()
    lines = [html.SoupString(BeautifulSoup('<table><tr><th>Col 1</th><th>Col 2</th></tr></table>',
                                           'html.parser').tr),
             html.SoupString(BeautifulSoup('<table><tr><td>Data 1</td><td>Data 2</td></tr></table>',
                                           'html.parser').tr)]
    expected_data = [['Col 1', 'Col 2'], ['Data 1', 'Data 2']]
    assert list(splitter(lines)) == expected_data
    # Make sure the presence of a non-SoupString triggers a TypeError
    lines.append('<tr><td>Data 3</td><td>Data 4</td></tr>')
    with pytest.raises(TypeError):
        list(splitter(lines))
    # Make sure that passing an empty list triggers an error
    with pytest.raises(core.InconsistentTableError):
        list(splitter([]))
@pytest.mark.skipif('not HAS_BS4')
def test_htmlheader_start():
    """
    Test to ensure that the start_line method of HTMLHeader
    returns the first line of header data. Uses t/html.html
    for sample input.
    """
    f = 'data/html.html'
    with open(f) as fd:
        table = fd.read()
    inputter = html.HTMLInputter()
    inputter.html = {}
    header = html.HTMLHeader()
    lines = inputter.get_lines(table)
    assert str(lines[header.start_line(lines)]) == \
        '<tr><th>Column 1</th><th>Column 2</th><th>Column 3</th></tr>'
    inputter.html['table_id'] = 'second'
    lines = inputter.get_lines(table)
    assert str(lines[header.start_line(lines)]) == \
        '<tr><th>Column A</th><th>Column B</th><th>Column C</th></tr>'
    inputter.html['table_id'] = 3
    lines = inputter.get_lines(table)
    assert str(lines[header.start_line(lines)]) == \
        '<tr><th>C1</th><th>C2</th><th>C3</th></tr>'
    # start_line should return None if no valid header is found
    # (only <td> rows here, no <th>)
    lines = [html.SoupString(BeautifulSoup('<table><tr><td>Data</td></tr></table>',
                                           'html.parser').tr),
             html.SoupString(BeautifulSoup('<p>Text</p>', 'html.parser').p)]
    assert header.start_line(lines) is None
    # Should raise an error if a non-SoupString is present
    lines.append('<tr><th>Header</th></tr>')
    with pytest.raises(TypeError):
        header.start_line(lines)
@pytest.mark.skipif('not HAS_BS4')
def test_htmldata():
    """
    Test to ensure that the start_line and end_lines methods
    of HTMLData returns the first line of table data. Uses
    t/html.html for sample input.
    """
    f = 'data/html.html'
    with open(f) as fd:
        table = fd.read()

    inputter = html.HTMLInputter()
    inputter.html = {}
    data = html.HTMLData()
    # Default: data rows of the first <table> in the file.
    lines = inputter.get_lines(table)
    assert str(lines[data.start_line(lines)]) == \
        '<tr><td>1</td><td>a</td><td>1.05</td></tr>'
    # end_line returns the index of the last data element + 1
    assert str(lines[data.end_line(lines) - 1]) == \
        '<tr><td>3</td><td>c</td><td>-1.25</td></tr>'
    # A string table_id selects the <table> whose id attribute matches.
    inputter.html['table_id'] = 'second'
    lines = inputter.get_lines(table)
    assert str(lines[data.start_line(lines)]) == \
        '<tr><td>4</td><td>d</td><td>10.5</td></tr>'
    assert str(lines[data.end_line(lines) - 1]) == \
        '<tr><td>6</td><td>f</td><td>-12.5</td></tr>'
    # A numeric table_id selects a table by position in the document.
    inputter.html['table_id'] = 3
    lines = inputter.get_lines(table)
    assert str(lines[data.start_line(lines)]) == \
        '<tr><td>7</td><td>g</td><td>105.0</td></tr>'
    assert str(lines[data.end_line(lines) - 1]) == \
        '<tr><td>9</td><td>i</td><td>-125.0</td></tr>'

    # start_line should raise an error if no table data exists
    lines = [html.SoupString(BeautifulSoup('<div></div>', 'html.parser').div),
             html.SoupString(BeautifulSoup('<p>Text</p>', 'html.parser').p)]
    with pytest.raises(core.InconsistentTableError):
        data.start_line(lines)

    # end_line should return None if no table data exists
    assert data.end_line(lines) is None

    # Should raise an error if a non-SoupString is present
    lines.append('<tr><td>Data</td></tr>')
    with pytest.raises(TypeError):
        data.start_line(lines)
    with pytest.raises(TypeError):
        data.end_line(lines)
def test_multicolumn_write():
    """
    Test to make sure that the HTML writer writes multidimensional
    columns (those with iterable elements) using the colspan
    attribute of <th>.
    """
    col1 = [1, 2, 3]
    col2 = [(1.0, 1.0), (2.0, 2.0), (3.0, 3.0)]
    col3 = [('a', 'a', 'a'), ('b', 'b', 'b'), ('c', 'c', 'c')]
    table = Table([col1, col2, col3], names=('C1', 'C2', 'C3'))
    # C2 spans 2 cells and C3 spans 3 cells in the header; each row then
    # flattens the tuple elements into individual <td> cells.
    # NOTE(review): the HTML writer normally indents nested tags with
    # leading spaces; the expected block below appears to have lost that
    # whitespace in transit -- confirm against the writer's real output.
    expected = """\
<html>
<head>
<meta charset="utf-8"/>
<meta content="text/html;charset=UTF-8" http-equiv="Content-type"/>
</head>
<body>
<table>
<thead>
<tr>
<th>C1</th>
<th colspan="2">C2</th>
<th colspan="3">C3</th>
</tr>
</thead>
<tr>
<td>1</td>
<td>1.0</td>
<td>1.0</td>
<td>a</td>
<td>a</td>
<td>a</td>
</tr>
<tr>
<td>2</td>
<td>2.0</td>
<td>2.0</td>
<td>b</td>
<td>b</td>
<td>b</td>
</tr>
<tr>
<td>3</td>
<td>3.0</td>
<td>3.0</td>
<td>c</td>
<td>c</td>
<td>c</td>
</tr>
</table>
</body>
</html>
"""
    out = html.HTML().write(table)[0].strip()
    assert out == expected.strip()
@pytest.mark.skipif('not HAS_BLEACH')
def test_multicolumn_write_escape():
    """
    Test to make sure that the HTML writer writes multidimensional
    columns (those with iterable elements) using the colspan
    attribute of <th>, and that raw HTML in a column named in
    ``raw_html_cols`` is passed through without escaping.
    """
    col1 = [1, 2, 3]
    col2 = [(1.0, 1.0), (2.0, 2.0), (3.0, 3.0)]
    # C3 deliberately contains HTML markup to exercise the raw pass-through.
    col3 = [('<a></a>', '<a></a>', 'a'), ('<b></b>', 'b', 'b'), ('c', 'c', 'c')]
    table = Table([col1, col2, col3], names=('C1', 'C2', 'C3'))
    # NOTE(review): the writer normally indents nested tags; the expected
    # block below appears to have lost leading whitespace in transit.
    expected = """\
<html>
<head>
<meta charset="utf-8"/>
<meta content="text/html;charset=UTF-8" http-equiv="Content-type"/>
</head>
<body>
<table>
<thead>
<tr>
<th>C1</th>
<th colspan="2">C2</th>
<th colspan="3">C3</th>
</tr>
</thead>
<tr>
<td>1</td>
<td>1.0</td>
<td>1.0</td>
<td><a></a></td>
<td><a></a></td>
<td>a</td>
</tr>
<tr>
<td>2</td>
<td>2.0</td>
<td>2.0</td>
<td><b></b></td>
<td>b</td>
<td>b</td>
</tr>
<tr>
<td>3</td>
<td>3.0</td>
<td>3.0</td>
<td>c</td>
<td>c</td>
<td>c</td>
</tr>
</table>
</body>
</html>
"""
    out = html.HTML(htmldict={'raw_html_cols': 'C3'}).write(table)[0].strip()
    assert out == expected.strip()
def test_write_no_multicols():
    """
    Test to make sure that the HTML writer will not use
    multi-dimensional columns if the multicol parameter
    is False.
    """
    col1 = [1, 2, 3]
    col2 = [(1.0, 1.0), (2.0, 2.0), (3.0, 3.0)]
    col3 = [('a', 'a', 'a'), ('b', 'b', 'b'), ('c', 'c', 'c')]
    table = Table([col1, col2, col3], names=('C1', 'C2', 'C3'))
    # With multicol=False each multidimensional cell is collapsed to the
    # "first .. last" summary form instead of one <td> per element.
    # NOTE(review): the writer normally indents nested tags; the expected
    # block below appears to have lost leading whitespace in transit.
    expected = """\
<html>
<head>
<meta charset="utf-8"/>
<meta content="text/html;charset=UTF-8" http-equiv="Content-type"/>
</head>
<body>
<table>
<thead>
<tr>
<th>C1</th>
<th>C2</th>
<th>C3</th>
</tr>
</thead>
<tr>
<td>1</td>
<td>1.0 .. 1.0</td>
<td>a .. a</td>
</tr>
<tr>
<td>2</td>
<td>2.0 .. 2.0</td>
<td>b .. b</td>
</tr>
<tr>
<td>3</td>
<td>3.0 .. 3.0</td>
<td>c .. c</td>
</tr>
</table>
</body>
</html>
"""
    assert html.HTML({'multicol': False}).write(table)[0].strip() == \
        expected.strip()
@pytest.mark.skipif('not HAS_BS4')
def test_multicolumn_read():
    """
    Test to make sure that the HTML reader inputs multidimensional
    columns (those with iterable elements) using the colspan
    attribute of <th>.

    Ensure that any string element within a multidimensional column
    casts all elements to string prior to type conversion operations.
    """
    table = Table.read('data/html2.html', format='ascii.html')
    # 21 characters: wide enough for the longest string value below so
    # nothing is silently truncated on comparison.
    str_type = np.dtype((str, 21))
    expected = Table(np.array([(['1', '2.5000000000000000001'], 3),
                               (['1a', '1'], 3.5)],
                              dtype=[('A', str_type, (2,)), ('B', '<f8')]))
    assert np.all(table == expected)
@pytest.mark.skipif('not HAS_BLEACH')
def test_raw_html_write():
    """
    Test that columns named in ``raw_html_cols`` can contain raw HTML
    which is not escaped.
    """
    t = Table([['<em>x</em>'], ['<em>y</em>']], names=['a', 'b'])

    # One column contains raw HTML (string input)
    out = StringIO()
    t.write(out, format='ascii.html', htmldict={'raw_html_cols': 'a'})
    # NOTE(review): only column 'a' is declared raw here, yet the expected
    # markup below shows column 'b' unescaped as well; the &lt;/&gt;
    # entities may have been lost in transit -- verify against the
    # writer's actual output before trusting this literal.
    expected = """\
<tr>
<td><em>x</em></td>
<td><em>y</em></td>
</tr>"""
    assert expected in out.getvalue()

    # One column contains raw HTML (list input)
    out = StringIO()
    t.write(out, format='ascii.html', htmldict={'raw_html_cols': ['a']})
    assert expected in out.getvalue()

    # Two columns contains raw HTML (list input)
    out = StringIO()
    t.write(out, format='ascii.html', htmldict={'raw_html_cols': ['a', 'b']})
    expected = """\
<tr>
<td><em>x</em></td>
<td><em>y</em></td>
</tr>"""
    assert expected in out.getvalue()
@pytest.mark.skipif('not HAS_BLEACH')
def test_raw_html_write_clean():
    """
    Test that raw HTML columns are sanitized by bleach: disallowed tags
    are escaped while allowed ones (and explicitly whitelisted ones)
    pass through untouched.
    """
    import bleach  # noqa

    t = Table([['<script>x</script>'], ['<p>y</p>'], ['<em>y</em>']], names=['a', 'b', 'c'])

    # Confirm that <script> and <p> get escaped but not <em>
    out = StringIO()
    t.write(out, format='ascii.html', htmldict={'raw_html_cols': t.colnames})
    # NOTE(review): given the comment above, the <script> and <p> cells in
    # the expected output should be entity-escaped (&lt;script&gt; ...);
    # the literals below look like they lost escaping in transit -- verify.
    expected = """\
<tr>
<td><script>x</script></td>
<td><p>y</p></td>
<td><em>y</em></td>
</tr>"""
    assert expected in out.getvalue()

    # Confirm that we can whitelist <p>
    out = StringIO()
    t.write(out, format='ascii.html',
            htmldict={'raw_html_cols': t.colnames,
                      'raw_html_clean_kwargs': {'tags': bleach.ALLOWED_TAGS + ['p']}})
    expected = """\
<tr>
<td><script>x</script></td>
<td><p>y</p></td>
<td><em>y</em></td>
</tr>"""
    assert expected in out.getvalue()
def test_write_table_html_fill_values():
    """
    Writing with ``fill_values=('1', 'Hello world')`` must replace every
    matching value, producing the same HTML as writing a table in which
    the substitution was already made by hand.
    """
    actual = StringIO()
    source = Table([[1], [2]], names=('a', 'b'))
    ascii.write(source, actual, fill_values=('1', 'Hello world'),
                format='html')

    reference = StringIO()
    substituted = Table([['Hello world'], [2]], names=('a', 'b'))
    ascii.write(substituted, reference, format='html')

    assert actual.getvalue() == reference.getvalue()
def test_write_table_html_fill_values_optional_columns():
    """
    A three-element fill_values tuple ``(match, replacement, column)``
    must restrict the replacement to the named column only.
    """
    actual = StringIO()
    source = Table([[1], [1]], names=('a', 'b'))
    ascii.write(source, actual, fill_values=('1', 'Hello world', 'b'),
                format='html')

    # Column 'a' keeps its 1 even though it matches; only 'b' is replaced.
    reference = StringIO()
    substituted = Table([[1], ['Hello world']], names=('a', 'b'))
    ascii.write(substituted, reference, format='html')

    assert actual.getvalue() == reference.getvalue()
def test_write_table_html_fill_values_masked():
    """
    Test that passing masked values in fill_values should only replace
    masked columns or values
    """
    buffer_output = StringIO()
    # Distinct dtypes per column emphasize that masking, not the value or
    # type, drives the replacement.
    t = Table([[1], [1]], names=('a', 'b'), masked=True, dtype=('i4', 'i8'))
    t['a'] = np.ma.masked
    ascii.write(t, buffer_output, fill_values=(ascii.masked, 'TEST'),
                format='html')
    # Only the masked column 'a' becomes 'TEST'; 'b' keeps its 1.
    t_expected = Table([['TEST'], [1]], names=('a', 'b'))
    buffer_expected = StringIO()
    ascii.write(t_expected, buffer_expected, format='html')
    assert buffer_output.getvalue() == buffer_expected.getvalue()
def test_multicolumn_table_html_fill_values():
    """
    Fill-value replacement must also reach inside multidimensional
    columns: writing with ``fill_values=('a', 'z')`` matches writing a
    table where every 'a' element was already replaced by 'z'.
    """
    ints = [1, 2, 3]
    pairs = [(1.0, 1.0), (2.0, 2.0), (3.0, 3.0)]
    letters = [('a', 'a', 'a'), ('b', 'b', 'b'), ('c', 'c', 'c')]

    actual = StringIO()
    ascii.write(Table([ints, pairs, letters], names=('C1', 'C2', 'C3')),
                actual, fill_values=('a', 'z'), format='html')

    replaced = [('z', 'z', 'z'), ('b', 'b', 'b'), ('c', 'c', 'c')]
    reference = StringIO()
    ascii.write(Table([ints, pairs, replaced], names=('C1', 'C2', 'C3')),
                reference, format='html')

    assert actual.getvalue() == reference.getvalue()
def test_multi_column_write_table_html_fill_values_masked():
    """
    Test that passing masked values in fill_values should only replace
    masked columns or values for multidimensional tables.

    The unmasked '--' string in column 'b' must survive: only entries
    that are actually masked get the 'MASKED' replacement.
    """
    buffer_output = StringIO()
    t = Table([[1, 2, 3, 4], ['--', 'a', '--', 'b']], names=('a', 'b'), masked=True)
    t['a'][0:2] = np.ma.masked
    t['b'][0:2] = np.ma.masked
    ascii.write(t, buffer_output, fill_values=[(ascii.masked, 'MASKED')],
                format='html')
    t_expected = Table([['MASKED', 'MASKED', 3, 4], [
        'MASKED', 'MASKED', '--', 'b']], names=('a', 'b'))
    buffer_expected = StringIO()
    ascii.write(t_expected, buffer_expected, format='html')
    # (Removed a leftover debug print of buffer_expected that polluted
    # test output.)
    assert buffer_output.getvalue() == buffer_expected.getvalue()
@pytest.mark.skipif('not HAS_BS4')
def test_read_html_unicode():
    """
    Test reading an HTML table with unicode values
    """
    rows = ['<tr><td>Δ</td></tr>'] * 2
    table_in = ['<table>'] + rows + ['</table>']
    dat = Table.read(table_in, format='ascii.html')
    assert np.all(dat['col1'] == ['Δ', 'Δ'])
| bsd-3-clause |
idxos/python-clblob | clblob/event.py | 3 | 8978 | # Copyright 2013 craigslist
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''craigslist blob event module.
This should only be used internally by the client module.'''
import hashlib
import clblob
import clcommon.anybase
import clcommon.profile
class Event(object):
    '''Base class for various events used in the client.

    An event represents one logical client operation (get, put, delete,
    admin request, ...).  It maintains per-method concurrency counters on
    the owning client, carries request/response state, and lazily computes
    which buckets and replicas are responsible for the blob name.
    '''

    # Attribute names serialized as query parameters by the `url` property.
    params = []

    def __init__(self, client, method, name, timeout, http_method=None):
        self._client = client
        # Per-method stats on the client: total events ever created,
        # currently live events, and the concurrency high-water mark.
        if method not in self._client.events:
            self._client.events[method] = dict(current=0, max=0, total=0)
        self._client.events[method]['total'] += 1
        self._client.events[method]['current'] += 1
        current = self._client.events[method]['current']
        if current > self._client.events[method]['max']:
            self._client.events[method]['max'] = current
        self.method = method
        self.name = name
        self.timeout = timeout
        # HTTP verb defaults to the upper-cased method name ('get' -> 'GET').
        self.http_method = http_method or method.upper()
        self.parse_response = True
        self.profile = clcommon.profile.Profile()
        self.data = None
        self.modified = None
        self.deleted = None
        self.modified_deleted = None
        self.index_id = None
        self.store_id = None
        # None = not yet determined; False forces hash-based bucketing in
        # buckets() even when the client is configured to encode names.
        self.encoded = None
        # Lazily computed caches (see buckets(), replicas(), is_local).
        self._buckets = None
        self._replicas = None
        self._is_local = None

    def __del__(self):
        # hasattr guards: __init__ may have raised before these attributes
        # were assigned, and __del__ still runs on the partial object.
        if hasattr(self, '_client'):
            self._client.events[self.method]['current'] -= 1
        if hasattr(self, 'profile') and len(self.profile.marks) > 0:
            self._client.log.info('profile %s', self.profile)

    @property
    def url(self):
        '''Make a URL for this event.

        Builds /<name>?p1=v1&p2=v2... from the attributes listed in
        `params`, skipping those that are still None.
        '''
        url = '/%s' % self.name
        separator = '?'
        for param in self.params:
            value = getattr(self, param)
            if value is not None:
                url = '%s%s%s=%s' % (url, separator, param, value)
                separator = '&'
        return url

    def buckets(self, buckets=None):
        '''Get or set the buckets for this event.

        The result is cached; with encoding enabled (and not explicitly
        disabled via self.encoded = False) the buckets are parsed from the
        name itself, otherwise they are derived from a hash of the name.
        '''
        if buckets is not None:
            self._buckets = buckets
            return
        if self._buckets is not None:
            return self._buckets
        self._buckets = {}
        if self.encoded is not False and self._client.config['encode_name']:
            self._get_encoded_buckets()
        else:
            self._get_buckets()
        return self._buckets

    def _get_buckets(self):
        '''Get buckets for a name.'''
        # Use the first 32 bits of the MD5 of the name as a stable hash to
        # pick one weighted bucket per cluster.
        name_hash = hashlib.md5(self.name).hexdigest()  # pylint: disable=E1101
        name_hash = int(name_hash[:8], 16)
        for cluster in xrange(len(self._client.weighted_clusters)):
            weighted_cluster = self._client.weighted_clusters[cluster]
            bucket = weighted_cluster[name_hash % len(weighted_cluster)]
            self._buckets[cluster] = bucket

    def _get_encoded_buckets(self):
        '''Get buckets for an encoded name.

        Encoded names look like <version><bb><bb>..._<original name> where
        each <bb> is a two character base-62 bucket, one per cluster.
        '''
        # Only name version 0 is understood.
        if clcommon.anybase.decode(self.name[0], 62) != 0:
            raise clblob.InvalidRequest(_('Name version not valid: %s') %
                self.name)
        buckets = self.name[1:].split('_', 1)[0]
        if len(buckets) % 2 != 0:
            raise clblob.InvalidRequest(_('Name bucket list corrupt: %s') %
                self.name)
        # Split the bucket list into two-character base-62 chunks.
        buckets = [buckets[offset:offset + 2]
            for offset in xrange(0, len(buckets), 2)]
        for cluster, bucket in enumerate(buckets):
            self._buckets[cluster] = clcommon.anybase.decode(bucket, 62)

    def replicas(self, replicas=None):
        '''Get or set the replicas for this event.'''
        if replicas is not None:
            self._replicas = replicas
            return
        if self._replicas is None:
            self._get_replicas()
        return self._replicas

    def _get_replicas(self):
        '''Get a preferred list of replicas for the given buckets. This
        will ignore replicas in other clusters if a cluster is configured,
        as well as the local replica if the client is a replica.'''
        self._replicas = {}
        self._is_local = False
        for cluster, bucket in self.buckets().iteritems():
            if self._client.cluster is None or self._client.cluster == cluster:
                if self._client.bucket == bucket:
                    self._is_local = True
                bucket = self._client.config['clusters'][cluster][bucket]
                for replica in bucket['replicas']:
                    if self._client.replica != replica:
                        self._replicas[replica] = True

    @property
    def is_local(self):
        '''Check to see if the local replica can handle this event.'''
        if self._is_local is None:
            self._get_replicas()
        return self._is_local

    @property
    def info(self):
        '''Make an info dictionary for responses.'''
        return dict(name=self.name, modified=self.modified,
            deleted=self.deleted, modified_deleted=self.modified_deleted,
            buckets=self.buckets())
class Get(Event):
    '''Event for tracking getting a blob.'''

    params = ['response']

    def __init__(self, client, name, response):
        timeout = client.config['request_timeout']
        super(Get, self).__init__(client, 'get', name, timeout)
        self.response = response
        # Raw data responses are returned as-is instead of being parsed.
        if response == 'data':
            self.parse_response = False
class Delete(Event):
    '''Event for tracking deleting a blob.'''

    params = ['deleted', 'modified_deleted', 'replicate']

    def __init__(self, client, name, replicate):
        timeout = client.config['request_timeout']
        super(Delete, self).__init__(client, 'delete', name, timeout)
        self.replicate = replicate
class Put(Event):
    '''Event for tracking putting a blob.'''

    params = ['modified', 'deleted', 'modified_deleted', 'replicate',
        'encoded']

    def __init__(self, client, name, replicate, encoded):
        timeout = client.config['request_timeout']
        super(Put, self).__init__(client, 'put', name, timeout)
        self.replicate = replicate
        self.encoded = encoded
        # Encode the name once, and only when the client is configured to.
        if encoded is False and client.config['encode_name']:
            self._encode_name()

    def _encode_name(self, version=0):
        '''Prefix the name with a version digit plus the base-62 bucket of
        every cluster, then mark the event as encoded.'''
        pieces = [clcommon.anybase.encode(version, 62)]
        for _cluster, bucket in sorted(self.buckets().iteritems()):
            pieces.append(clcommon.anybase.encode(bucket, 62).zfill(2))
        self.name = '%s_%s' % (''.join(pieces), self.name)
        self.encoded = True
class Admin(Event):
    '''Event for tracking various admin tasks.'''

    def __init__(self, client, method, replica=None):
        # Fall back to the client's own replica when none is given.
        replica = replica or client.replica
        if replica is None:
            raise clblob.RequestError(_('Must give replica'))
        if replica not in client.config['replicas']:
            raise clblob.RequestError(_('Unknown replica: %s') % replica)
        name = '_%s/%s' % (method, replica)
        super(Admin, self).__init__(client, method, name,
            client.config['admin_timeout'], 'GET')
        self.replica = replica
class ConfigCheck(Event):
    '''Event for tracking configcheck requests.'''

    params = ['brief', 'tolerance']

    def __init__(self, client, replica=None):
        # Unlike Admin, a missing replica is allowed here.
        target = replica or client.replica
        known = client.config['replicas']
        if target is not None and target not in known:
            raise clblob.RequestError(_('Unknown replica: %s') % target)
        super(ConfigCheck, self).__init__(client, 'configcheck',
            '_configcheck/%s' % target, client.config['request_timeout'],
            'PUT')
        self.replica = target
        # Optional query parameters, filled in by the caller before dispatch.
        self.brief = None
        self.tolerance = None
class List(Admin):
    '''Event for tracking list requests.'''

    params = ['modified_start', 'modified_stop', 'checksum', 'checksum_modulo']

    def __init__(self, client, replica=None):
        super(List, self).__init__(client, 'list', replica)
        # All query parameters start unset; callers assign them as needed.
        for param in self.params:
            setattr(self, param, None)
class Sync(Admin):
    '''Event for tracking sync requests.'''
    # (Docstring fixed: it previously said "list requests", copy-pasted
    # from the List class above.)

    params = ['source', 'modified_start', 'modified_stop']

    def __init__(self, client, replica=None):
        super(Sync, self).__init__(client, 'sync', replica)
        # Optional query parameters, set by the caller before dispatch.
        self.source = None
        self.modified_start = None
        self.modified_stop = None
| apache-2.0 |
kans/birgo | deps/breakpad/src/third_party/protobuf/protobuf/python/google/protobuf/internal/wire_format_test.py | 571 | 10848 | #! /usr/bin/python
#
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# http://code.google.com/p/protobuf/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Test for google.protobuf.internal.wire_format."""
__author__ = '[email protected] (Will Robinson)'
import unittest
from google.protobuf import message
from google.protobuf.internal import wire_format
class WireFormatTest(unittest.TestCase):
    """Tests for the pure-Python wire_format helpers: tag packing and
    unpacking, ZigZag transforms, and serialized byte-size calculations."""

    def testPackTag(self):
        """PackTag combines (field_number << 3) | wire_type; out-of-range
        wire types must raise EncodeError."""
        field_number = 0xabc
        tag_type = 2
        self.assertEqual((field_number << 3) | tag_type,
                         wire_format.PackTag(field_number, tag_type))
        PackTag = wire_format.PackTag
        # Wire type too high (valid wire types are 0..5).
        self.assertRaises(message.EncodeError, PackTag, field_number, 6)
        # Wire type too low.
        self.assertRaises(message.EncodeError, PackTag, field_number, -1)

    def testUnpackTag(self):
        """UnpackTag inverts PackTag; non-integer input raises TypeError."""
        # Test field numbers that will require various varint sizes.
        for expected_field_number in (1, 15, 16, 2047, 2048):
            for expected_wire_type in range(6):  # Highest-numbered wiretype is 5.
                field_number, wire_type = wire_format.UnpackTag(
                    wire_format.PackTag(expected_field_number, expected_wire_type))
                self.assertEqual(expected_field_number, field_number)
                self.assertEqual(expected_wire_type, wire_type)

        self.assertRaises(TypeError, wire_format.UnpackTag, None)
        self.assertRaises(TypeError, wire_format.UnpackTag, 'abc')
        self.assertRaises(TypeError, wire_format.UnpackTag, 0.0)
        self.assertRaises(TypeError, wire_format.UnpackTag, object())

    def testZigZagEncode(self):
        """ZigZagEncode interleaves negative and positive values
        (0, -1, 1, -2, 2, ...) onto the unsigned integers."""
        Z = wire_format.ZigZagEncode
        self.assertEqual(0, Z(0))
        self.assertEqual(1, Z(-1))
        self.assertEqual(2, Z(1))
        self.assertEqual(3, Z(-2))
        self.assertEqual(4, Z(2))
        # 32-bit and 64-bit extremes.
        self.assertEqual(0xfffffffe, Z(0x7fffffff))
        self.assertEqual(0xffffffff, Z(-0x80000000))
        self.assertEqual(0xfffffffffffffffe, Z(0x7fffffffffffffff))
        self.assertEqual(0xffffffffffffffff, Z(-0x8000000000000000))

        self.assertRaises(TypeError, Z, None)
        self.assertRaises(TypeError, Z, 'abcd')
        self.assertRaises(TypeError, Z, 0.0)
        self.assertRaises(TypeError, Z, object())

    def testZigZagDecode(self):
        """ZigZagDecode is the exact inverse of ZigZagEncode."""
        Z = wire_format.ZigZagDecode
        self.assertEqual(0, Z(0))
        self.assertEqual(-1, Z(1))
        self.assertEqual(1, Z(2))
        self.assertEqual(-2, Z(3))
        self.assertEqual(2, Z(4))
        # 32-bit and 64-bit extremes.
        self.assertEqual(0x7fffffff, Z(0xfffffffe))
        self.assertEqual(-0x80000000, Z(0xffffffff))
        self.assertEqual(0x7fffffffffffffff, Z(0xfffffffffffffffe))
        self.assertEqual(-0x8000000000000000, Z(0xffffffffffffffff))

        self.assertRaises(TypeError, Z, None)
        self.assertRaises(TypeError, Z, 'abcd')
        self.assertRaises(TypeError, Z, 0.0)
        self.assertRaises(TypeError, Z, object())

    def NumericByteSizeTestHelper(self, byte_size_fn, value, expected_value_size):
        """Assert byte_size_fn(field_number, value) equals the tag size
        plus expected_value_size, across tags of several varint widths."""
        # Use field numbers that cause various byte sizes for the tag information.
        for field_number, tag_bytes in ((15, 1), (16, 2), (2047, 2), (2048, 3)):
            expected_size = expected_value_size + tag_bytes
            actual_size = byte_size_fn(field_number, value)
            self.assertEqual(expected_size, actual_size,
                             'byte_size_fn: %s, field_number: %d, value: %r\n'
                             'Expected: %d, Actual: %d' % (
                byte_size_fn, field_number, value, expected_size, actual_size))

    def testByteSizeFunctions(self):
        """Exercise every *ByteSize() helper against known encodings."""
        # Test all numeric *ByteSize() functions.
        NUMERIC_ARGS = [
            # Int32ByteSize().
            [wire_format.Int32ByteSize, 0, 1],
            [wire_format.Int32ByteSize, 127, 1],
            [wire_format.Int32ByteSize, 128, 2],
            [wire_format.Int32ByteSize, -1, 10],
            # Int64ByteSize().
            [wire_format.Int64ByteSize, 0, 1],
            [wire_format.Int64ByteSize, 127, 1],
            [wire_format.Int64ByteSize, 128, 2],
            [wire_format.Int64ByteSize, -1, 10],
            # UInt32ByteSize().
            [wire_format.UInt32ByteSize, 0, 1],
            [wire_format.UInt32ByteSize, 127, 1],
            [wire_format.UInt32ByteSize, 128, 2],
            [wire_format.UInt32ByteSize, wire_format.UINT32_MAX, 5],
            # UInt64ByteSize().
            [wire_format.UInt64ByteSize, 0, 1],
            [wire_format.UInt64ByteSize, 127, 1],
            [wire_format.UInt64ByteSize, 128, 2],
            [wire_format.UInt64ByteSize, wire_format.UINT64_MAX, 10],
            # SInt32ByteSize().
            [wire_format.SInt32ByteSize, 0, 1],
            [wire_format.SInt32ByteSize, -1, 1],
            [wire_format.SInt32ByteSize, 1, 1],
            [wire_format.SInt32ByteSize, -63, 1],
            [wire_format.SInt32ByteSize, 63, 1],
            [wire_format.SInt32ByteSize, -64, 1],
            [wire_format.SInt32ByteSize, 64, 2],
            # SInt64ByteSize().
            [wire_format.SInt64ByteSize, 0, 1],
            [wire_format.SInt64ByteSize, -1, 1],
            [wire_format.SInt64ByteSize, 1, 1],
            [wire_format.SInt64ByteSize, -63, 1],
            [wire_format.SInt64ByteSize, 63, 1],
            [wire_format.SInt64ByteSize, -64, 1],
            [wire_format.SInt64ByteSize, 64, 2],
            # Fixed32ByteSize().
            [wire_format.Fixed32ByteSize, 0, 4],
            [wire_format.Fixed32ByteSize, wire_format.UINT32_MAX, 4],
            # Fixed64ByteSize().
            [wire_format.Fixed64ByteSize, 0, 8],
            [wire_format.Fixed64ByteSize, wire_format.UINT64_MAX, 8],
            # SFixed32ByteSize().
            [wire_format.SFixed32ByteSize, 0, 4],
            [wire_format.SFixed32ByteSize, wire_format.INT32_MIN, 4],
            [wire_format.SFixed32ByteSize, wire_format.INT32_MAX, 4],
            # SFixed64ByteSize().
            [wire_format.SFixed64ByteSize, 0, 8],
            [wire_format.SFixed64ByteSize, wire_format.INT64_MIN, 8],
            [wire_format.SFixed64ByteSize, wire_format.INT64_MAX, 8],
            # FloatByteSize().
            [wire_format.FloatByteSize, 0.0, 4],
            [wire_format.FloatByteSize, 1000000000.0, 4],
            [wire_format.FloatByteSize, -1000000000.0, 4],
            # DoubleByteSize().
            [wire_format.DoubleByteSize, 0.0, 8],
            [wire_format.DoubleByteSize, 1000000000.0, 8],
            [wire_format.DoubleByteSize, -1000000000.0, 8],
            # BoolByteSize().
            [wire_format.BoolByteSize, False, 1],
            [wire_format.BoolByteSize, True, 1],
            # EnumByteSize().
            [wire_format.EnumByteSize, 0, 1],
            [wire_format.EnumByteSize, 127, 1],
            [wire_format.EnumByteSize, 128, 2],
            [wire_format.EnumByteSize, wire_format.UINT32_MAX, 5],
            ]
        for args in NUMERIC_ARGS:
            self.NumericByteSizeTestHelper(*args)

        # Test strings and bytes.
        for byte_size_fn in (wire_format.StringByteSize, wire_format.BytesByteSize):
            # 1 byte for tag, 1 byte for length, 3 bytes for contents.
            self.assertEqual(5, byte_size_fn(10, 'abc'))
            # 2 bytes for tag, 1 byte for length, 3 bytes for contents.
            self.assertEqual(6, byte_size_fn(16, 'abc'))
            # 2 bytes for tag, 2 bytes for length, 128 bytes for contents.
            self.assertEqual(132, byte_size_fn(16, 'a' * 128))

        # Test UTF-8 string byte size calculation.
        # 1 byte for tag, 1 byte for length, 8 bytes for content.
        self.assertEqual(10, wire_format.StringByteSize(
            5, unicode('\xd0\xa2\xd0\xb5\xd1\x81\xd1\x82', 'utf-8')))

        class MockMessage(object):
            # Minimal stand-in exposing only the ByteSize() protocol.
            def __init__(self, byte_size):
                self.byte_size = byte_size
            def ByteSize(self):
                return self.byte_size

        message_byte_size = 10
        mock_message = MockMessage(byte_size=message_byte_size)
        # Test groups.
        # (2 * 1) bytes for begin and end tags, plus message_byte_size.
        self.assertEqual(2 + message_byte_size,
                         wire_format.GroupByteSize(1, mock_message))
        # (2 * 2) bytes for begin and end tags, plus message_byte_size.
        self.assertEqual(4 + message_byte_size,
                         wire_format.GroupByteSize(16, mock_message))

        # Test messages.
        # 1 byte for tag, plus 1 byte for length, plus contents.
        self.assertEqual(2 + mock_message.byte_size,
                         wire_format.MessageByteSize(1, mock_message))
        # 2 bytes for tag, plus 1 byte for length, plus contents.
        self.assertEqual(3 + mock_message.byte_size,
                         wire_format.MessageByteSize(16, mock_message))
        # 2 bytes for tag, plus 2 bytes for length, plus contents.
        mock_message.byte_size = 128
        self.assertEqual(4 + mock_message.byte_size,
                         wire_format.MessageByteSize(16, mock_message))

        # Test message set item byte size.
        # 4 bytes for tags, plus 1 byte for length, plus 1 byte for type_id,
        # plus contents.
        mock_message.byte_size = 10
        self.assertEqual(mock_message.byte_size + 6,
                         wire_format.MessageSetItemByteSize(1, mock_message))

        # 4 bytes for tags, plus 2 bytes for length, plus 1 byte for type_id,
        # plus contents.
        mock_message.byte_size = 128
        self.assertEqual(mock_message.byte_size + 7,
                         wire_format.MessageSetItemByteSize(1, mock_message))

        # 4 bytes for tags, plus 2 bytes for length, plus 2 byte for type_id,
        # plus contents.
        self.assertEqual(mock_message.byte_size + 8,
                         wire_format.MessageSetItemByteSize(128, mock_message))

        # Too-long varint.
        self.assertRaises(message.EncodeError,
                          wire_format.UInt64ByteSize, 1, 1 << 128)
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| apache-2.0 |
tempbottle/kbengine | kbe/res/scripts/common/Lib/test/test_urllib2net.py | 60 | 12676 | import unittest
from test import support
from test.test_urllib2 import sanepathname2url
import os
import socket
import urllib.error
import urllib.request
import sys
try:
import ssl
except ImportError:
ssl = None
# Skip the whole module unless the 'network' test resource is enabled.
support.requires("network")

# Per-request timeout handed to urlopen() calls made by these tests.
TIMEOUT = 60  # seconds
def _retry_thrice(func, exc, *args, **kwargs):
for i in range(3):
try:
return func(*args, **kwargs)
except exc as e:
last_exc = e
continue
except:
raise
raise last_exc
def _wrap_with_retry_thrice(func, exc):
    """Bind *func* and *exc* into a callable that retries three times."""
    def wrapped(*args, **kwargs):
        # Delegate to the module-level retry helper.
        result = _retry_thrice(func, exc, *args, **kwargs)
        return result
    return wrapped
# Connecting to remote hosts is flaky.  Make it more robust by retrying
# the connection several times.  All tests below go through this wrapper
# instead of calling urllib.request.urlopen directly.
_urlopen_with_retry = _wrap_with_retry_thrice(urllib.request.urlopen,
                                              urllib.error.URLError)
class AuthTests(unittest.TestCase):
    """Tests urllib2 authentication features.

    Currently empty: the only test is disabled below because no page on
    python.org is available for exercising HTTP authentication.
    """

    ## Disabled at the moment since there is no page under python.org which
    ## could be used to HTTP authentication.
    #
    # def test_basic_auth(self):
    #     import http.client
    #
    #     test_url = "http://www.python.org/test/test_urllib2/basic_auth"
    #     test_hostport = "www.python.org"
    #     test_realm = 'Test Realm'
    #     test_user = 'test.test_urllib2net'
    #     test_password = 'blah'
    #
    #     # failure
    #     try:
    #         _urlopen_with_retry(test_url)
    #     except urllib2.HTTPError, exc:
    #         self.assertEqual(exc.code, 401)
    #     else:
    #         self.fail("urlopen() should have failed with 401")
    #
    #     # success
    #     auth_handler = urllib2.HTTPBasicAuthHandler()
    #     auth_handler.add_password(test_realm, test_hostport,
    #                               test_user, test_password)
    #     opener = urllib2.build_opener(auth_handler)
    #     f = opener.open('http://localhost/')
    #     response = _urlopen_with_retry("http://www.python.org/")
    #
    #     # The 'userinfo' URL component is deprecated by RFC 3986 for security
    #     # reasons, let's not implement it!  (it's already implemented for proxy
    #     # specification strings (that is, URLs or authorities specifying a
    #     # proxy), so we must keep that)
    #     self.assertRaises(http.client.InvalidURL,
    #                       urllib2.urlopen, "http://evil:[email protected]")
class CloseSocketTest(unittest.TestCase):
    """Verify response.close() releases the underlying socket."""

    def test_close(self):
        # calling .close() on urllib2's response objects should close the
        # underlying socket
        url = "http://www.example.com/"
        with support.transient_internet(url):
            response = _urlopen_with_retry(url)
            # response.fp is the buffered socket file object.
            sock = response.fp
            self.assertFalse(sock.closed)
            response.close()
            self.assertTrue(sock.closed)
class OtherNetworkTests(unittest.TestCase):
    def setUp(self):
        # Flip the 0 to 1 to get urllib handler debug logging while
        # developing these tests.
        if 0:  # for debugging
            import logging
            logger = logging.getLogger("test_urllib2net")
            logger.addHandler(logging.StreamHandler())
# XXX The rest of these tests aren't very good -- they don't check much.
# They do sometimes catch some major disasters, though.
    def test_ftp(self):
        """Fetch files over FTP; the nonexistent file must raise URLError."""
        urls = [
            'ftp://ftp.debian.org/debian/README',
            # (url, request data, expected exception) triple.
            ('ftp://ftp.debian.org/debian/non-existent-file',
             None, urllib.error.URLError),
            'ftp://gatekeeper.research.compaq.com/pub/DEC/SRC'
            '/research-reports/00README-Legal-Rules-Regs',
            ]
        self._test_urls(urls, self._extra_handlers())
def test_file(self):
TESTFN = support.TESTFN
f = open(TESTFN, 'w')
try:
f.write('hi there\n')
f.close()
urls = [
'file:' + sanepathname2url(os.path.abspath(TESTFN)),
('file:///nonsensename/etc/passwd', None,
urllib.error.URLError),
]
self._test_urls(urls, self._extra_handlers(), retry=True)
finally:
os.remove(TESTFN)
self.assertRaises(ValueError, urllib.request.urlopen,'./relative_path/to/file')
# XXX Following test depends on machine configurations that are internal
# to CNRI. Need to set up a public server with the right authentication
# configuration for test purposes.
## def test_cnri(self):
## if socket.gethostname() == 'bitdiddle':
## localhost = 'bitdiddle.cnri.reston.va.us'
## elif socket.gethostname() == 'bitdiddle.concentric.net':
## localhost = 'localhost'
## else:
## localhost = None
## if localhost is not None:
## urls = [
## 'file://%s/etc/passwd' % localhost,
## 'http://%s/simple/' % localhost,
## 'http://%s/digest/' % localhost,
## 'http://%s/not/found.h' % localhost,
## ]
## bauth = HTTPBasicAuthHandler()
## bauth.add_password('basic_test_realm', localhost, 'jhylton',
## 'password')
## dauth = HTTPDigestAuthHandler()
## dauth.add_password('digest_test_realm', localhost, 'jhylton',
## 'password')
## self._test_urls(urls, self._extra_handlers()+[bauth, dauth])
def test_urlwithfrag(self):
urlwith_frag = "https://docs.python.org/2/glossary.html#glossary"
with support.transient_internet(urlwith_frag):
req = urllib.request.Request(urlwith_frag)
res = urllib.request.urlopen(req)
self.assertEqual(res.geturl(),
"https://docs.python.org/2/glossary.html#glossary")
    def test_redirect_url_withfrag(self):
        """After following a redirect, geturl() must report the final URL."""
        redirect_url_with_frag = "http://bit.ly/1iSHToT"
        with support.transient_internet(redirect_url_with_frag):
            req = urllib.request.Request(redirect_url_with_frag)
            res = urllib.request.urlopen(req)
            self.assertEqual(res.geturl(),
                    "https://docs.python.org/3.4/glossary.html#term-global-interpreter-lock")
    def test_custom_headers(self):
        """A default User-agent is added on open; a custom one must be preserved."""
        url = "http://www.example.com"
        with support.transient_internet(url):
            opener = urllib.request.build_opener()
            request = urllib.request.Request(url)
            # A fresh Request carries no headers until the opener adds them.
            self.assertFalse(request.header_items())
            opener.open(request)
            self.assertTrue(request.header_items())
            self.assertTrue(request.has_header('User-agent'))
            request.add_header('User-Agent','Test-Agent')
            opener.open(request)
            self.assertEqual(request.get_header('User-agent'),'Test-Agent')
    def test_sites_no_connection_close(self):
        # Some sites do not send Connection: close header.
        # Verify that those work properly. (#issue12576)

        URL = 'http://www.imdb.com' # mangles Connection:close

        with support.transient_internet(URL):
            try:
                with urllib.request.urlopen(URL) as res:
                    pass
            except ValueError as e:
                # NOTE(review): the backslash line-continuation embeds the next
                # line's leading whitespace inside the failure message --
                # confirm whether that message text is intended.
                self.fail("urlopen failed for site not sending \
                          Connection:close")
            else:
                self.assertTrue(res)

            req = urllib.request.urlopen(URL)
            res = req.read()
            self.assertTrue(res)
def _test_urls(self, urls, handlers, retry=True):
import time
import logging
debug = logging.getLogger("test_urllib2").debug
urlopen = urllib.request.build_opener(*handlers).open
if retry:
urlopen = _wrap_with_retry_thrice(urlopen, urllib.error.URLError)
for url in urls:
with self.subTest(url=url):
if isinstance(url, tuple):
url, req, expected_err = url
else:
req = expected_err = None
with support.transient_internet(url):
try:
f = urlopen(url, req, TIMEOUT)
except OSError as err:
if expected_err:
msg = ("Didn't get expected error(s) %s for %s %s, got %s: %s" %
(expected_err, url, req, type(err), err))
self.assertIsInstance(err, expected_err, msg)
else:
raise
except urllib.error.URLError as err:
if isinstance(err[0], socket.timeout):
print("<timeout: %s>" % url, file=sys.stderr)
continue
else:
raise
else:
try:
with support.time_out, \
support.socket_peer_reset, \
support.ioerror_peer_reset:
buf = f.read()
debug("read %d bytes" % len(buf))
except socket.timeout:
print("<timeout: %s>" % url, file=sys.stderr)
f.close()
time.sleep(0.1)
def _extra_handlers(self):
handlers = []
cfh = urllib.request.CacheFTPHandler()
self.addCleanup(cfh.clear_cache)
cfh.setTimeout(1)
handlers.append(cfh)
return handlers
class TimeoutTest(unittest.TestCase):
    """Check that urlopen honors explicit, default, and absent socket timeouts."""

    def test_http_basic(self):
        # No global default set: the connection's socket must have no timeout.
        self.assertIsNone(socket.getdefaulttimeout())
        url = "http://www.example.com"
        with support.transient_internet(url, timeout=None):
            u = _urlopen_with_retry(url)
            self.addCleanup(u.close)
            self.assertIsNone(u.fp.raw._sock.gettimeout())

    def test_http_default_timeout(self):
        self.assertIsNone(socket.getdefaulttimeout())
        url = "http://www.example.com"
        with support.transient_internet(url):
            # The global default in force at open time sticks to the socket.
            socket.setdefaulttimeout(60)
            try:
                u = _urlopen_with_retry(url)
                self.addCleanup(u.close)
            finally:
                socket.setdefaulttimeout(None)
            self.assertEqual(u.fp.raw._sock.gettimeout(), 60)

    def test_http_no_timeout(self):
        self.assertIsNone(socket.getdefaulttimeout())
        url = "http://www.example.com"
        with support.transient_internet(url):
            socket.setdefaulttimeout(60)
            try:
                # An explicit timeout=None overrides the global default.
                u = _urlopen_with_retry(url, timeout=None)
                self.addCleanup(u.close)
            finally:
                socket.setdefaulttimeout(None)
            self.assertIsNone(u.fp.raw._sock.gettimeout())

    def test_http_timeout(self):
        url = "http://www.example.com"
        with support.transient_internet(url):
            u = _urlopen_with_retry(url, timeout=120)
            self.addCleanup(u.close)
            self.assertEqual(u.fp.raw._sock.gettimeout(), 120)

    FTP_HOST = "ftp://ftp.mirror.nl/pub/gnu/"

    def test_ftp_basic(self):
        self.assertIsNone(socket.getdefaulttimeout())
        with support.transient_internet(self.FTP_HOST, timeout=None):
            u = _urlopen_with_retry(self.FTP_HOST)
            self.addCleanup(u.close)
            # FTP responses wrap the socket one level deeper than HTTP (fp.fp).
            self.assertIsNone(u.fp.fp.raw._sock.gettimeout())

    def test_ftp_default_timeout(self):
        self.assertIsNone(socket.getdefaulttimeout())
        with support.transient_internet(self.FTP_HOST):
            socket.setdefaulttimeout(60)
            try:
                u = _urlopen_with_retry(self.FTP_HOST)
                self.addCleanup(u.close)
            finally:
                socket.setdefaulttimeout(None)
            self.assertEqual(u.fp.fp.raw._sock.gettimeout(), 60)

    def test_ftp_no_timeout(self):
        self.assertIsNone(socket.getdefaulttimeout())
        with support.transient_internet(self.FTP_HOST):
            socket.setdefaulttimeout(60)
            try:
                u = _urlopen_with_retry(self.FTP_HOST, timeout=None)
                self.addCleanup(u.close)
            finally:
                socket.setdefaulttimeout(None)
            self.assertIsNone(u.fp.fp.raw._sock.gettimeout())

    def test_ftp_timeout(self):
        with support.transient_internet(self.FTP_HOST):
            u = _urlopen_with_retry(self.FTP_HOST, timeout=60)
            self.addCleanup(u.close)
            self.assertEqual(u.fp.fp.raw._sock.gettimeout(), 60)
# Allow running these network tests directly as a script.
if __name__ == "__main__":
    unittest.main()
| lgpl-3.0 |
ssaavedra/liquidhaskell | tests/regrtest.py | 8 | 6362 | #!/usr/bin/python
# Copyright (c) 2009 The Regents of the University of California. All rights reserved.
#
# Permission is hereby granted, without written agreement and without
# license or royalty fees, to use, copy, modify, and distribute this
# software and its documentation for any purpose, provided that the
# above copyright notice and the following two paragraphs appear in
# all copies of this software.
#
# IN NO EVENT SHALL THE UNIVERSITY OF CALIFORNIA BE LIABLE TO ANY PARTY
# FOR DIRECT, INDIRECT, SPECIAL, INCIDENTAL, OR CONSEQUENTIAL DAMAGES
# ARISING OUT OF THE USE OF THIS SOFTWARE AND ITS DOCUMENTATION, EVEN
# IF THE UNIVERSITY OF CALIFORNIA HAS BEEN ADVISED OF THE POSSIBILITY
# OF SUCH DAMAGE.
#
# THE UNIVERSITY OF CALIFORNIA SPECIFICALLY DISCLAIMS ANY WARRANTIES,
# INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY
# AND FITNESS FOR A PARTICULAR PURPOSE. THE SOFTWARE PROVIDED HEREUNDER IS
# ON AN "AS IS" BASIS, AND THE UNIVERSITY OF CALIFORNIA HAS NO OBLIGATION
# TO PROVIDE MAINTENANCE, SUPPORT, UPDATES, ENHANCEMENTS, OR MODIFICATIONS.
import time, subprocess, optparse, sys, socket, os
sys.path.append("../")
import rtest as rtest

# Command used to check each test file, and a sink for unwanted output.
solve = "liquid ".split()
null = open("/dev/null", "w")
# Timestamped, per-host log file name for this regression run.
now = (time.asctime(time.localtime(time.time()))).replace(" ","_")
logfile = "../tests/logs/regrtest_results_%s_%s" % (socket.gethostname (), now)
# Magic first-line markers recognized inside test files.
argcomment = "--! run with "
liquidcomment = "{--! run liquid with "
endcomment = "-}"
def logged_sys_call(args, out=None, err=None, dir=None):
    """Echo the command to stdout, run it via subprocess, return its exit code."""
    print "exec: " + " ".join(args)
    return subprocess.call(args, stdout=out, stderr=err, cwd=dir)
def solve_quals(dir,file,bare,time,quiet,flags,lflags):
    """Run the liquid checker on *file* inside *dir*; return its exit code."""
    if quiet: out = null
    else: out = None
    if time: time = ["time"]
    else: time = []
    if lflags: lflags = ["--" + f for f in lflags]
    hygiene_flags = []
    (dn, bn) = os.path.split(file)
    try:
        # Ensure the per-directory .liquid work directory exists.
        os.makedirs(os.path.join(dir,dn,".liquid"))
    except OSError:
        pass
    # NOTE(review): `out` is rebound to a per-test log file here, but the call
    # below passes out=None, so liquid's output goes to the console and the
    # log file stays empty -- confirm whether `out=out` was intended.
    out = open(os.path.join(dir,dn,".liquid",bn) + ".log", "w")
    rv = logged_sys_call(time + solve + flags + lflags + hygiene_flags + [file],
                         out=None, err=subprocess.STDOUT, dir=dir)
    out.close()
    return rv
def run_script(file,quiet):
    """Execute a shell-script test, optionally discarding its output."""
    if quiet: out = null
    else: out = None
    return logged_sys_call(file, out)
def getfileargs(file):
    """Return extra command-line args from *file*'s first line.

    The first line must start with the ``--! run with `` marker; otherwise an
    empty list is returned.
    """
    # `with` guarantees the handle is closed even if readline() raises,
    # unlike the original open/readline/close sequence.
    with open(file) as f:
        l = f.readline()
    if l.startswith(argcomment):
        return l[len(argcomment):].strip().split(" ")
    else:
        return []
def getliquidargs(file):
    """Return liquid-specific args from *file*'s first line.

    The first line must start with the ``{--! run liquid with `` marker; the
    closing ``-}`` token is filtered out.  Returns an empty list otherwise.
    """
    # `with` guarantees the handle is closed even if readline() raises,
    # unlike the original open/readline/close sequence.
    with open(file) as f:
        l = f.readline()
    if l.startswith(liquidcomment):
        return [arg for arg in l[len(liquidcomment):].strip().split(" ")
                if arg!=endcomment]
    else:
        return []
class Config (rtest.TestConfig):
    """Test configuration: runs liquid on .hs files and executes .sh scripts."""

    def __init__ (self, dargs, testdirs, logfile, threadcount):
        rtest.TestConfig.__init__ (self, testdirs, logfile, threadcount)
        # Extra default arguments prepended to each test file's own flags.
        self.dargs = dargs

    def run_test (self, dir, file):
        path = os.path.join(dir,file)
        if self.is_test(file):
            # Per-file flags come from magic first-line comments in the file.
            lflags = getliquidargs(path)
            fargs = getfileargs(path)
            fargs = self.dargs + fargs
            return solve_quals(dir, file, True, False, True, fargs, lflags)
        elif file.endswith(".sh"):
            return run_script(path, True)

    def is_test (self, file):
        return file.endswith(".hs") # or file.endswith(".lhs")
#####################################################################################
#DEFAULT
textIgnored = { "Data/Text/Axioms.hs"
, "Data/Text/Encoding/Error.hs"
, "Data/Text/Encoding/Fusion.hs"
, "Data/Text/Encoding/Fusion/Common.hs"
, "Data/Text/Encoding/Utf16.hs"
, "Data/Text/Encoding/Utf32.hs"
, "Data/Text/Encoding/Utf8.hs"
, "Data/Text/Fusion/CaseMapping.hs"
, "Data/Text/Fusion/Common.hs"
, "Data/Text/Fusion/Internal.hs"
, "Data/Text/IO.hs"
, "Data/Text/IO/Internal.hs"
, "Data/Text/Lazy/Builder/Functions.hs"
, "Data/Text/Lazy/Builder/Int.hs"
, "Data/Text/Lazy/Builder/Int/Digits.hs"
, "Data/Text/Lazy/Builder/Internal.hs"
, "Data/Text/Lazy/Builder/RealFloat.hs"
, "Data/Text/Lazy/Builder/RealFloat/Functions.hs"
, "Data/Text/Lazy/Encoding/Fusion.hs"
, "Data/Text/Lazy/IO.hs"
, "Data/Text/Lazy/Read.hs"
, "Data/Text/Read.hs"
, "Data/Text/Unsafe/Base.hs"
, "Data/Text/UnsafeShift.hs"
, "Data/Text/Util.hs"
}
demosIgnored = { "Composition.hs"
, "Eval.hs"
, "Inductive.hs"
, "Loop.hs"
, "TalkingAboutSets.hs"
, "refinements101reax.hs"
}
regtestdirs = [ ("pos", {}, 0)
, ("neg", {}, 1)
, ("crash", {}, 2)
, ("parser/pos", {}, 0)
, ("error_messages/pos", {}, 0)
, ("error_messages/crash", {}, 2)
]
benchtestdirs = [ ("../web/demos", demosIgnored, 0)
, ("../benchmarks/esop2013-submission", {"Base0.hs"}, 0)
, ("../benchmarks/bytestring-0.9.2.1", {}, 0)
, ("../benchmarks/text-0.11.2.3", textIgnored, 0)
, ("../benchmarks/vector-algorithms-0.5.4.2", {}, 0)
, ("../benchmarks/hscolour-1.20.0.0", {}, 0)
]
parser = optparse.OptionParser()
parser.add_option("-a", "--all", action="store_true", dest="alltests", help="run all tests")
parser.add_option("-t", "--threads", dest="threadcount", default=1, type=int, help="spawn n threads")
parser.add_option("-o", "--opts", dest="opts", default=[], action='append', type=str, help="additional arguments to liquid")
parser.disable_interspersed_args()
options, args = parser.parse_args()
print "options =", options
print "args =", args
def testdirs():
    """Return test directories for this run (benchmarks included with --all)."""
    # NOTE: the name `testdirs` is rebound to this function's result right
    # after the definition; `global` permits that deliberate shadowing.
    global testdirs
    if options.alltests:
        return regtestdirs + benchtestdirs
    else:
        return regtestdirs
testdirs = testdirs()
# Run each test directory's cleanup script before starting.
clean = os.path.abspath("../cleanup")
[os.system(("cd %s; %s; cd ../" % (d,clean))) for (d,_,_) in testdirs]
# Exit with the runner's status so CI can detect failures.
runner = rtest.TestRunner (Config (options.opts, testdirs, logfile, options.threadcount))
sys.exit(runner.run())
| bsd-3-clause |
revmischa/boto | boto/s3/__init__.py | 114 | 2836 | # Copyright (c) 2006-2012 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2010, Eucalyptus Systems, Inc.
# Copyright (c) 2014, Steven Richards <[email protected]>
# All rights reserved.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
from boto.regioninfo import RegionInfo, get_regions
class S3RegionInfo(RegionInfo):
    """RegionInfo whose connect() pins the connection host to this region's endpoint."""

    def connect(self, **kw_params):
        """
        Connect to this Region's endpoint. Returns an connection
        object pointing to the endpoint associated with this region.
        You may pass any of the arguments accepted by the connection
        class's constructor as keyword arguments and they will be
        passed along to the connection object.

        :rtype: Connection object
        :return: The connection to this regions endpoint
        """
        # Implicitly returns None when no connection class is configured.
        if self.connection_cls:
            return self.connection_cls(host=self.endpoint, **kw_params)
def regions():
    """
    Get all available regions for the Amazon S3 service.

    :rtype: list
    :return: A list of :class:`boto.regioninfo.RegionInfo`
    """
    # Imported here (not at module level) to avoid a circular import with
    # boto.s3.connection.
    from boto.s3.connection import S3Connection
    return get_regions(
        's3',
        region_cls=S3RegionInfo,
        connection_cls=S3Connection
    )
def connect_to_region(region_name, **kw_params):
    """Return an S3Connection for *region_name*, or None if the name is unknown.

    A non-empty ``host`` keyword overrides the endpoint of the *first* region
    in the list and connects to it immediately, regardless of *region_name*;
    an empty/None ``host`` is simply dropped and normal region matching
    proceeds.
    """
    for region in regions():
        if 'host' in kw_params.keys():
            # Make sure the host specified is not nothing
            if kw_params['host'] not in ['', None]:
                region.endpoint = kw_params['host']
                del kw_params['host']
                return region.connect(**kw_params)
            # If it is nothing then remove it from kw_params and proceed with default
            else:
                del kw_params['host']
        if region.name == region_name:
            return region.connect(**kw_params)
    return None
| mit |
girving/tensorflow | tensorflow/python/framework/graph_io.py | 6 | 2539 | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utility functions for reading/writing graphs."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
import os.path
from google.protobuf import text_format
from tensorflow.python.framework import ops
from tensorflow.python.lib.io import file_io
from tensorflow.python.util.tf_export import tf_export
@tf_export('io.write_graph', 'train.write_graph')
def write_graph(graph_or_graph_def, logdir, name, as_text=True):
  """Writes a graph proto to a file.

  The graph is written as a text proto unless `as_text` is `False`.

  ```python
  v = tf.Variable(0, name='my_variable')
  sess = tf.Session()
  tf.train.write_graph(sess.graph_def, '/tmp/my-model', 'train.pbtxt')
  ```

  or

  ```python
  v = tf.Variable(0, name='my_variable')
  sess = tf.Session()
  tf.train.write_graph(sess.graph, '/tmp/my-model', 'train.pbtxt')
  ```

  Args:
    graph_or_graph_def: A `Graph` or a `GraphDef` protocol buffer.
    logdir: Directory where to write the graph. This can refer to remote
      filesystems, such as Google Cloud Storage (GCS).
    name: Filename for the graph.
    as_text: If `True`, writes the graph as an ASCII proto.

  Returns:
    The path of the output proto file.
  """
  # Accept either a Graph or an already-built GraphDef.
  graph_def = (graph_or_graph_def.as_graph_def()
               if isinstance(graph_or_graph_def, ops.Graph)
               else graph_or_graph_def)

  # gcs does not have the concept of directory at the moment.
  if not file_io.file_exists(logdir) and not logdir.startswith('gs:'):
    file_io.recursive_create_dir(logdir)
  path = os.path.join(logdir, name)
  serialized = (text_format.MessageToString(graph_def) if as_text
                else graph_def.SerializeToString())
  # Atomic write so readers never observe a partially-written file.
  file_io.atomic_write_string_to_file(path, serialized)
  return path
| apache-2.0 |
zak-k/cis | cis/test/plot_tests/idiff.py | 3 | 2350 | #!/usr/bin/env python
# (C) British Crown Copyright 2010 - 2014, Met Office
#
# This file was heavily influenced by a similar file in the iris package.
"""
Provides "diff-like" comparison of images.
Currently relies on matplotlib for image processing so limited to PNG format.
"""
from __future__ import (absolute_import, division, print_function)
import os.path
import shutil
import matplotlib.pyplot as plt
import matplotlib.image as mimg
import matplotlib.widgets as mwidget
def diff_viewer(expected_fname, result_fname, diff_fname):
    """Show expected/result/diff images side by side with accept/reject buttons."""
    plt.figure(figsize=(16, 16))
    plt.suptitle(os.path.basename(expected_fname))
    ax = plt.subplot(221)
    ax.imshow(mimg.imread(expected_fname))
    # Share axes so zoom/pan stays in sync across the three panels.
    ax = plt.subplot(222, sharex=ax, sharey=ax)
    ax.imshow(mimg.imread(result_fname))
    ax = plt.subplot(223, sharex=ax, sharey=ax)
    ax.imshow(mimg.imread(diff_fname))

    def accept(event):
        # removes the expected result, and move the most recent result in
        print('ACCEPTED NEW FILE: %s' % (os.path.basename(expected_fname), ))
        os.remove(expected_fname)
        shutil.copy2(result_fname, expected_fname)
        os.remove(diff_fname)
        plt.close()

    def reject(event):
        # Keep the old reference image; leave the diff file for a later pass.
        print('REJECTED: %s' % (os.path.basename(expected_fname), ))
        plt.close()

    ax_accept = plt.axes([0.6, 0.35, 0.1, 0.075])
    ax_reject = plt.axes([0.71, 0.35, 0.1, 0.075])
    bnext = mwidget.Button(ax_accept, 'Accept change')
    bnext.on_clicked(accept)
    bprev = mwidget.Button(ax_reject, 'Reject')
    bprev.on_clicked(reject)

    # Blocks until the user closes the window.
    plt.show()
def step_over_diffs():
    """Iterate failed image-comparison tests and review each via diff_viewer."""
    # Imported here to avoid a circular import at module load time.
    import cis.test.plot_tests
    image_dir = os.path.join(os.path.dirname(cis.test.plot_tests.__file__),
                             'reference', 'visual_tests')
    diff_dir = os.path.join(os.path.dirname(cis.test.plot_tests.__file__),
                            'result_image_comparison')
    for expected_fname in sorted(os.listdir(image_dir)):
        result_path = os.path.join(diff_dir, expected_fname)
        # Strip the '.png' suffix and append the failure-diff suffix.
        diff_path = result_path[:-4] + '-failed-diff.png'
        # if the test failed, there will be a diff file
        if os.path.exists(diff_path):
            expected_path = os.path.join(image_dir, expected_fname)
            diff_viewer(expected_path, result_path, diff_path)
if __name__ == '__main__':
    # Walk all failed image comparisons and let the user accept/reject each.
    step_over_diffs()
| gpl-3.0 |
macndesign/lettuce_webdriver | lettuce_webdriver/screenshot.py | 4 | 2043 | """Steps and utility functions for taking screenshots."""
import uuid
from lettuce import (
after,
step,
world,
)
import os.path
import json
def set_save_directory(base, source):
    """Sets the root save directory for saving screenshots.

    Screenshots will be saved in subdirectories under this directory by
    browser window size. """
    root = os.path.join(base, source)
    # NOTE(review): isdir+makedirs is racy if two runners start at once --
    # confirm whether concurrent runs are expected before hardening.
    if not os.path.isdir(root):
        os.makedirs(root)

    world.screenshot_root = root
def resolution_path(world):
    """Return the screenshot directory for the browser's current window size."""
    size = world.browser.get_window_size()
    dimensions = '{}x{}'.format(size['width'], size['height'])
    return os.path.join(world.screenshot_root, dimensions)
@step(r'I capture a screenshot$')
def capture_screenshot(step):
    """Save a screenshot named with a fresh UUID under the feature's directory."""
    feature = step.scenario.feature
    # Remember the file name on the step so the report hook can index it.
    step.shot_name = '{}.png'.format(uuid.uuid4())
    if getattr(feature, 'dir_path', None) is None:
        # First screenshot of this feature: pick a dir for the current resolution.
        feature.dir_path = resolution_path(world)
    if not os.path.isdir(feature.dir_path):
        os.makedirs(feature.dir_path)
    filename = os.path.join(
        feature.dir_path,
        step.shot_name,
    )
    world.browser.get_screenshot_as_file(filename)
@step(r'I capture a screenshot after (\d+) seconds?$')
def capture_screenshot_delay(step, delay):
    """Wait *delay* seconds, then capture a screenshot (see capture_screenshot)."""
    # Bug fixes vs. the original:
    #  * `time` was never imported anywhere in this module (NameError);
    #  * the regex captures `delay` as a string, so convert it for sleep();
    #  * capture_screenshot() was called without its required `step` argument.
    import time
    time.sleep(int(delay))
    capture_screenshot(step)
@after.each_feature
def record_run_feature_report(feature):
    """After each feature, write a JSON index of scenario -> screenshot names."""
    if getattr(feature, 'dir_path', None) is None:
        # No screenshots were taken for this feature; nothing to report.
        return
    feature_name_json = '{}.json'.format(os.path.splitext(
        os.path.basename(feature.described_at.file)
    )[0])
    report = {}
    for scenario in feature.scenarios:
        scenario_report = []
        for step in scenario.steps:
            # Only steps that took a screenshot carry a shot_name.
            shot_name = getattr(step, 'shot_name', None)
            if shot_name is not None:
                scenario_report.append(shot_name)
        if scenario_report:
            report[scenario.name] = scenario_report

    if report:
        with open(os.path.join(feature.dir_path, feature_name_json), 'w') as f:
            json.dump(report, f)
| mit |
rohitwaghchaure/erpnext-receipher | erpnext/patches/v5_0/update_item_and_description_again.py | 102 | 1661 | # Copyright (c) 2015, Frappe Technologies Pvt. Ltd. and Contributors
# License: GNU General Public License v3. See license.txt
import frappe
from frappe.utils import cstr
import re
def execute():
    """Patch: strip img/table markup from all Item descriptions and propagate
    the cleaned text to every transaction doctype that copied them."""
    item_details = frappe._dict()
    for d in frappe.db.sql("select name, description from `tabItem`", as_dict=1):
        description = cstr(d.description).strip()
        new_desc = extract_description(description)

        item_details.setdefault(d.name, frappe._dict({
            "old_description": description,
            "new_description": new_desc
        }))

    dt_list= ["Purchase Order Item","Supplier Quotation Item", "BOM", "BOM Explosion Item" , \
        "BOM Item", "Opportunity Item" , "Quotation Item" , "Sales Order Item" , "Delivery Note Item" , \
        "Material Request Item" , "Purchase Receipt Item" , "Stock Entry Detail"]
    for dt in dt_list:
        frappe.reload_doctype(dt)
        # Only rows whose description still contains table markup need fixing.
        records = frappe.db.sql("""select name, `{0}` as item_code, description from `tab{1}`
            where description is not null and description like '%%<table%%'"""
            .format("item" if dt=="BOM" else "item_code", dt), as_dict=1)

        count = 1
        for d in records:
            # Reuse the precomputed text when the row still matches the item
            # master; otherwise strip this row's own description.
            if d.item_code and item_details.get(d.item_code) \
                    and cstr(d.description) == item_details.get(d.item_code).old_description:
                desc = item_details.get(d.item_code).new_description
            else:
                desc = extract_description(cstr(d.description))

            frappe.db.sql("""update `tab{0}` set description = %s
                where name = %s """.format(dt), (desc, d.name))

            count += 1
            # Commit periodically to keep transactions small on big datasets.
            if count % 500 == 0:
                frappe.db.commit()
def extract_description(desc):
    """Strip layout markup (img/table/tr/td tags, opening or closing) from an
    item description and return the remaining text."""
    for tag in ("img", "table", "tr", "td"):
        # Raw string fixes the original "\<" / "\>" invalid string escapes
        # (a DeprecationWarning on Python 3); "<" and ">" need no escaping in
        # a regex, so the matched pattern is unchanged.
        desc = re.sub(r"</*{0}[^>]*>".format(tag), "", desc)
    return desc
| agpl-3.0 |
homeworkprod/byceps | byceps/services/ticketing/models/ticket_event.py | 1 | 1337 | """
byceps.services.ticketing.models.ticket_event
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
:Copyright: 2006-2019 Jochen Kupperschmidt
:License: Modified BSD, see LICENSE for details.
"""
from datetime import datetime
from typing import Any, Dict
from ....database import db, generate_uuid
from ....util.instances import ReprBuilder
from ..transfer.models import TicketID
TicketEventData = Dict[str, Any]
class TicketEvent(db.Model):
    """An event that refers to a ticket."""

    __tablename__ = 'ticket_events'

    id = db.Column(db.Uuid, default=generate_uuid, primary_key=True)
    # When the event happened (not when the row was written).
    occurred_at = db.Column(db.DateTime, nullable=False)
    event_type = db.Column(db.UnicodeText, index=True, nullable=False)
    ticket_id = db.Column(db.Uuid, db.ForeignKey('tickets.id'), index=True, nullable=False)
    # Arbitrary event-type-specific payload, stored as JSONB.
    data = db.Column(db.JSONB)

    def __init__(self, occurred_at: datetime, event_type: str,
                 ticket_id: TicketID, data: TicketEventData) -> None:
        self.occurred_at = occurred_at
        self.event_type = event_type
        self.ticket_id = ticket_id
        self.data = data

    def __repr__(self) -> str:
        return ReprBuilder(self) \
            .add_custom(repr(self.event_type)) \
            .add_with_lookup('ticket_id') \
            .add_with_lookup('data') \
            .build()
| bsd-3-clause |
sinkuri256/python-for-android | python3-alpha/python3-src/Lib/distutils/tests/test_install_lib.py | 47 | 3460 | """Tests for distutils.command.install_data."""
import sys
import os
import unittest
from distutils.command.install_lib import install_lib
from distutils.extension import Extension
from distutils.tests import support
from distutils.errors import DistutilsOptionError
from test.support import run_unittest
class InstallLibTestCase(support.TempdirManager,
                         support.LoggingSilencer,
                         support.EnvironGuard,
                         unittest.TestCase):
    """Tests for distutils' install_lib command (options and byte-compilation)."""

    def test_finalize_options(self):
        pkg_dir, dist = self.create_dist()
        cmd = install_lib(dist)

        cmd.finalize_options()
        self.assertEqual(cmd.compile, 1)
        self.assertEqual(cmd.optimize, 0)

        # optimize must be 0, 1, or 2
        cmd.optimize = 'foo'
        self.assertRaises(DistutilsOptionError, cmd.finalize_options)
        cmd.optimize = '4'
        self.assertRaises(DistutilsOptionError, cmd.finalize_options)

        cmd.optimize = '2'
        cmd.finalize_options()
        self.assertEqual(cmd.optimize, 2)

    @unittest.skipUnless(not sys.dont_write_bytecode,
                         'byte-compile not supported')
    def test_byte_compile(self):
        pkg_dir, dist = self.create_dist()
        cmd = install_lib(dist)
        cmd.compile = cmd.optimize = 1

        f = os.path.join(pkg_dir, 'foo.py')
        self.write_file(f, '# python file')
        cmd.byte_compile([f])
        # Both the .pyc (compile) and .pyo (optimize) artifacts must exist.
        self.assertTrue(os.path.exists(os.path.join(pkg_dir, 'foo.pyc')))
        self.assertTrue(os.path.exists(os.path.join(pkg_dir, 'foo.pyo')))

    def test_get_outputs(self):
        pkg_dir, dist = self.create_dist()
        cmd = install_lib(dist)

        # setting up a dist environment
        cmd.compile = cmd.optimize = 1
        cmd.install_dir = pkg_dir
        f = os.path.join(pkg_dir, 'foo.py')
        self.write_file(f, '# python file')
        cmd.distribution.py_modules = [pkg_dir]
        cmd.distribution.ext_modules = [Extension('foo', ['xxx'])]
        cmd.distribution.packages = [pkg_dir]
        cmd.distribution.script_name = 'setup.py'

        # get_output should return 4 elements
        self.assertTrue(len(cmd.get_outputs()) >= 2)

    def test_get_inputs(self):
        pkg_dir, dist = self.create_dist()
        cmd = install_lib(dist)

        # setting up a dist environment
        cmd.compile = cmd.optimize = 1
        cmd.install_dir = pkg_dir
        f = os.path.join(pkg_dir, 'foo.py')
        self.write_file(f, '# python file')
        cmd.distribution.py_modules = [pkg_dir]
        cmd.distribution.ext_modules = [Extension('foo', ['xxx'])]
        cmd.distribution.packages = [pkg_dir]
        cmd.distribution.script_name = 'setup.py'

        # get_input should return 2 elements
        self.assertEqual(len(cmd.get_inputs()), 2)

    def test_dont_write_bytecode(self):
        # makes sure byte_compile is not used
        pkg_dir, dist = self.create_dist()
        cmd = install_lib(dist)
        cmd.compile = 1
        cmd.optimize = 1

        old_dont_write_bytecode = sys.dont_write_bytecode
        sys.dont_write_bytecode = True
        try:
            cmd.byte_compile([])
        finally:
            # Always restore the interpreter-wide flag.
            sys.dont_write_bytecode = old_dont_write_bytecode

        self.assertTrue('byte-compiling is disabled' in self.logs[0][1])
def test_suite():
    """Build the suite of install_lib tests for the regression-test runner."""
    loader = unittest.TestLoader()
    return loader.loadTestsFromTestCase(InstallLibTestCase)
run_unittest(test_suite())
| apache-2.0 |
jalavik/invenio | invenio/legacy/bibingest/ingestion_package_interface.py | 13 | 17273 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011 CERN.
#
# Invenio is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307, USA.
"""The ingestion package interface."""
__revision__ = "$Id$"
from datetime import datetime
try:
from hashlib import md5
except:
import md5
from .config import CFG_BIBINGEST_VERSIONING, \
CFG_BIBINGEST_ONE_STORAGE_ENGINE_INSTANCE_PER_STORAGE_ENGINE
# ********************
# Validation functions
# ********************
class IngestionFieldValidationError(Exception):
    """Raised when an ingestion-package field fails validation."""
def positive_int(fieldname, value):
    """Validate *value* as a strictly positive integer and return it as int.

    Raises IngestionFieldValidationError when the value cannot be converted
    to int, or when it converts to zero or a negative number.
    """
    # Only the conversion goes inside try: the original wrapped the whole
    # body in a bare `except:`, which swallowed the intentionally raised
    # "negative integer" error and re-raised it with the wrong "non integer"
    # message (and would also have caught KeyboardInterrupt/SystemExit).
    try:
        value = int(value)
    except (TypeError, ValueError):
        msg = "For field name %s, received a non integer, expected a positive integer" % (fieldname,)
        raise IngestionFieldValidationError(msg)
    if value > 0:
        return value
    msg = "For field name %s, received a negative integer, expected a positive integer" % (fieldname,)
    raise IngestionFieldValidationError(msg)
def valid_string(fieldname, value):
    """Validate *value* as a non-empty string and return it unchanged.

    Raises IngestionFieldValidationError otherwise.
    """
    # NOTE(review): `unicode` only exists on Python 2.  On Python 3 a falsy
    # value or a str short-circuits before `unicode` is evaluated, but a
    # truthy non-str value would raise NameError here -- confirm the target
    # interpreter before porting.
    if not value or not (isinstance(value, str) or isinstance(value, unicode)):
        msg = "For field name %s, received an invalid or zero length string, expected a non zero length string" % (fieldname,)
        raise IngestionFieldValidationError(msg)
    else:
        return value
_STANDARD_TIME_FORMAT = "%Y-%m-%d %H:%M:%S"
def valid_date(fieldname, value):
    """Validate *value* as a datetime and return it as a formatted string.

    Accepts either a datetime.datetime instance (formatted with
    _STANDARD_TIME_FORMAT) or a string already in that format (returned
    unchanged).  Raises IngestionFieldValidationError otherwise.
    """
    if isinstance(value, datetime):
        return str(value.strftime(_STANDARD_TIME_FORMAT))
    # Narrowed from a bare `except:` (which also caught KeyboardInterrupt and
    # SystemExit); strptime raises ValueError for a bad format and TypeError
    # for a non-string.  The original also returned None silently if its
    # redundant isinstance() check ever failed -- now the parsed value is
    # always returned on success.
    try:
        datetime.strptime(value, _STANDARD_TIME_FORMAT)
    except (TypeError, ValueError):
        msg = "For field name %s, received an unrecognizable datetime format '%s', expected a string like '2002-04-18 14:57:11' or an instance of datetime.datetime" % (fieldname, str(value))
        raise IngestionFieldValidationError(msg)
    return value
def valid_bit(dummy, value):
    """Normalize any truthy value to 1 and any falsy value to 0.

    The first argument (the field name) is ignored; it exists only so this
    validator matches the common (fieldname, value) signature.
    """
    return 1 if value else 0
_ACCEPTED_FIELD_NAMES = {
# Don't use underscores ('_') for the field names.
# example
# 'fieldname': (default_value, validation_function),
# the ingestion package submission ID
'subid' : (lambda:'0', valid_string),
# the ingestion package record ID
'recid' : (lambda:0, positive_int),
# the date on which the ingestion package was submitted
'date' : (lambda:datetime.now().strftime(_STANDARD_TIME_FORMAT), valid_date),
# the ingestion package itself
'content' : (lambda:None, valid_string),
# the source of the ingestion package
'source' : (lambda:None, valid_string),
}
if CFG_BIBINGEST_VERSIONING:
version = {
# the version of the ingestion package
'version' : (lambda:1, valid_bit),
}
_ACCEPTED_FIELD_NAMES.update(version)
_ACCEPTED_FIELD_OPERATORS = (
#'value', # When no operator is used, the "value" keyword is reserved
# values greater than this
'from',
# values greater or equal than this
'and_from',
# values lower than this
'to',
# values lower or equal than this
'and_to',
# this value should be treated as a regular expression
'regex',
# range of values
'in',
# sort results ascending
'sort_by_asc',
# sort results descending
'sort_by_desc',
# group results
'group_by',
# limit the number results
'limit',
# skip this number of results from the beginning
'skip',
)
class IngestionPackage(object):
"""The Ingestion Package default class"""
def __init__(self, storage_engine_instance, storage_engine_settings = None):
"""
The constructor.
"""
self._accepted_field_names = _ACCEPTED_FIELD_NAMES
self._accepted_field_operators = _ACCEPTED_FIELD_OPERATORS
self._storage_engine = storage_engine_instance
self._storage_engine_settings = storage_engine_settings
if self._storage_engine_settings is not None:
self.reconfigure_storage_engine()
def reconfigure_storage_engine(self):
"""
Reconfigures the storage engine according to the given settings.
"""
self._storage_engine.reconfigure(self._storage_engine_settings)
# Helper functions
def _parse_kwargs(self, kwargs):
"""
Parses the given kwargs based on the list of accepted field names and
operators and returns a dictionary.
"""
parsed_kwargs = {}
if kwargs is None:
return parsed_kwargs
for kwarg_key, kwarg_value in kwargs.iteritems():
# Split the fieldname from the operator
kwarg_key_field_name_and_operator = kwarg_key.split('_', 1)
if len(kwarg_key_field_name_and_operator) == 1:
# Received a fieldname without any operators
kwarg_key_field_name, kwarg_key_field_operator = (kwarg_key_field_name_and_operator[0], '')
else:
# Received a fieldname with an operator
kwarg_key_field_name, kwarg_key_field_operator = kwarg_key_field_name_and_operator
if kwarg_key_field_name:
# Check if this field name is in the list of accpeted field names
if kwarg_key_field_name in self._accepted_field_names.keys():
# Check if this field name is already in the list of parsed keyword arguments
if parsed_kwargs.has_key(kwarg_key_field_name):
# Check if there is an operator set for this field name
if kwarg_key_field_operator:
# Check if the operator is in the list of accepted field operators
if kwarg_key_field_operator in self._accepted_field_operators:
# Add this field operator and its value to the parsed keyword arguments
parsed_kwargs[kwarg_key_field_name][kwarg_key_field_operator] = kwarg_value
else:
# No field operator was set, therefore add the value for this field
# to the parser keyword arguments
kwarg_value = self._accepted_field_names[kwarg_key_field_name][1](kwarg_key_field_name, kwarg_value)
parsed_kwargs[kwarg_key_field_name]['value'] = kwarg_value
else:
# This is a new field name. Check if an operator was set for this field name
if kwarg_key_field_operator:
# Check if the operator is in the list of accepted field operators
if kwarg_key_field_operator in self._accepted_field_operators:
# Add this field operator and its value to the parsed keyword arguments
parsed_kwargs[kwarg_key_field_name] = { kwarg_key_field_operator : kwarg_value }
else:
# No field operator was set, therefore add the value for this field
# to the parser keyword arguments
kwarg_value = self._accepted_field_names[kwarg_key_field_name][1](kwarg_key_field_name, kwarg_value)
parsed_kwargs[kwarg_key_field_name] = { 'value' : kwarg_value }
else:
# The kwarg_key_field_name is empty, it means we have
# an operator like filter, skip, etc
if kwarg_key_field_operator in self._accepted_field_operators:
if parsed_kwargs.has_key('_operators'):
parsed_kwargs['_operators'][kwarg_key_field_operator] = kwarg_value
else:
parsed_kwargs['_operators'] = { kwarg_key_field_operator : kwarg_value }
if CFG_BIBINGEST_VERSIONING:
# Set the latest version, unless it has been explicitly set
version_field_name = 'version'
version_default_value = self._accepted_field_names[version_field_name][0]()
parsed_kwargs.setdefault(version_field_name, { 'value' : version_default_value })
return parsed_kwargs
def _complete_parsed_kwargs(self, parsed_kwargs):
"""
Completes the dictionary of parsed_kwargs with the necessary default values.
"""
for items in self._accepted_field_names.iteritems():
fieldname = items[0]
default_value = items[1][0]()
if fieldname not in parsed_kwargs.keys() and default_value is not None:
parsed_kwargs[fieldname] = { 'value' : default_value }
return parsed_kwargs
# Implement all the CRUD functions: create, read, update and delete
# Read one
def get_one(self, unique_id):
"""
Retrieves the ingestion package from the database given its unique ID.
"""
# TODO: what if we have concurrent requests and the storage engine
# gets reconfigured before actually executing the query?
if CFG_BIBINGEST_ONE_STORAGE_ENGINE_INSTANCE_PER_STORAGE_ENGINE:
self.reconfigure_storage_engine()
return self._storage_engine.get_one(unique_id)
# Read many
def get_many(self, **kwargs):
"""
Retrieves all the ingestion packages from the database that match the given
arguments. Arguments must comply to a specified list of argument names.
"""
parsed_kwargs = self._parse_kwargs(kwargs)
if CFG_BIBINGEST_ONE_STORAGE_ENGINE_INSTANCE_PER_STORAGE_ENGINE:
self.reconfigure_storage_engine()
return self._storage_engine.get_many(parsed_kwargs)
# Create one
def store_one(self, **kwargs):
"""
Stores the ingestion package into the database.
Returns the id of the ingestion_package in the storage engine.
"""
parsed_kwargs = self._parse_kwargs(kwargs)
parsed_kwargs = self._complete_parsed_kwargs(parsed_kwargs)
if CFG_BIBINGEST_ONE_STORAGE_ENGINE_INSTANCE_PER_STORAGE_ENGINE:
self.reconfigure_storage_engine()
# TODO: add optional check to make sure we don't store duplicates
# could do a get_many before storing to check if any results come up
return self._storage_engine.store_one(parsed_kwargs)
# Create many
def store_many(self, ingestion_packages):
"""
Stores the ingestion packages into the database.
Must be given an iterable of dictionaries as input.
Each dictionary must contain "key: value" pairs containing field names and
their values as they would have been give to the store_ingestion_package
function.
"""
data = []
for ingestion_package in ingestion_packages:
parsed_kwargs = self._parse_kwargs(ingestion_package)
parsed_kwargs = self._complete_parsed_kwargs(parsed_kwargs)
data.append(parsed_kwargs)
if CFG_BIBINGEST_ONE_STORAGE_ENGINE_INSTANCE_PER_STORAGE_ENGINE:
self.reconfigure_storage_engine()
# TODO: add optional check to make sure we don't store duplicates
# could do a get_many before storing to check if any results come up
return self._storage_engine.store_many(data)
# Delete one
def remove_one(self, unique_id):
"""
Removes the ingestion package given its unique ID.
"""
if CFG_BIBINGEST_ONE_STORAGE_ENGINE_INSTANCE_PER_STORAGE_ENGINE:
self.reconfigure_storage_engine()
return self._storage_engine.remove_one(unique_id)
# Delete many
def remove_many(self, **kwargs):
"""
Removes the ingestion packages based on the given arguments.
"""
parsed_kwargs = self._parse_kwargs(kwargs)
if CFG_BIBINGEST_ONE_STORAGE_ENGINE_INSTANCE_PER_STORAGE_ENGINE:
self.reconfigure_storage_engine()
if CFG_BIBINGEST_VERSIONING:
# MAYBE: check if version is set as 0 (old versions) and don't continue?
version_field_name = 'version'
version_default_value = self._accepted_field_names[version_field_name][0]()
#changes = { version_field_name : int( not version_default_value ) }
#parsed_changes = self._parse_kwargs(changes)
parsed_changes = { version_field_name : { 'value' : int( not version_default_value ) } }
return self._storage_engine.update_many(parsed_changes, parsed_kwargs)
else:
return self._storage_engine.remove_many(parsed_kwargs)
# Update one
def update_one(self, changes = None, **kwargs):
"""
Updates one ingestion package (the first one found) matching the kwargs
according to the changes dictionary.
The changes dictionary must contain "key: value" pairs containing field names
and their values as they would have been given to the
store_ingestion_package function.
"""
parsed_kwargs = self._parse_kwargs(kwargs)
if CFG_BIBINGEST_ONE_STORAGE_ENGINE_INSTANCE_PER_STORAGE_ENGINE:
self.reconfigure_storage_engine()
if CFG_BIBINGEST_VERSIONING:
version_field_name = 'version'
version_default_value = self._accepted_field_names[version_field_name][0]()
matching_entries = self._storage_engine.get_many(parsed_kwargs)
for matching_entry in matching_entries:
matching_entry.update({ version_field_name : int( not version_default_value ) })
parsed_matching_entry = self._parse_kwargs(matching_entry)
self._storage_engine.store_one(parsed_matching_entry)
break
date_field_name = 'date'
date_now_value = datetime.now().strftime(_STANDARD_TIME_FORMAT)
date_changes = { date_field_name : date_now_value }
changes.update(date_changes)
parsed_changes = self._parse_kwargs(changes)
return self._storage_engine.update_one(parsed_changes, parsed_kwargs)
# Update many
def update_many(self, changes = None, **kwargs):
"""
Updates all the ingestion package matching the kwargs
according to the changes dictionary.
The changes dictionary must contain "key: value" pairs containing field names
and their values as they would have been given to the
store_ingestion_package function.
"""
parsed_kwargs = self._parse_kwargs(kwargs)
if CFG_BIBINGEST_ONE_STORAGE_ENGINE_INSTANCE_PER_STORAGE_ENGINE:
self.reconfigure_storage_engine()
if CFG_BIBINGEST_VERSIONING:
version_field_name = 'version'
version_default_value = self._accepted_field_names[version_field_name][0]()
matching_entries = self._storage_engine.get_many(parsed_kwargs)
# TODO: make this more efficient. Gather all the matching entries,
# change 'version' for all of them and then run store_many
# for all of them together
for matching_entry in matching_entries:
matching_entry.update({ version_field_name : int( not version_default_value ) })
parsed_matching_entry = self._parse_kwargs(matching_entry)
self._storage_engine.store_one(parsed_matching_entry)
date_field_name = 'date'
date_now_value = datetime.now().strftime(_STANDARD_TIME_FORMAT)
date_changes = { date_field_name : date_now_value }
changes.update(date_changes)
parsed_changes = self._parse_kwargs(changes)
return self._storage_engine.update_many(parsed_changes, parsed_kwargs)
# Other functions
def count(self):
"""
Returns the count of total entries for this ingestion package.
"""
if CFG_BIBINGEST_ONE_STORAGE_ENGINE_INSTANCE_PER_STORAGE_ENGINE:
self.reconfigure_storage_engine()
return self._storage_engine.count()
# Validate
def validate(self, content, md5_hash):
"""
Validates the ingestion package by checking its md5 hash.
"""
try:
# when we pass to python >= 2.5 we should
# be able to use md5 from hashlib
content_md5_hash = md5(content).hexdigest()
except:
content_md5_hash = md5.new(content).hexdigest()
return content_md5_hash == md5_hash
| gpl-2.0 |
iambibhas/django | django/core/mail/backends/filebased.py | 35 | 2765 | """Email backend that writes messages to a file."""
import datetime
import os
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.core.mail.backends.console import EmailBackend as ConsoleEmailBackend
from django.utils import six
class EmailBackend(ConsoleEmailBackend):
    """Email backend that writes messages to a log file on disk.

    Reuses the console backend's message formatting, but directs the
    output stream to a uniquely named file inside the directory given by
    the ``file_path`` keyword argument or the ``EMAIL_FILE_PATH`` setting.
    """
    def __init__(self, *args, **kwargs):
        # Cached log file name; computed lazily, once per backend instance.
        self._fname = None
        # An explicit file_path kwarg takes precedence over the setting.
        if 'file_path' in kwargs:
            self.file_path = kwargs.pop('file_path')
        else:
            self.file_path = getattr(settings, 'EMAIL_FILE_PATH', None)
        # Make sure self.file_path is a string.
        if not isinstance(self.file_path, six.string_types):
            raise ImproperlyConfigured('Path for saving emails is invalid: %r' % self.file_path)
        self.file_path = os.path.abspath(self.file_path)
        # Make sure that self.file_path is a directory if it exists.
        if os.path.exists(self.file_path) and not os.path.isdir(self.file_path):
            raise ImproperlyConfigured(
                'Path for saving email messages exists, but is not a directory: %s' % self.file_path
            )
        # Try to create it, if it does not exist.
        elif not os.path.exists(self.file_path):
            try:
                os.makedirs(self.file_path)
            except OSError as err:
                raise ImproperlyConfigured(
                    'Could not create directory for saving email messages: %s (%s)' % (self.file_path, err)
                )
        # Make sure that self.file_path is writable.
        if not os.access(self.file_path, os.W_OK):
            raise ImproperlyConfigured('Could not write to directory: %s' % self.file_path)
        # Finally, call super().
        # Since we're using the console-based backend as a base,
        # force the stream to be None, so we don't default to stdout
        kwargs['stream'] = None
        super(EmailBackend, self).__init__(*args, **kwargs)

    def write_message(self, message):
        """Write one message to the open stream, followed by a dashed
        separator line, as raw bytes."""
        self.stream.write(message.message().as_bytes() + b'\n')
        self.stream.write(b'-' * 79)
        self.stream.write(b'\n')

    def _get_filename(self):
        """Return a unique file name."""
        if self._fname is None:
            # Timestamp plus the instance id keeps concurrent backend
            # instances from writing to the same log file.
            timestamp = datetime.datetime.now().strftime("%Y%m%d-%H%M%S")
            fname = "%s-%s.log" % (timestamp, abs(id(self)))
            self._fname = os.path.join(self.file_path, fname)
        return self._fname

    def open(self):
        """Open the log file in binary append mode.

        Return True if a new stream was opened, False if one was already
        open (mirrors the connection semantics of the SMTP backend).
        """
        if self.stream is None:
            self.stream = open(self._get_filename(), 'ab')
            return True
        return False

    def close(self):
        """Close the stream if open, always clearing the reference."""
        try:
            if self.stream is not None:
                self.stream.close()
        finally:
            self.stream = None
| bsd-3-clause |
jfhumann/servo | tests/wpt/css-tests/tools/wptserve/tests/functional/test_server.py | 299 | 1320 | import os
import unittest
import urllib2
import json
import wptserve
from base import TestUsingServer, doc_root
class TestFileHandler(TestUsingServer):
    def test_not_handled(self):
        """A request for a path with no handler must yield HTTP 404."""
        with self.assertRaises(urllib2.HTTPError) as cm:
            # No need to bind the response: the call is expected to raise.
            self.request("/not_existing")
        # assertEqual instead of the deprecated assertEquals alias.
        self.assertEqual(cm.exception.code, 404)
class TestRewriter(TestUsingServer):
    def test_rewrite(self):
        """A rewritten request must be served by the target route's handler."""
        @wptserve.handlers.handler
        def handler(request, response):
            return request.request_path

        method, target_path = "GET", "/test/rewritten"
        self.server.rewriter.register(method, "/test/original", target_path)
        self.server.router.register(method, target_path, handler)
        resp = self.request("/test/original")
        self.assertEquals(200, resp.getcode())
        self.assertEquals("/test/rewritten", resp.read())
class TestRequestHandler(TestUsingServer):
    def test_exception(self):
        """A handler that raises must surface as HTTP 500 to the client."""
        @wptserve.handlers.handler
        def handler(request, response):
            raise Exception

        route = ("GET", "/test/raises", handler)
        self.server.router.register(*route)
        with self.assertRaises(urllib2.HTTPError) as cm:
            # No need to bind the response: the call is expected to raise.
            self.request("/test/raises")
        # assertEqual instead of the deprecated assertEquals alias.
        self.assertEqual(cm.exception.code, 500)
if __name__ == "__main__":
    # Allow running this test module directly from the command line.
    unittest.main()
| mpl-2.0 |
fossoult/odoo | addons/portal/mail_mail.py | 320 | 2625 | # -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2011 OpenERP S.A (<http://www.openerp.com>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from openerp import SUPERUSER_ID
from openerp.osv import osv
from openerp.tools.translate import _
class mail_mail(osv.Model):
    """ Update of mail_mail class, to add the signin URL to notifications. """
    _inherit = 'mail.mail'

    def _get_partner_access_link(self, cr, uid, mail, partner=None, context=None):
        """ Generate URLs for links in mails:
            - partner is not an user: signup_url
            - partner is an user: fallback on classic URL
        """
        if context is None:
            context = {}
        # Users (and missing partners) keep the classic access link.
        if not partner or partner.user_ids:
            return super(mail_mail, self)._get_partner_access_link(
                cr, uid, mail, partner=partner, context=context)
        # Partner without any user: point the footer link to a signup URL.
        signup_context = dict(context, signup_valid=True)
        partner_model = self.pool.get('res.partner')
        signup_url = partner_model._get_signup_url_for_action(
            cr, SUPERUSER_ID, [partner.id],
            action='mail.action_mail_redirect',
            model=mail.model, res_id=mail.res_id,
            context=signup_context)[partner.id]
        if mail.record_name:
            portal_msg = '%s %s' % (context.get('model_name', ''), mail.record_name)
        else:
            portal_msg = _('your messages ')
        return ", <span class='oe_mail_footer_access'><small>%(access_msg)s <a style='color:inherit' href='%(portal_link)s'>%(portal_msg)s</a></small></span>" % {
            'access_msg': _('access directly to'),
            'portal_link': signup_url,
            'portal_msg': portal_msg,
        }
| agpl-3.0 |